Compare commits

..

5 Commits

Author SHA1 Message Date
Piotr Osiewicz
02a8a7dad2 Merge branch 'main' into multi-line-highlight-stack-trace 2025-06-16 12:32:27 +02:00
Piotr Osiewicz
1815b534f0 WIP2 2025-06-09 18:00:41 +02:00
Piotr Osiewicz
d08dd46186 clippy 2025-06-09 13:22:57 +02:00
Piotr Osiewicz
688375619c Merge branch 'main' into multi-line-highlight-stack-trace 2025-06-09 12:48:04 +02:00
Piotr Osiewicz
2ea4819127 wip 2025-06-09 00:16:19 +02:00
135 changed files with 2362 additions and 4977 deletions

View File

@@ -10,8 +10,8 @@ inputs:
runs:
using: "composite"
steps:
- name: Install test runner
shell: powershell
- name: Install Rust
shell: pwsh
working-directory: ${{ inputs.working-directory }}
run: cargo install cargo-nextest --locked
@@ -21,6 +21,6 @@ runs:
node-version: "18"
- name: Run tests
shell: powershell
shell: pwsh
working-directory: ${{ inputs.working-directory }}
run: cargo nextest run --workspace --no-fail-fast
run: cargo nextest run --workspace --no-fail-fast --config='profile.dev.debug="limited"'

View File

@@ -373,6 +373,64 @@ jobs:
if: always()
run: rm -rf ./../.cargo
windows_clippy:
timeout-minutes: 60
name: (Windows) Run Clippy
needs: [job_spec]
if: |
github.repository_owner == 'zed-industries' &&
needs.job_spec.outputs.run_tests == 'true'
runs-on: windows-2025-16
steps:
            # more info here: https://github.com/rust-lang/cargo/issues/13020
- name: Enable longer pathnames for git
run: git config --system core.longpaths true
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
- name: Create Dev Drive using ReFS
run: ./script/setup-dev-driver.ps1
# actions/checkout does not let us clone into anywhere outside ${{ github.workspace }}, so we have to copy the clone...
- name: Copy Git Repo to Dev Drive
run: |
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse
- name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
workspaces: ${{ env.ZED_WORKSPACE }}
cache-provider: "github"
- name: Configure CI
run: |
mkdir -p ${{ env.CARGO_HOME }} -ErrorAction Ignore
cp ./.cargo/ci-config.toml ${{ env.CARGO_HOME }}/config.toml
- name: cargo clippy
working-directory: ${{ env.ZED_WORKSPACE }}
run: ./script/clippy.ps1
- name: Check dev drive space
working-directory: ${{ env.ZED_WORKSPACE }}
# `setup-dev-driver.ps1` creates a 100GB drive, with CI taking up ~45GB of the drive.
run: ./script/exit-ci-if-dev-drive-is-full.ps1 95
          # Since the Windows runners are stateful, we need to remove the config file to prevent potential bugs.
- name: Clean CI config file
if: always()
run: |
if (Test-Path "${{ env.CARGO_HOME }}/config.toml") {
Remove-Item -Path "${{ env.CARGO_HOME }}/config.toml" -Force
}
# Windows CI takes twice as long as our other platforms and fast github hosted runners are expensive.
# But we still want to do CI, so let's only run tests on main and come back to this when we're
# ready to self host our Windows CI (e.g. during the push for full Windows support)
windows_tests:
timeout-minutes: 60
name: (Windows) Run Tests
@@ -380,45 +438,51 @@ jobs:
if: |
github.repository_owner == 'zed-industries' &&
needs.job_spec.outputs.run_tests == 'true'
runs-on: [self-hosted, Windows, X64]
# Use bigger runners for PRs (speed); smaller for async (cost)
runs-on: ${{ github.event_name == 'pull_request' && 'windows-2025-32' || 'windows-2025-16' }}
steps:
- name: Environment Setup
run: |
$RunnerDir = Split-Path -Parent $env:RUNNER_WORKSPACE
Write-Output `
"RUSTUP_HOME=$RunnerDir\.rustup" `
"CARGO_HOME=$RunnerDir\.cargo" `
"PATH=$RunnerDir\.cargo\bin;$env:PATH" `
>> $env:GITHUB_ENV
      # more info here: https://github.com/rust-lang/cargo/issues/13020
- name: Enable longer pathnames for git
run: git config --system core.longpaths true
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
- name: Setup Cargo and Rustup
- name: Create Dev Drive using ReFS
run: ./script/setup-dev-driver.ps1
# actions/checkout does not let us clone into anywhere outside ${{ github.workspace }}, so we have to copy the clone...
- name: Copy Git Repo to Dev Drive
run: |
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse
- name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
workspaces: ${{ env.ZED_WORKSPACE }}
cache-provider: "github"
- name: Configure CI
run: |
mkdir -p ${{ env.CARGO_HOME }} -ErrorAction Ignore
cp ./.cargo/ci-config.toml ${{ env.CARGO_HOME }}/config.toml
.\script\install-rustup.ps1
- name: cargo clippy
run: |
.\script\clippy.ps1
- name: Run tests
uses: ./.github/actions/run_tests_windows
with:
working-directory: ${{ env.ZED_WORKSPACE }}
- name: Build Zed
working-directory: ${{ env.ZED_WORKSPACE }}
run: cargo build
- name: Limit target directory size
run: ./script/clear-target-dir-if-larger-than.ps1 250
# - name: Check dev drive space
# working-directory: ${{ env.ZED_WORKSPACE }}
# # `setup-dev-driver.ps1` creates a 100GB drive, with CI taking up ~45GB of the drive.
# run: ./script/exit-ci-if-dev-drive-is-full.ps1 95
- name: Check dev drive space
working-directory: ${{ env.ZED_WORKSPACE }}
# `setup-dev-driver.ps1` creates a 100GB drive, with CI taking up ~45GB of the drive.
run: ./script/exit-ci-if-dev-drive-is-full.ps1 95
          # Since the Windows runners are stateful, we need to remove the config file to prevent potential bugs.
- name: Clean CI config file
@@ -441,6 +505,7 @@ jobs:
- linux_tests
- build_remote_server
- macos_tests
- windows_clippy
- windows_tests
if: |
github.repository_owner == 'zed-industries' &&
@@ -460,6 +525,7 @@ jobs:
[[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
[[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
[[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
[[ "${{ needs.windows_clippy.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows clippy failed"; }
[[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
# This check is intentionally disabled. See: https://github.com/zed-industries/zed/pull/28431
# [[ "${{ needs.migration_checks.result }}" != 'success' ]] && { RET_CODE=1; echo "Migration Checks failed"; }

6
Cargo.lock generated
View File

@@ -64,7 +64,6 @@ dependencies = [
"buffer_diff",
"chrono",
"client",
"clock",
"collections",
"component",
"context_server",
@@ -4243,7 +4242,6 @@ dependencies = [
"gpui",
"serde_json",
"task",
"util",
"workspace-hack",
]
@@ -4269,7 +4267,6 @@ dependencies = [
name = "debugger_ui"
version = "0.1.0"
dependencies = [
"alacritty_terminal",
"anyhow",
"client",
"collections",
@@ -8946,7 +8943,6 @@ dependencies = [
"http_client",
"icons",
"image",
"log",
"parking_lot",
"proto",
"schemars",
@@ -8982,7 +8978,6 @@ dependencies = [
"gpui",
"gpui_tokio",
"http_client",
"language",
"language_model",
"lmstudio",
"log",
@@ -15838,7 +15833,6 @@ dependencies = [
"serde_json_lenient",
"sha2",
"shellexpand 2.1.2",
"smol",
"util",
"workspace-hack",
"zed_actions",

View File

@@ -33,7 +33,6 @@ collections.workspace = true
component.workspace = true
context_server.workspace = true
convert_case.workspace = true
clock.workspace = true
db.workspace = true
editor.workspace = true
extension.workspace = true

View File

@@ -586,7 +586,7 @@ impl AgentConfiguration {
if let Some(server) =
this.get_server(&context_server_id)
{
this.start_server(server, cx);
this.start_server(server, cx).log_err();
}
})
}

View File

@@ -1,6 +1,7 @@
use context_server::ContextServerCommand;
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, WeakEntity, prelude::*};
use project::project_settings::{ContextServerSettings, ProjectSettings};
use project::project_settings::{ContextServerConfiguration, ProjectSettings};
use serde_json::json;
use settings::update_settings_file;
use ui::{KeyBinding, Modal, ModalFooter, ModalHeader, Section, Tooltip, prelude::*};
use ui_input::SingleLineInput;
@@ -80,12 +81,13 @@ impl AddContextServerModal {
update_settings_file::<ProjectSettings>(fs.clone(), cx, |settings, _| {
settings.context_servers.insert(
name.into(),
ContextServerSettings::Custom {
command: ContextServerCommand {
ContextServerConfiguration {
command: Some(ContextServerCommand {
path,
args,
env: None,
},
}),
settings: Some(json!({})),
},
);
});

View File

@@ -15,7 +15,7 @@ use markdown::{Markdown, MarkdownElement, MarkdownStyle};
use notifications::status_toast::{StatusToast, ToastIcon};
use project::{
context_server_store::{ContextServerStatus, ContextServerStore},
project_settings::{ContextServerSettings, ProjectSettings},
project_settings::{ContextServerConfiguration, ProjectSettings},
};
use settings::{Settings as _, update_settings_file};
use theme::ThemeSettings;
@@ -175,9 +175,8 @@ impl ConfigureContextServerModal {
let settings_changed = ProjectSettings::get_global(cx)
.context_servers
.get(&id.0)
.map_or(true, |settings| match settings {
ContextServerSettings::Custom { .. } => false,
ContextServerSettings::Extension { settings } => settings != &settings_value,
.map_or(true, |config| {
config.settings.as_ref() != Some(&settings_value)
});
let is_running = self.context_server_store.read(cx).status_for_server(&id)
@@ -222,12 +221,17 @@ impl ConfigureContextServerModal {
update_settings_file::<ProjectSettings>(workspace.read(cx).app_state().fs.clone(), cx, {
let id = id.clone();
|settings, _| {
settings.context_servers.insert(
id.0,
ContextServerSettings::Extension {
settings: settings_value,
},
);
if let Some(server_config) = settings.context_servers.get_mut(&id.0) {
server_config.settings = Some(settings_value);
} else {
settings.context_servers.insert(
id.0,
ContextServerConfiguration {
settings: Some(settings_value),
..Default::default()
},
);
}
}
});
}

View File

@@ -520,15 +520,10 @@ impl AgentPanel {
});
let message_editor_subscription =
cx.subscribe(&message_editor, |this, _, event, cx| match event {
cx.subscribe(&message_editor, |_, _, event, cx| match event {
MessageEditorEvent::Changed | MessageEditorEvent::EstimatedTokenCount => {
cx.notify();
}
MessageEditorEvent::ScrollThreadToBottom => {
this.thread.update(cx, |thread, cx| {
thread.scroll_to_bottom(cx);
});
}
});
let thread_id = thread.read(cx).id().clone();
@@ -808,15 +803,10 @@ impl AgentPanel {
self.message_editor.focus_handle(cx).focus(window);
let message_editor_subscription =
cx.subscribe(&self.message_editor, |this, _, event, cx| match event {
cx.subscribe(&self.message_editor, |_, _, event, cx| match event {
MessageEditorEvent::Changed | MessageEditorEvent::EstimatedTokenCount => {
cx.notify();
}
MessageEditorEvent::ScrollThreadToBottom => {
this.thread.update(cx, |thread, cx| {
thread.scroll_to_bottom(cx);
});
}
});
self._active_thread_subscriptions = vec![
@@ -1028,15 +1018,10 @@ impl AgentPanel {
self.message_editor.focus_handle(cx).focus(window);
let message_editor_subscription =
cx.subscribe(&self.message_editor, |this, _, event, cx| match event {
cx.subscribe(&self.message_editor, |_, _, event, cx| match event {
MessageEditorEvent::Changed | MessageEditorEvent::EstimatedTokenCount => {
cx.notify();
}
MessageEditorEvent::ScrollThreadToBottom => {
this.thread.update(cx, |thread, cx| {
thread.scroll_to_bottom(cx);
});
}
});
self._active_thread_subscriptions = vec![

View File

@@ -765,6 +765,9 @@ impl InlineAssistant {
PromptEditorEvent::CancelRequested => {
self.finish_assist(assist_id, true, window, cx);
}
PromptEditorEvent::DismissRequested => {
self.dismiss_assist(assist_id, window, cx);
}
PromptEditorEvent::Resized { .. } => {
// This only matters for the terminal inline assistant
}
@@ -1350,18 +1353,11 @@ impl InlineAssistant {
editor.clear_highlights::<InlineAssist>(cx);
} else {
editor.highlight_text::<InlineAssist>(
foreground_ranges
.into_iter()
.map(|range| {
(
range,
HighlightStyle {
fade_out: Some(0.6),
..Default::default()
},
)
})
.collect(),
foreground_ranges,
HighlightStyle {
fade_out: Some(0.6),
..Default::default()
},
cx,
);
}

View File

@@ -403,7 +403,9 @@ impl<T: 'static> PromptEditor<T> {
CodegenStatus::Idle => {
cx.emit(PromptEditorEvent::StartRequested);
}
CodegenStatus::Pending => {}
CodegenStatus::Pending => {
cx.emit(PromptEditorEvent::DismissRequested);
}
CodegenStatus::Done => {
if self.edited_since_done {
cx.emit(PromptEditorEvent::StartRequested);
@@ -829,6 +831,7 @@ pub enum PromptEditorEvent {
StopRequested,
ConfirmRequested { execute: bool },
CancelRequested,
DismissRequested,
Resized { height_in_lines: u8 },
}

View File

@@ -39,9 +39,7 @@ use proto::Plan;
use settings::Settings;
use std::time::Duration;
use theme::ThemeSettings;
use ui::{
Callout, Disclosure, Divider, DividerColor, KeyBinding, PopoverMenuHandle, Tooltip, prelude::*,
};
use ui::{Callout, Disclosure, KeyBinding, PopoverMenuHandle, Tooltip, prelude::*};
use util::{ResultExt as _, maybe};
use workspace::{CollaboratorId, Workspace};
use zed_llm_client::CompletionIntent;
@@ -301,7 +299,6 @@ impl MessageEditor {
self.set_editor_is_expanded(false, cx);
self.send_to_model(window, cx);
cx.emit(MessageEditorEvent::ScrollThreadToBottom);
cx.notify();
}
@@ -508,46 +505,6 @@ impl MessageEditor {
cx.notify();
}
fn handle_reject_file_changes(
&mut self,
buffer: Entity<Buffer>,
_window: &mut Window,
cx: &mut Context<Self>,
) {
if self.thread.read(cx).has_pending_edit_tool_uses() {
return;
}
self.thread.update(cx, |thread, cx| {
let buffer_snapshot = buffer.read(cx);
let start = buffer_snapshot.anchor_before(Point::new(0, 0));
let end = buffer_snapshot.anchor_after(buffer_snapshot.max_point());
thread
.reject_edits_in_ranges(buffer, vec![start..end], cx)
.detach();
});
cx.notify();
}
fn handle_accept_file_changes(
&mut self,
buffer: Entity<Buffer>,
_window: &mut Window,
cx: &mut Context<Self>,
) {
if self.thread.read(cx).has_pending_edit_tool_uses() {
return;
}
self.thread.update(cx, |thread, cx| {
let buffer_snapshot = buffer.read(cx);
let start = buffer_snapshot.anchor_before(Point::new(0, 0));
let end = buffer_snapshot.anchor_after(buffer_snapshot.max_point());
thread.keep_edits_in_range(buffer, start..end, cx);
});
cx.notify();
}
fn render_burn_mode_toggle(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
let thread = self.thread.read(cx);
let model = thread.configured_model();
@@ -907,7 +864,7 @@ impl MessageEditor {
)
}
fn render_edits_bar(
fn render_changed_buffers(
&self,
changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
window: &mut Window,
@@ -1031,7 +988,7 @@ impl MessageEditor {
this.handle_review_click(window, cx)
})),
)
.child(Divider::vertical().color(DividerColor::Border))
.child(ui::Divider::vertical().color(ui::DividerColor::Border))
.child(
Button::new("reject-all-changes", "Reject All")
.label_size(LabelSize::Small)
@@ -1081,7 +1038,7 @@ impl MessageEditor {
let file = buffer.read(cx).file()?;
let path = file.path();
let file_path = path.parent().and_then(|parent| {
let parent_label = path.parent().and_then(|parent| {
let parent_str = parent.to_string_lossy();
if parent_str.is_empty() {
@@ -1100,7 +1057,7 @@ impl MessageEditor {
}
});
let file_name = path.file_name().map(|name| {
let name_label = path.file_name().map(|name| {
Label::new(name.to_string_lossy().to_string())
.size(LabelSize::XSmall)
.buffer_font(cx)
@@ -1115,22 +1072,36 @@ impl MessageEditor {
.size(IconSize::Small)
});
let hover_color = cx
.theme()
.colors()
.element_background
.blend(cx.theme().colors().editor_foreground.opacity(0.025));
let overlay_gradient = linear_gradient(
90.,
linear_color_stop(editor_bg_color, 1.),
linear_color_stop(editor_bg_color.opacity(0.2), 0.),
);
let overlay_gradient_hover = linear_gradient(
90.,
linear_color_stop(hover_color, 1.),
linear_color_stop(hover_color.opacity(0.2), 0.),
);
let element = h_flex()
.group("edited-code")
.id(("file-container", index))
.cursor_pointer()
.relative()
.py_1()
.pl_2()
.pr_1()
.gap_2()
.justify_between()
.bg(editor_bg_color)
.bg(cx.theme().colors().editor_background)
.hover(|style| style.bg(hover_color))
.when(index < changed_buffers.len() - 1, |parent| {
parent.border_color(border_color).border_b_1()
})
@@ -1145,75 +1116,47 @@ impl MessageEditor {
.child(
h_flex()
.gap_0p5()
.children(file_name)
.children(file_path),
.children(name_label)
.children(parent_label),
), // TODO: Implement line diff
// .child(Label::new("+").color(Color::Created))
// .child(Label::new("-").color(Color::Deleted)),
)
.child(
h_flex()
.gap_1()
.visible_on_hover("edited-code")
.child(
Button::new("review", "Review")
.label_size(LabelSize::Small)
.on_click({
let buffer = buffer.clone();
cx.listener(move |this, _, window, cx| {
this.handle_file_click(
buffer.clone(),
window,
cx,
);
})
}),
)
.child(
Divider::vertical().color(DividerColor::BorderVariant),
)
.child(
Button::new("reject-file", "Reject")
.label_size(LabelSize::Small)
.disabled(pending_edits)
.on_click({
let buffer = buffer.clone();
cx.listener(move |this, _, window, cx| {
this.handle_reject_file_changes(
buffer.clone(),
window,
cx,
);
})
}),
)
.child(
Button::new("accept-file", "Accept")
.label_size(LabelSize::Small)
.disabled(pending_edits)
.on_click({
let buffer = buffer.clone();
cx.listener(move |this, _, window, cx| {
this.handle_accept_file_changes(
buffer.clone(),
window,
cx,
);
})
}),
),
div().visible_on_hover("edited-code").child(
Button::new("review", "Review")
.label_size(LabelSize::Small)
.on_click({
let buffer = buffer.clone();
cx.listener(move |this, _, window, cx| {
this.handle_file_click(
buffer.clone(),
window,
cx,
);
})
}),
),
)
.child(
div()
.id("gradient-overlay")
.absolute()
.h_full()
.h_5_6()
.w_12()
.top_0()
.bottom_0()
.right(px(152.))
.bg(overlay_gradient),
);
.right(px(52.))
.bg(overlay_gradient)
.group_hover("edited-code", |style| {
style.bg(overlay_gradient_hover)
}),
)
.on_click({
let buffer = buffer.clone();
cx.listener(move |this, _, window, cx| {
this.handle_file_click(buffer.clone(), window, cx);
})
});
Some(element)
},
@@ -1511,7 +1454,6 @@ impl EventEmitter<MessageEditorEvent> for MessageEditor {}
pub enum MessageEditorEvent {
EstimatedTokenCount,
Changed,
ScrollThreadToBottom,
}
impl Focusable for MessageEditor {
@@ -1539,7 +1481,7 @@ impl Render for MessageEditor {
v_flex()
.size_full()
.when(changed_buffers.len() > 0, |parent| {
parent.child(self.render_edits_bar(&changed_buffers, window, cx))
parent.child(self.render_changed_buffers(&changed_buffers, window, cx))
})
.child(self.render_editor(window, cx))
.children({

View File

@@ -1,3 +1 @@
[The following is an auto-generated notification; do not reply]
These files have changed since the last read:
These files changed since last read:

View File

@@ -167,6 +167,9 @@ impl TerminalInlineAssistant {
PromptEditorEvent::CancelRequested => {
self.finish_assist(assist_id, true, false, window, cx);
}
PromptEditorEvent::DismissRequested => {
self.dismiss_assist(assist_id, window, cx);
}
PromptEditorEvent::Resized { height_in_lines } => {
self.insert_prompt_editor_into_terminal(assist_id, *height_in_lines, window, cx);
}

View File

@@ -1,4 +1,3 @@
use std::collections::BTreeMap;
use std::fmt::Write as _;
use std::io::Write;
use std::ops::Range;
@@ -19,7 +18,6 @@ use gpui::{
AnyWindowHandle, App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task,
WeakEntity,
};
use language::Buffer;
use language_model::{
ConfiguredModel, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
LanguageModelId, LanguageModelKnownError, LanguageModelRegistry, LanguageModelRequest,
@@ -346,7 +344,6 @@ pub struct Thread {
tools: Entity<ToolWorkingSet>,
tool_use: ToolUseState,
action_log: Entity<ActionLog>,
last_buffer_notifications: BTreeMap<Entity<Buffer>, clock::Global>,
last_restore_checkpoint: Option<LastRestoreCheckpoint>,
pending_checkpoint: Option<ThreadCheckpoint>,
initial_project_snapshot: Shared<Task<Option<Arc<ProjectSnapshot>>>>,
@@ -437,7 +434,6 @@ impl Thread {
pending_checkpoint: None,
tool_use: ToolUseState::new(tools.clone()),
action_log: cx.new(|_| ActionLog::new(project.clone())),
last_buffer_notifications: BTreeMap::new(),
initial_project_snapshot: {
let project_snapshot = Self::project_snapshot(project, cx);
cx.foreground_executor()
@@ -568,7 +564,6 @@ impl Thread {
tools: tools.clone(),
tool_use,
action_log: cx.new(|_| ActionLog::new(project)),
last_buffer_notifications: BTreeMap::new(),
initial_project_snapshot: Task::ready(serialized.initial_project_snapshot).shared(),
request_token_usage: serialized.request_token_usage,
cumulative_token_usage: serialized.cumulative_token_usage,
@@ -1050,33 +1045,6 @@ impl Thread {
id
}
pub fn insert_message_at(
&mut self,
index: usize,
role: Role,
segments: Vec<MessageSegment>,
loaded_context: LoadedContext,
creases: Vec<MessageCrease>,
is_hidden: bool,
cx: &mut Context<Self>,
) -> MessageId {
let id = self.next_message_id.post_inc();
self.messages.insert(
index,
Message {
id,
role,
segments,
loaded_context,
creases,
is_hidden,
},
);
self.touch_updated_at();
cx.emit(ThreadEvent::MessageAdded(id));
id
}
pub fn edit_message(
&mut self,
id: MessageId,
@@ -1256,19 +1224,6 @@ impl Thread {
self.remaining_turns -= 1;
match intent {
CompletionIntent::UserPrompt | CompletionIntent::ToolResults => {
self.attach_tracked_files_state(cx);
}
CompletionIntent::ThreadSummarization
| CompletionIntent::ThreadContextSummarization
| CompletionIntent::CreateFile
| CompletionIntent::EditFile
| CompletionIntent::InlineAssist
| CompletionIntent::TerminalInlineAssist
| CompletionIntent::GenerateGitCommitMessage => {}
};
let request = self.to_completion_request(model.clone(), intent, cx);
self.stream_completion(request, model, window, cx);
@@ -1434,6 +1389,8 @@ impl Thread {
request.messages[message_ix_to_cache].cache = true;
}
self.attached_tracked_files_state(&mut request.messages, cx);
request.tools = available_tools;
request.mode = if model.supports_max_mode() {
Some(self.completion_mode.into())
@@ -1496,48 +1453,44 @@ impl Thread {
request
}
pub fn attach_tracked_files_state(&mut self, cx: &mut Context<Self>) {
let action_log = self.action_log.read(cx);
let mut stale_files = String::new();
for stale_buffer in action_log.stale_buffers(cx) {
let version = stale_buffer.read(cx).version();
if self.last_buffer_notifications.get(&stale_buffer) != Some(&version) {
if let Some(file) = stale_buffer.read(cx).file() {
self.last_buffer_notifications
.insert(stale_buffer.clone(), version);
writeln!(&mut stale_files, "- {}", file.path().display()).ok();
}
}
}
if stale_files.is_empty() {
return;
}
// NOTE: Changes to this prompt require a symmetric update in the LLM Worker
fn attached_tracked_files_state(
&self,
messages: &mut Vec<LanguageModelRequestMessage>,
cx: &App,
) {
const STALE_FILES_HEADER: &str = include_str!("./prompts/stale_files_prompt_header.txt");
let content = format!("{STALE_FILES_HEADER}{stale_files}").replace("\r\n", "\n");
// Insert our message before the last Assistant message.
// Inserting it to the tail distracts the agent too much
let insert_position = self
.messages
.iter()
.enumerate()
.rfind(|(_, message)| message.role == Role::Assistant)
.map_or(self.messages.len(), |(i, _)| i);
let mut stale_message = String::new();
let is_hidden = true;
self.insert_message_at(
insert_position,
Role::User,
vec![MessageSegment::Text(content)],
LoadedContext::default(),
Vec::new(),
is_hidden,
cx,
);
let action_log = self.action_log.read(cx);
for stale_file in action_log.stale_buffers(cx) {
let Some(file) = stale_file.read(cx).file() else {
continue;
};
if stale_message.is_empty() {
write!(&mut stale_message, "{}\n", STALE_FILES_HEADER.trim()).ok();
}
writeln!(&mut stale_message, "- {}", file.path().display()).ok();
}
let mut content = Vec::with_capacity(2);
if !stale_message.is_empty() {
content.push(stale_message.into());
}
if !content.is_empty() {
let context_message = LanguageModelRequestMessage {
role: Role::User,
content,
cache: false,
};
messages.push(context_message);
}
}
pub fn stream_completion(
@@ -3272,7 +3225,7 @@ fn main() {{
)
.await;
let (_workspace, _thread_store, thread, context_store, _model) =
let (_workspace, _thread_store, thread, context_store, model) =
setup_test_environment(cx, project.clone()).await;
// Open buffer and add it to context
@@ -3291,14 +3244,24 @@ fn main() {{
thread.insert_user_message("Explain this code", loaded_context, None, Vec::new(), cx)
});
// Initially, no messages should have stale buffer notification
thread.read_with(cx, |thread, _| {
assert_eq!(thread.messages.len(), 1);
assert!(!thread.messages[0].is_hidden);
// Create a request and check that it doesn't have a stale buffer warning yet
let initial_request = thread.update(cx, |thread, cx| {
thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx)
});
// Modify the buffer to make it stale
// Make sure we don't have a stale file warning yet
let has_stale_warning = initial_request.messages.iter().any(|msg| {
msg.string_contents()
.contains("These files changed since last read:")
});
assert!(
!has_stale_warning,
"Should not have stale buffer warning before buffer is modified"
);
// Modify the buffer
buffer.update(cx, |buffer, cx| {
// Find a position at the end of line 1
buffer.edit(
[(1..1, "\n println!(\"Added a new line\");\n")],
None,
@@ -3306,63 +3269,38 @@ fn main() {{
);
});
// Check that the stale file notification was added
// Insert another user message without context
thread.update(cx, |thread, cx| {
thread.attach_tracked_files_state(cx);
});
thread.read_with(cx, |thread, _| {
// Should have 2 messages now: original user message + hidden stale notification
assert_eq!(thread.messages.len(), 2);
let stale_msg = &thread.messages[1];
assert!(stale_msg.is_hidden, "Stale notification should be hidden");
assert_eq!(stale_msg.role, Role::User);
let expected_content = "[The following is an auto-generated notification; do not reply]\n\nThese files have changed since the last read:\n- code.rs\n";
assert_eq!(
stale_msg.to_string(),
expected_content,
"Stale buffer notification should have the correct format"
);
});
// Test that calling attach_tracked_files_state again doesn't add duplicate notifications
thread.update(cx, |thread, cx| {
thread.attach_tracked_files_state(cx);
});
thread.read_with(cx, |thread, _| {
assert_eq!(thread.messages.len(), 2);
});
// Test with assistant message - notification should be inserted before it
thread.update(cx, |thread, cx| {
thread.insert_assistant_message(
vec![MessageSegment::Text("Here's an explanation...".into())],
thread.insert_user_message(
"What does the code do now?",
ContextLoadResult::default(),
None,
Vec::new(),
cx,
);
)
});
// Modify buffer again to create a new version
buffer.update(cx, |buffer, cx| {
buffer.edit([(1..1, "\n // Another change\n")], None, cx);
// Create a new request and check for the stale buffer warning
let new_request = thread.update(cx, |thread, cx| {
thread.to_completion_request(model.clone(), CompletionIntent::UserPrompt, cx)
});
thread.update(cx, |thread, cx| {
thread.attach_tracked_files_state(cx);
});
// We should have a stale file warning as the last message
let last_message = new_request
.messages
.last()
.expect("Request should have messages");
thread.read_with(cx, |thread, _| {
// Should have 4 messages: user, stale notification, new stale notification, assistant
assert_eq!(thread.messages.len(), 4);
// The last message should be the stale buffer notification
assert_eq!(last_message.role, Role::User);
// The new stale notification should be inserted before the assistant message
let new_stale_msg = &thread.messages[2];
assert!(
new_stale_msg
.to_string()
.contains("These files have changed since the last read:")
);
});
// Check the exact content of the message
let expected_content = "These files changed since last read:\n- code.rs\n";
assert_eq!(
last_message.string_contents(),
expected_content,
"Last message should be exactly the stale buffer notification"
);
}
#[gpui::test]

View File

@@ -456,18 +456,18 @@ impl ActionLog {
})?
}
/// Track a buffer as read by agent, so we can notify the model about user edits.
/// Track a buffer as read, so we can notify the model about user edits.
pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
self.track_buffer_internal(buffer, false, cx);
}
/// Mark a buffer as created by agent, so we can refresh it in the context
    /// Mark a buffer as created, so we can refresh it in the context
pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
self.edited_since_project_diagnostics_check = true;
self.track_buffer_internal(buffer.clone(), true, cx);
}
/// Mark a buffer as edited by agent, so we can refresh it in the context
/// Mark a buffer as edited, so we can refresh it in the context
pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
self.edited_since_project_diagnostics_check = true;

View File

@@ -8,7 +8,6 @@ use crate::{Template, Templates};
use anyhow::Result;
use assistant_tool::ActionLog;
use create_file_parser::{CreateFileParser, CreateFileParserEvent};
pub use edit_parser::EditFormat;
use edit_parser::{EditParser, EditParserEvent, EditParserMetrics};
use futures::{
Stream, StreamExt,
@@ -42,23 +41,13 @@ impl Template for CreateFilePromptTemplate {
}
#[derive(Serialize)]
struct EditFileXmlPromptTemplate {
struct EditFilePromptTemplate {
path: Option<PathBuf>,
edit_description: String,
}
impl Template for EditFileXmlPromptTemplate {
const TEMPLATE_NAME: &'static str = "edit_file_prompt_xml.hbs";
}
#[derive(Serialize)]
struct EditFileDiffFencedPromptTemplate {
path: Option<PathBuf>,
edit_description: String,
}
impl Template for EditFileDiffFencedPromptTemplate {
const TEMPLATE_NAME: &'static str = "edit_file_prompt_diff_fenced.hbs";
impl Template for EditFilePromptTemplate {
const TEMPLATE_NAME: &'static str = "edit_file_prompt.hbs";
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -81,7 +70,6 @@ pub struct EditAgent {
action_log: Entity<ActionLog>,
project: Entity<Project>,
templates: Arc<Templates>,
edit_format: EditFormat,
}
impl EditAgent {
@@ -90,14 +78,12 @@ impl EditAgent {
project: Entity<Project>,
action_log: Entity<ActionLog>,
templates: Arc<Templates>,
edit_format: EditFormat,
) -> Self {
EditAgent {
model,
project,
action_log,
templates,
edit_format,
}
}
@@ -223,23 +209,14 @@ impl EditAgent {
let this = self.clone();
let (events_tx, events_rx) = mpsc::unbounded();
let conversation = conversation.clone();
let edit_format = self.edit_format;
let output = cx.spawn(async move |cx| {
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
let path = cx.update(|cx| snapshot.resolve_file_path(cx, true))?;
let prompt = match edit_format {
EditFormat::XmlTags => EditFileXmlPromptTemplate {
path,
edit_description,
}
.render(&this.templates)?,
EditFormat::DiffFenced => EditFileDiffFencedPromptTemplate {
path,
edit_description,
}
.render(&this.templates)?,
};
let prompt = EditFilePromptTemplate {
path,
edit_description,
}
.render(&this.templates)?;
let edit_chunks = this
.request(conversation, CompletionIntent::EditFile, prompt, cx)
.await?;
@@ -259,7 +236,7 @@ impl EditAgent {
self.action_log
.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx))?;
let (output, edit_events) = Self::parse_edit_chunks(edit_chunks, self.edit_format, cx);
let (output, edit_events) = Self::parse_edit_chunks(edit_chunks, cx);
let mut edit_events = edit_events.peekable();
while let Some(edit_event) = Pin::new(&mut edit_events).peek().await {
// Skip events until we're at the start of a new edit.
@@ -373,7 +350,6 @@ impl EditAgent {
fn parse_edit_chunks(
chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
edit_format: EditFormat,
cx: &mut AsyncApp,
) -> (
Task<Result<EditAgentOutput>>,
@@ -383,7 +359,7 @@ impl EditAgent {
let output = cx.background_spawn(async move {
pin_mut!(chunks);
let mut parser = EditParser::new(edit_format);
let mut parser = EditParser::new();
let mut raw_edits = String::new();
while let Some(chunk) = chunks.next().await {
match chunk {
@@ -1379,13 +1355,7 @@ mod tests {
let project = Project::test(FakeFs::new(cx.executor()), [], cx).await;
let model = Arc::new(FakeLanguageModel::default());
let action_log = cx.new(|_| ActionLog::new(project.clone()));
EditAgent::new(
model,
project,
action_log,
Templates::new(),
EditFormat::XmlTags,
)
EditAgent::new(model, project, action_log, Templates::new())
}
#[gpui::test(iterations = 10)]

View File

@@ -1,18 +1,13 @@
use anyhow::bail;
use derive_more::{Add, AddAssign};
use language_model::LanguageModel;
use regex::Regex;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use std::{mem, ops::Range, str::FromStr, sync::Arc};
use std::{mem, ops::Range};
const OLD_TEXT_END_TAG: &str = "</old_text>";
const NEW_TEXT_END_TAG: &str = "</new_text>";
const EDITS_END_TAG: &str = "</edits>";
const SEARCH_MARKER: &str = "<<<<<<< SEARCH";
const SEPARATOR_MARKER: &str = "=======";
const REPLACE_MARKER: &str = ">>>>>>> REPLACE";
const END_TAGS: [&str; 3] = [OLD_TEXT_END_TAG, NEW_TEXT_END_TAG, EDITS_END_TAG];
#[derive(Debug)]
@@ -36,153 +31,44 @@ pub struct EditParserMetrics {
pub mismatched_tags: usize,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum EditFormat {
/// XML-like tags:
/// <old_text>...</old_text>
/// <new_text>...</new_text>
XmlTags,
/// Diff-fenced format, in which:
/// - Text before the SEARCH marker is ignored
/// - Fences are optional
/// - Line hint is optional.
///
/// Example:
///
/// ```diff
/// <<<<<<< SEARCH line=42
/// ...
/// =======
/// ...
/// >>>>>>> REPLACE
/// ```
DiffFenced,
}
impl FromStr for EditFormat {
type Err = anyhow::Error;
fn from_str(s: &str) -> anyhow::Result<Self> {
match s.to_lowercase().as_str() {
"xml_tags" | "xml" => Ok(EditFormat::XmlTags),
"diff_fenced" | "diff-fenced" | "diff" => Ok(EditFormat::DiffFenced),
_ => bail!("Unknown EditFormat: {}", s),
}
}
}
impl EditFormat {
/// Return an optimal edit format for the language model
pub fn from_model(model: Arc<dyn LanguageModel>) -> anyhow::Result<Self> {
if model.provider_id().0 == "google" || model.id().0.to_lowercase().contains("gemini") {
Ok(EditFormat::DiffFenced)
} else {
Ok(EditFormat::XmlTags)
}
}
/// Return an optimal edit format for the language model,
/// with the ability to override it by setting the
/// `ZED_EDIT_FORMAT` environment variable
#[allow(dead_code)]
pub fn from_env(model: Arc<dyn LanguageModel>) -> anyhow::Result<Self> {
let default = EditFormat::from_model(model)?;
std::env::var("ZED_EDIT_FORMAT").map_or(Ok(default), |s| EditFormat::from_str(&s))
}
}
pub trait EditFormatParser: Send + std::fmt::Debug {
fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]>;
fn take_metrics(&mut self) -> EditParserMetrics;
}
#[derive(Debug)]
pub struct XmlEditParser {
state: XmlParserState,
pub struct EditParser {
state: EditParserState,
buffer: String,
metrics: EditParserMetrics,
}
#[derive(Debug, PartialEq)]
enum XmlParserState {
enum EditParserState {
Pending,
WithinOldText { start: bool, line_hint: Option<u32> },
AfterOldText,
WithinNewText { start: bool },
}
#[derive(Debug)]
pub struct DiffFencedEditParser {
state: DiffParserState,
buffer: String,
metrics: EditParserMetrics,
}
#[derive(Debug, PartialEq)]
enum DiffParserState {
Pending,
WithinSearch { start: bool, line_hint: Option<u32> },
WithinReplace { start: bool },
}
/// Main parser that delegates to format-specific parsers
pub struct EditParser {
parser: Box<dyn EditFormatParser>,
}
impl XmlEditParser {
impl EditParser {
pub fn new() -> Self {
XmlEditParser {
state: XmlParserState::Pending,
EditParser {
state: EditParserState::Pending,
buffer: String::new(),
metrics: EditParserMetrics::default(),
}
}
fn find_end_tag(&self) -> Option<Range<usize>> {
let (tag, start_ix) = END_TAGS
.iter()
.flat_map(|tag| Some((tag, self.buffer.find(tag)?)))
.min_by_key(|(_, ix)| *ix)?;
Some(start_ix..start_ix + tag.len())
}
fn ends_with_tag_prefix(&self) -> bool {
let mut end_prefixes = END_TAGS
.iter()
.flat_map(|tag| (1..tag.len()).map(move |i| &tag[..i]))
.chain(["\n"]);
end_prefixes.any(|prefix| self.buffer.ends_with(&prefix))
}
fn parse_line_hint(&self, tag: &str) -> Option<u32> {
use std::sync::LazyLock;
static LINE_HINT_REGEX: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r#"line=(?:"?)(\d+)"#).unwrap());
LINE_HINT_REGEX
.captures(tag)
.and_then(|caps| caps.get(1))
.and_then(|m| m.as_str().parse::<u32>().ok())
}
}
impl EditFormatParser for XmlEditParser {
fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> {
pub fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> {
self.buffer.push_str(chunk);
let mut edit_events = SmallVec::new();
loop {
match &mut self.state {
XmlParserState::Pending => {
EditParserState::Pending => {
if let Some(start) = self.buffer.find("<old_text") {
if let Some(tag_end) = self.buffer[start..].find('>') {
let tag_end = start + tag_end + 1;
let tag = &self.buffer[start..tag_end];
let line_hint = self.parse_line_hint(tag);
self.buffer.drain(..tag_end);
self.state = XmlParserState::WithinOldText {
self.state = EditParserState::WithinOldText {
start: true,
line_hint,
};
@@ -193,7 +79,7 @@ impl EditFormatParser for XmlEditParser {
break;
}
}
XmlParserState::WithinOldText { start, line_hint } => {
EditParserState::WithinOldText { start, line_hint } => {
if !self.buffer.is_empty() {
if *start && self.buffer.starts_with('\n') {
self.buffer.remove(0);
@@ -214,7 +100,7 @@ impl EditFormatParser for XmlEditParser {
}
self.buffer.drain(..tag_range.end);
self.state = XmlParserState::AfterOldText;
self.state = EditParserState::AfterOldText;
edit_events.push(EditParserEvent::OldTextChunk {
chunk,
done: true,
@@ -231,15 +117,15 @@ impl EditFormatParser for XmlEditParser {
break;
}
}
XmlParserState::AfterOldText => {
EditParserState::AfterOldText => {
if let Some(start) = self.buffer.find("<new_text>") {
self.buffer.drain(..start + "<new_text>".len());
self.state = XmlParserState::WithinNewText { start: true };
self.state = EditParserState::WithinNewText { start: true };
} else {
break;
}
}
XmlParserState::WithinNewText { start } => {
EditParserState::WithinNewText { start } => {
if !self.buffer.is_empty() {
if *start && self.buffer.starts_with('\n') {
self.buffer.remove(0);
@@ -259,7 +145,7 @@ impl EditFormatParser for XmlEditParser {
}
self.buffer.drain(..tag_range.end);
self.state = XmlParserState::Pending;
self.state = EditParserState::Pending;
edit_events.push(EditParserEvent::NewTextChunk { chunk, done: true });
} else {
if !self.ends_with_tag_prefix() {
@@ -276,163 +162,34 @@ impl EditFormatParser for XmlEditParser {
edit_events
}
fn take_metrics(&mut self) -> EditParserMetrics {
std::mem::take(&mut self.metrics)
}
}
impl DiffFencedEditParser {
pub fn new() -> Self {
DiffFencedEditParser {
state: DiffParserState::Pending,
buffer: String::new(),
metrics: EditParserMetrics::default(),
}
}
fn ends_with_diff_marker_prefix(&self) -> bool {
let diff_markers = [SEPARATOR_MARKER, REPLACE_MARKER];
let mut diff_prefixes = diff_markers
fn find_end_tag(&self) -> Option<Range<usize>> {
let (tag, start_ix) = END_TAGS
.iter()
.flat_map(|marker| (1..marker.len()).map(move |i| &marker[..i]))
.chain(["\n"]);
diff_prefixes.any(|prefix| self.buffer.ends_with(&prefix))
.flat_map(|tag| Some((tag, self.buffer.find(tag)?)))
.min_by_key(|(_, ix)| *ix)?;
Some(start_ix..start_ix + tag.len())
}
fn parse_line_hint(&self, search_line: &str) -> Option<u32> {
use regex::Regex;
use std::sync::LazyLock;
static LINE_HINT_REGEX: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r#"line=(?:"?)(\d+)"#).unwrap());
fn ends_with_tag_prefix(&self) -> bool {
let mut end_prefixes = END_TAGS
.iter()
.flat_map(|tag| (1..tag.len()).map(move |i| &tag[..i]))
.chain(["\n"]);
end_prefixes.any(|prefix| self.buffer.ends_with(&prefix))
}
fn parse_line_hint(&self, tag: &str) -> Option<u32> {
static LINE_HINT_REGEX: std::sync::LazyLock<Regex> =
std::sync::LazyLock::new(|| Regex::new(r#"line=(?:"?)(\d+)"#).unwrap());
LINE_HINT_REGEX
.captures(search_line)
.captures(tag)
.and_then(|caps| caps.get(1))
.and_then(|m| m.as_str().parse::<u32>().ok())
}
}
impl EditFormatParser for DiffFencedEditParser {
fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> {
self.buffer.push_str(chunk);
let mut edit_events = SmallVec::new();
loop {
match &mut self.state {
DiffParserState::Pending => {
if let Some(diff) = self.buffer.find(SEARCH_MARKER) {
let search_end = diff + SEARCH_MARKER.len();
if let Some(newline_pos) = self.buffer[search_end..].find('\n') {
let search_line = &self.buffer[diff..search_end + newline_pos];
let line_hint = self.parse_line_hint(search_line);
self.buffer.drain(..search_end + newline_pos + 1);
self.state = DiffParserState::WithinSearch {
start: true,
line_hint,
};
} else {
break;
}
} else {
break;
}
}
DiffParserState::WithinSearch { start, line_hint } => {
if !self.buffer.is_empty() {
if *start && self.buffer.starts_with('\n') {
self.buffer.remove(0);
}
*start = false;
}
let line_hint = *line_hint;
if let Some(separator_pos) = self.buffer.find(SEPARATOR_MARKER) {
let mut chunk = self.buffer[..separator_pos].to_string();
if chunk.ends_with('\n') {
chunk.pop();
}
let separator_end = separator_pos + SEPARATOR_MARKER.len();
if let Some(newline_pos) = self.buffer[separator_end..].find('\n') {
self.buffer.drain(..separator_end + newline_pos + 1);
self.state = DiffParserState::WithinReplace { start: true };
edit_events.push(EditParserEvent::OldTextChunk {
chunk,
done: true,
line_hint,
});
} else {
break;
}
} else {
if !self.ends_with_diff_marker_prefix() {
edit_events.push(EditParserEvent::OldTextChunk {
chunk: mem::take(&mut self.buffer),
done: false,
line_hint,
});
}
break;
}
}
DiffParserState::WithinReplace { start } => {
if !self.buffer.is_empty() {
if *start && self.buffer.starts_with('\n') {
self.buffer.remove(0);
}
*start = false;
}
if let Some(replace_pos) = self.buffer.find(REPLACE_MARKER) {
let mut chunk = self.buffer[..replace_pos].to_string();
if chunk.ends_with('\n') {
chunk.pop();
}
self.buffer.drain(..replace_pos + REPLACE_MARKER.len());
if let Some(newline_pos) = self.buffer.find('\n') {
self.buffer.drain(..newline_pos + 1);
} else {
self.buffer.clear();
}
self.state = DiffParserState::Pending;
edit_events.push(EditParserEvent::NewTextChunk { chunk, done: true });
} else {
if !self.ends_with_diff_marker_prefix() {
edit_events.push(EditParserEvent::NewTextChunk {
chunk: mem::take(&mut self.buffer),
done: false,
});
}
break;
}
}
}
}
edit_events
}
fn take_metrics(&mut self) -> EditParserMetrics {
std::mem::take(&mut self.metrics)
}
}
impl EditParser {
pub fn new(format: EditFormat) -> Self {
let parser: Box<dyn EditFormatParser> = match format {
EditFormat::XmlTags => Box::new(XmlEditParser::new()),
EditFormat::DiffFenced => Box::new(DiffFencedEditParser::new()),
};
EditParser { parser }
}
pub fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> {
self.parser.push(chunk)
}
pub fn finish(mut self) -> EditParserMetrics {
self.parser.take_metrics()
pub fn finish(self) -> EditParserMetrics {
self.metrics
}
}
@@ -444,8 +201,8 @@ mod tests {
use std::cmp;
#[gpui::test(iterations = 1000)]
fn test_xml_single_edit(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::XmlTags);
fn test_single_edit(mut rng: StdRng) {
let mut parser = EditParser::new();
assert_eq!(
parse_random_chunks(
"<old_text>original</old_text><new_text>updated</new_text>",
@@ -468,8 +225,8 @@ mod tests {
}
#[gpui::test(iterations = 1000)]
fn test_xml_multiple_edits(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::XmlTags);
fn test_multiple_edits(mut rng: StdRng) {
let mut parser = EditParser::new();
assert_eq!(
parse_random_chunks(
indoc! {"
@@ -506,8 +263,8 @@ mod tests {
}
#[gpui::test(iterations = 1000)]
fn test_xml_edits_with_extra_text(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::XmlTags);
fn test_edits_with_extra_text(mut rng: StdRng) {
let mut parser = EditParser::new();
assert_eq!(
parse_random_chunks(
indoc! {"
@@ -548,8 +305,8 @@ mod tests {
}
#[gpui::test(iterations = 1000)]
fn test_xml_nested_tags(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::XmlTags);
fn test_nested_tags(mut rng: StdRng) {
let mut parser = EditParser::new();
assert_eq!(
parse_random_chunks(
"<old_text>code with <tag>nested</tag> elements</old_text><new_text>new <code>content</code></new_text>",
@@ -572,8 +329,8 @@ mod tests {
}
#[gpui::test(iterations = 1000)]
fn test_xml_empty_old_and_new_text(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::XmlTags);
fn test_empty_old_and_new_text(mut rng: StdRng) {
let mut parser = EditParser::new();
assert_eq!(
parse_random_chunks(
"<old_text></old_text><new_text></new_text>",
@@ -596,8 +353,8 @@ mod tests {
}
#[gpui::test(iterations = 100)]
fn test_xml_multiline_content(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::XmlTags);
fn test_multiline_content(mut rng: StdRng) {
let mut parser = EditParser::new();
assert_eq!(
parse_random_chunks(
"<old_text>line1\nline2\nline3</old_text><new_text>line1\nmodified line2\nline3</new_text>",
@@ -620,8 +377,8 @@ mod tests {
}
#[gpui::test(iterations = 1000)]
fn test_xml_mismatched_tags(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::XmlTags);
fn test_mismatched_tags(mut rng: StdRng) {
let mut parser = EditParser::new();
assert_eq!(
parse_random_chunks(
// Reduced from an actual Sonnet 3.7 output
@@ -671,7 +428,7 @@ mod tests {
}
);
let mut parser = EditParser::new(EditFormat::XmlTags);
let mut parser = EditParser::new();
assert_eq!(
parse_random_chunks(
// Reduced from an actual Opus 4 output
@@ -702,230 +459,10 @@ mod tests {
);
}
#[gpui::test(iterations = 1000)]
fn test_diff_fenced_single_edit(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::DiffFenced);
assert_eq!(
parse_random_chunks(
indoc! {"
<<<<<<< SEARCH
original text
=======
updated text
>>>>>>> REPLACE
"},
&mut parser,
&mut rng
),
vec![Edit {
old_text: "original text".to_string(),
new_text: "updated text".to_string(),
line_hint: None,
}]
);
assert_eq!(
parser.finish(),
EditParserMetrics {
tags: 0,
mismatched_tags: 0
}
);
}
#[gpui::test(iterations = 100)]
fn test_diff_fenced_with_markdown_fences(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::DiffFenced);
assert_eq!(
parse_random_chunks(
indoc! {"
```diff
<<<<<<< SEARCH
from flask import Flask
=======
import math
from flask import Flask
>>>>>>> REPLACE
```
"},
&mut parser,
&mut rng
),
vec![Edit {
old_text: "from flask import Flask".to_string(),
new_text: "import math\nfrom flask import Flask".to_string(),
line_hint: None,
}]
);
assert_eq!(
parser.finish(),
EditParserMetrics {
tags: 0,
mismatched_tags: 0
}
);
}
#[gpui::test(iterations = 100)]
fn test_diff_fenced_multiple_edits(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::DiffFenced);
assert_eq!(
parse_random_chunks(
indoc! {"
<<<<<<< SEARCH
first old
=======
first new
>>>>>>> REPLACE
<<<<<<< SEARCH
second old
=======
second new
>>>>>>> REPLACE
"},
&mut parser,
&mut rng
),
vec![
Edit {
old_text: "first old".to_string(),
new_text: "first new".to_string(),
line_hint: None,
},
Edit {
old_text: "second old".to_string(),
new_text: "second new".to_string(),
line_hint: None,
},
]
);
assert_eq!(
parser.finish(),
EditParserMetrics {
tags: 0,
mismatched_tags: 0
}
);
}
#[gpui::test(iterations = 100)]
fn test_mixed_formats(mut rng: StdRng) {
// Test XML format parser only parses XML tags
let mut xml_parser = EditParser::new(EditFormat::XmlTags);
assert_eq!(
parse_random_chunks(
indoc! {"
<old_text>xml style old</old_text><new_text>xml style new</new_text>
<<<<<<< SEARCH
diff style old
=======
diff style new
>>>>>>> REPLACE
"},
&mut xml_parser,
&mut rng
),
vec![Edit {
old_text: "xml style old".to_string(),
new_text: "xml style new".to_string(),
line_hint: None,
},]
);
assert_eq!(
xml_parser.finish(),
EditParserMetrics {
tags: 2,
mismatched_tags: 0
}
);
// Test diff-fenced format parser only parses diff markers
let mut diff_parser = EditParser::new(EditFormat::DiffFenced);
assert_eq!(
parse_random_chunks(
indoc! {"
<old_text>xml style old</old_text><new_text>xml style new</new_text>
<<<<<<< SEARCH
diff style old
=======
diff style new
>>>>>>> REPLACE
"},
&mut diff_parser,
&mut rng
),
vec![Edit {
old_text: "diff style old".to_string(),
new_text: "diff style new".to_string(),
line_hint: None,
},]
);
assert_eq!(
diff_parser.finish(),
EditParserMetrics {
tags: 0,
mismatched_tags: 0
}
);
}
#[gpui::test(iterations = 100)]
fn test_diff_fenced_empty_sections(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::DiffFenced);
assert_eq!(
parse_random_chunks(
indoc! {"
<<<<<<< SEARCH
=======
>>>>>>> REPLACE
"},
&mut parser,
&mut rng
),
vec![Edit {
old_text: "".to_string(),
new_text: "".to_string(),
line_hint: None,
}]
);
assert_eq!(
parser.finish(),
EditParserMetrics {
tags: 0,
mismatched_tags: 0
}
);
}
#[gpui::test(iterations = 100)]
fn test_diff_fenced_with_line_hint(mut rng: StdRng) {
let mut parser = EditParser::new(EditFormat::DiffFenced);
let edits = parse_random_chunks(
indoc! {"
<<<<<<< SEARCH line=42
original text
=======
updated text
>>>>>>> REPLACE
"},
&mut parser,
&mut rng,
);
assert_eq!(
edits,
vec![Edit {
old_text: "original text".to_string(),
line_hint: Some(42),
new_text: "updated text".to_string(),
}]
);
}
#[gpui::test(iterations = 100)]
fn test_xml_line_hints(mut rng: StdRng) {
fn test_line_hints(mut rng: StdRng) {
// Line hint is a single quoted line number
let mut parser = EditParser::new(EditFormat::XmlTags);
let mut parser = EditParser::new();
let edits = parse_random_chunks(
r#"
@@ -941,7 +478,7 @@ mod tests {
assert_eq!(edits[0].new_text, "updated code");
// Line hint is a single unquoted line number
let mut parser = EditParser::new(EditFormat::XmlTags);
let mut parser = EditParser::new();
let edits = parse_random_chunks(
r#"
@@ -957,7 +494,7 @@ mod tests {
assert_eq!(edits[0].new_text, "updated code");
// Line hint is a range
let mut parser = EditParser::new(EditFormat::XmlTags);
let mut parser = EditParser::new();
let edits = parse_random_chunks(
r#"
@@ -973,7 +510,7 @@ mod tests {
assert_eq!(edits[0].new_text, "updated code");
// No line hint
let mut parser = EditParser::new(EditFormat::XmlTags);
let mut parser = EditParser::new();
let edits = parse_random_chunks(
r#"
<old_text>old</old_text>

View File

@@ -41,7 +41,7 @@ fn eval_extract_handle_command_output() {
// ----------------------------|----------
// claude-3.7-sonnet | 0.99 (2025-06-14)
// claude-sonnet-4 | 0.97 (2025-06-14)
// gemini-2.5-pro-06-05 | 0.98 (2025-06-16)
// gemini-2.5-pro-06-05 | 0.77 (2025-05-22)
// gemini-2.5-flash | 0.11 (2025-05-22)
// gpt-4.1 | 1.00 (2025-05-22)
@@ -59,7 +59,7 @@ fn eval_extract_handle_command_output() {
let edit_description = "Extract `handle_command_output` method from `run_git_blame`.";
eval(
100,
0.95,
0.7, // Taking the lower bar for Gemini
0.05,
EvalInput::from_conversation(
vec![
@@ -116,7 +116,7 @@ fn eval_delete_run_git_blame() {
// ----------------------------|----------
// claude-3.7-sonnet | 1.0 (2025-06-14)
// claude-sonnet-4 | 0.96 (2025-06-14)
// gemini-2.5-pro-06-05 | 1.0 (2025-06-16)
// gemini-2.5-pro-06-05 |
// gemini-2.5-flash |
// gpt-4.1 |
let input_file_path = "root/blame.rs";
@@ -241,7 +241,7 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
//
// claude-3.7-sonnet | 0.96 (2025-06-14)
// claude-sonnet-4 | 0.11 (2025-06-14)
// gemini-2.5-pro-preview-latest | 0.99 (2025-06-16)
// gemini-2.5-pro-preview-03-25 | 0.99 (2025-05-22)
// gemini-2.5-flash-preview-04-17 |
// gpt-4.1 |
let input_file_path = "root/lib.rs";
@@ -366,7 +366,7 @@ fn eval_disable_cursor_blinking() {
//
// claude-3.7-sonnet | 0.99 (2025-06-14)
// claude-sonnet-4 | 0.85 (2025-06-14)
// gemini-2.5-pro-preview-latest | 0.97 (2025-06-16)
// gemini-2.5-pro-preview-03-25 | 1.0 (2025-05-22)
// gemini-2.5-flash-preview-04-17 |
// gpt-4.1 |
let input_file_path = "root/editor.rs";
@@ -453,11 +453,12 @@ fn eval_from_pixels_constructor() {
// (e.g., at the beginning of the file), yet the evaluation may still
// rate it highly.
//
// Model | Date | Pass rate
// =========================================================
// claude-4.0-sonnet | 2025-06-14 | 0.99
// claude-3.7-sonnet | 2025-06-14 | 0.88
// gemini-2.5-pro-preview-06-05 | 2025-06-16 | 0.98
// Model | Pass rate
// ============================================
//
// claude-4.0-sonnet | 0.99
// claude-3.7-sonnet | 0.88
// gemini-2.5-pro-preview-03-25 | 0.96
// gpt-4.1 |
let input_file_path = "root/canvas.rs";
let input_file_content = include_str!("evals/fixtures/from_pixels_constructor/before.rs");
@@ -1497,16 +1498,8 @@ impl EditAgentTest {
.await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let edit_format = EditFormat::from_env(agent_model.clone()).unwrap();
Self {
agent: EditAgent::new(
agent_model,
project.clone(),
action_log,
Templates::new(),
edit_format,
),
agent: EditAgent::new(agent_model, project.clone(), action_log, Templates::new()),
project,
judge_model,
}

View File

@@ -16632,56 +16632,6 @@ impl Editor {
}
}
// Returns true if the editor handled a go-to-line request
pub fn go_to_active_debug_line(&mut self, window: &mut Window, cx: &mut Context<Self>) -> bool {
maybe!({
let breakpoint_store = self.breakpoint_store.as_ref()?;
let Some(active_stack_frame) = breakpoint_store.read(cx).active_position().cloned()
else {
self.clear_row_highlights::<ActiveDebugLine>();
return None;
};
let position = active_stack_frame.position;
let buffer_id = position.buffer_id?;
let snapshot = self
.project
.as_ref()?
.read(cx)
.buffer_for_id(buffer_id, cx)?
.read(cx)
.snapshot();
let mut handled = false;
for (id, ExcerptRange { context, .. }) in
self.buffer.read(cx).excerpts_for_buffer(buffer_id, cx)
{
if context.start.cmp(&position, &snapshot).is_ge()
|| context.end.cmp(&position, &snapshot).is_lt()
{
continue;
}
let snapshot = self.buffer.read(cx).snapshot(cx);
let multibuffer_anchor = snapshot.anchor_in_excerpt(id, position)?;
handled = true;
self.clear_row_highlights::<ActiveDebugLine>();
self.go_to_line::<ActiveDebugLine>(
multibuffer_anchor,
Some(cx.theme().colors().editor_debugger_active_line_background),
window,
cx,
);
cx.notify();
}
handled.then_some(())
})
.is_some()
}
pub fn copy_file_name_without_extension(
&mut self,
_: &CopyFileNameWithoutExtension,

View File

@@ -44,10 +44,6 @@ impl StreamingFuzzyMatcher {
/// Returns `Some(range)` if a match has been found with the accumulated
/// query so far, or `None` if no suitable match exists yet.
pub fn push(&mut self, chunk: &str, line_hint: Option<u32>) -> Option<Range<usize>> {
if line_hint.is_some() {
self.line_hint = line_hint;
}
// Add the chunk to our incomplete line buffer
self.incomplete_line.push_str(chunk);
self.line_hint = line_hint;

View File

@@ -1,6 +1,6 @@
use crate::{
Templates,
edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat},
edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent},
schema::json_schema_for,
ui::{COLLAPSED_LINES, ToolOutputPreview},
};
@@ -201,14 +201,8 @@ impl Tool for EditFileTool {
let card_clone = card.clone();
let action_log_clone = action_log.clone();
let task = cx.spawn(async move |cx: &mut AsyncApp| {
let edit_format = EditFormat::from_model(model.clone())?;
let edit_agent = EditAgent::new(
model,
project.clone(),
action_log_clone,
Templates::new(),
edit_format,
);
let edit_agent =
EditAgent::new(model, project.clone(), action_log_clone, Templates::new());
let buffer = project
.update(cx, |project, cx| {

View File

@@ -1,77 +0,0 @@
You MUST respond with a series of edits to a file, using the following diff format:
```
<<<<<<< SEARCH line=1
from flask import Flask
=======
import math
from flask import Flask
>>>>>>> REPLACE
<<<<<<< SEARCH line=325
return 0
=======
print("Done")
return 0
>>>>>>> REPLACE
```
# File Editing Instructions
- Use the SEARCH/REPLACE diff format shown above
- The SEARCH section must exactly match existing file content, including indentation
- The SEARCH section must come from the actual file, not an outline
- The SEARCH section cannot be empty
- `line` should be a starting line number for the text to be replaced
- Be minimal with replacements:
- For unique lines, include only those lines
- For non-unique lines, include enough context to identify them
- Do not escape quotes, newlines, or other characters
- For multiple occurrences, repeat the same diff block for each instance
- Edits are sequential - each assumes previous edits are already applied
- Only edit the specified file
# Example
```
<<<<<<< SEARCH line=3
struct User {
name: String,
email: String,
}
=======
struct User {
name: String,
email: String,
active: bool,
}
>>>>>>> REPLACE
<<<<<<< SEARCH line=25
let user = User {
name: String::from("John"),
email: String::from("john@example.com"),
};
=======
let user = User {
name: String::from("John"),
email: String::from("john@example.com"),
active: true,
};
>>>>>>> REPLACE
```
# Final instructions
Tool calls have been disabled. You MUST respond using the SEARCH/REPLACE diff format only.
<file_to_edit>
{{path}}
</file_to_edit>
<edit_description>
{{edit_description}}
</edit_description>

View File

@@ -350,10 +350,7 @@ impl Telemetry {
worktree_id: WorktreeId,
updated_entries_set: &UpdatedEntriesSet,
) {
let Some(project_type_names) = self.detect_project_types(worktree_id, updated_entries_set)
else {
return;
};
let project_type_names = self.detect_project_types(worktree_id, updated_entries_set);
for project_type_name in project_type_names {
telemetry::event!("Project Opened", project_type = project_type_name);
@@ -364,49 +361,42 @@ impl Telemetry {
self: &Arc<Self>,
worktree_id: WorktreeId,
updated_entries_set: &UpdatedEntriesSet,
) -> Option<Vec<String>> {
) -> Vec<String> {
let mut state = self.state.lock();
let mut project_names: HashSet<String> = HashSet::new();
if state
.worktrees_with_project_type_events_sent
.contains(&worktree_id)
{
return None;
return project_names.into_iter().collect();
}
let mut project_types: HashSet<&str> = HashSet::new();
for (path, _, _) in updated_entries_set.iter() {
let Some(file_name) = path.file_name().and_then(|f| f.to_str()) else {
continue;
};
let project_type = if file_name == "pnpm-lock.yaml" {
Some("pnpm")
if file_name == "pnpm-lock.yaml" {
project_names.insert("pnpm".to_string());
} else if file_name == "yarn.lock" {
Some("yarn")
project_names.insert("yarn".to_string());
} else if file_name == "package.json" {
Some("node")
project_names.insert("node".to_string());
} else if DOTNET_PROJECT_FILES_REGEX.is_match(file_name) {
Some("dotnet")
} else {
None
};
if let Some(project_type) = project_type {
project_types.insert(project_type);
};
project_names.insert("dotnet".to_string());
}
}
if !project_types.is_empty() {
if !project_names.is_empty() {
state
.worktrees_with_project_type_events_sent
.insert(worktree_id);
}
let mut project_types: Vec<_> = project_types.into_iter().map(String::from).collect();
project_types.sort();
Some(project_types)
let mut project_names_vec: Vec<String> = project_names.into_iter().collect();
project_names_vec.sort();
project_names_vec
}
fn report_event(self: &Arc<Self>, event: Event) {
@@ -700,19 +690,16 @@ mod tests {
let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
let worktree_id = 1;
// Scan of empty worktree finds nothing
test_project_discovery_helper(telemetry.clone(), vec![], Some(vec![]), worktree_id);
// Files added, second scan of worktree 1 finds project type
// First scan of worktree 1 returns project types
test_project_discovery_helper(
telemetry.clone(),
vec!["package.json"],
Some(vec!["node"]),
vec!["node"],
worktree_id,
);
// Third scan of worktree does not double report, as we already reported
test_project_discovery_helper(telemetry.clone(), vec!["package.json"], None, worktree_id);
// Rescan of worktree 1 returns nothing as it has already been reported
test_project_discovery_helper(telemetry.clone(), vec!["package.json"], vec![], worktree_id);
}
#[gpui::test]
@@ -726,7 +713,7 @@ mod tests {
test_project_discovery_helper(
telemetry.clone(),
vec!["package.json", "pnpm-lock.yaml"],
Some(vec!["node", "pnpm"]),
vec!["node", "pnpm"],
1,
);
}
@@ -742,7 +729,7 @@ mod tests {
test_project_discovery_helper(
telemetry.clone(),
vec!["package.json", "yarn.lock"],
Some(vec!["node", "yarn"]),
vec!["node", "yarn"],
1,
);
}
@@ -761,41 +748,26 @@ mod tests {
test_project_discovery_helper(
telemetry.clone().clone(),
vec!["global.json"],
Some(vec!["dotnet"]),
vec!["dotnet"],
1,
);
test_project_discovery_helper(
telemetry.clone(),
vec!["Directory.Build.props"],
Some(vec!["dotnet"]),
vec!["dotnet"],
2,
);
test_project_discovery_helper(
telemetry.clone(),
vec!["file.csproj"],
Some(vec!["dotnet"]),
3,
);
test_project_discovery_helper(
telemetry.clone(),
vec!["file.fsproj"],
Some(vec!["dotnet"]),
4,
);
test_project_discovery_helper(
telemetry.clone(),
vec!["file.vbproj"],
Some(vec!["dotnet"]),
5,
);
test_project_discovery_helper(telemetry.clone(), vec!["file.sln"], Some(vec!["dotnet"]), 6);
test_project_discovery_helper(telemetry.clone(), vec!["file.csproj"], vec!["dotnet"], 3);
test_project_discovery_helper(telemetry.clone(), vec!["file.fsproj"], vec!["dotnet"], 4);
test_project_discovery_helper(telemetry.clone(), vec!["file.vbproj"], vec!["dotnet"], 5);
test_project_discovery_helper(telemetry.clone(), vec!["file.sln"], vec!["dotnet"], 6);
// Each worktree should only send a single project type event, even when
// encountering multiple files associated with that project type
test_project_discovery_helper(
telemetry,
vec!["global.json", "Directory.Build.props"],
Some(vec!["dotnet"]),
vec!["dotnet"],
7,
);
}
@@ -820,7 +792,7 @@ mod tests {
fn test_project_discovery_helper(
telemetry: Arc<Telemetry>,
file_paths: Vec<&str>,
expected_project_types: Option<Vec<&str>>,
expected_project_types: Vec<&str>,
worktree_id_num: usize,
) {
let worktree_id = WorktreeId::from_usize(worktree_id_num);
@@ -837,11 +809,15 @@ mod tests {
.collect();
let updated_entries: UpdatedEntriesSet = Arc::from(entries.as_slice());
let detected_project_types = telemetry.detect_project_types(worktree_id, &updated_entries);
let mut detected_types = telemetry.detect_project_types(worktree_id, &updated_entries);
detected_types.sort();
let expected_project_types =
expected_project_types.map(|types| types.iter().map(|&t| t.to_string()).collect());
let mut expected_sorted = expected_project_types
.into_iter()
.map(String::from)
.collect::<Vec<_>>();
expected_sorted.sort();
assert_eq!(detected_project_types, expected_project_types);
assert_eq!(detected_types, expected_sorted);
}
}

View File

@@ -31,7 +31,7 @@ use crate::llm::{AGENT_EXTENDED_TRIAL_FEATURE_FLAG, DEFAULT_MAX_MONTHLY_SPEND};
use crate::rpc::{ResultExt as _, Server};
use crate::stripe_client::{
StripeCancellationDetailsReason, StripeClient, StripeCustomerId, StripeSubscription,
StripeSubscriptionId, UpdateCustomerParams,
StripeSubscriptionId,
};
use crate::{AppState, Error, Result};
use crate::{db::UserId, llm::db::LlmDatabase};
@@ -353,17 +353,7 @@ async fn create_billing_subscription(
}
let customer_id = if let Some(existing_customer) = &existing_billing_customer {
let customer_id = StripeCustomerId(existing_customer.stripe_customer_id.clone().into());
if let Some(email) = user.email_address.as_deref() {
stripe_billing
.client()
.update_customer(&customer_id, UpdateCustomerParams { email: Some(email) })
.await
// Update of email address is best-effort - continue checkout even if it fails
.context("error updating stripe customer email address")
.log_err();
}
customer_id
StripeCustomerId(existing_customer.stripe_customer_id.clone().into())
} else {
stripe_billing
.find_or_create_customer_by_email(user.email_address.as_deref())

View File

@@ -50,10 +50,6 @@ impl StripeBilling {
}
}
pub fn client(&self) -> &Arc<dyn StripeClient> {
&self.client
}
pub async fn initialize(&self) -> Result<()> {
log::info!("StripeBilling: initializing");

View File

@@ -27,11 +27,6 @@ pub struct CreateCustomerParams<'a> {
pub email: Option<&'a str>,
}
#[derive(Debug)]
pub struct UpdateCustomerParams<'a> {
pub email: Option<&'a str>,
}
#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::Display)]
pub struct StripeSubscriptionId(pub Arc<str>);
@@ -198,12 +193,6 @@ pub trait StripeClient: Send + Sync {
async fn create_customer(&self, params: CreateCustomerParams<'_>) -> Result<StripeCustomer>;
async fn update_customer(
&self,
customer_id: &StripeCustomerId,
params: UpdateCustomerParams<'_>,
) -> Result<StripeCustomer>;
async fn list_subscriptions_for_customer(
&self,
customer_id: &StripeCustomerId,

View File

@@ -14,7 +14,7 @@ use crate::stripe_client::{
StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeMeter, StripeMeterId,
StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId, StripeSubscriptionItem,
StripeSubscriptionItemId, UpdateCustomerParams, UpdateSubscriptionParams,
StripeSubscriptionItemId, UpdateSubscriptionParams,
};
#[derive(Debug, Clone)]
@@ -95,22 +95,6 @@ impl StripeClient for FakeStripeClient {
Ok(customer)
}
async fn update_customer(
&self,
customer_id: &StripeCustomerId,
params: UpdateCustomerParams<'_>,
) -> Result<StripeCustomer> {
let mut customers = self.customers.lock();
if let Some(customer) = customers.get_mut(customer_id) {
if let Some(email) = params.email {
customer.email = Some(email.to_string());
}
Ok(customer.clone())
} else {
Err(anyhow!("no customer found for {customer_id:?}"))
}
}
async fn list_subscriptions_for_customer(
&self,
customer_id: &StripeCustomerId,

View File

@@ -11,7 +11,7 @@ use stripe::{
CreateCheckoutSessionSubscriptionDataTrialSettingsEndBehavior,
CreateCheckoutSessionSubscriptionDataTrialSettingsEndBehaviorMissingPaymentMethod,
CreateCustomer, Customer, CustomerId, ListCustomers, Price, PriceId, Recurring, Subscription,
SubscriptionId, SubscriptionItem, SubscriptionItemId, UpdateCustomer, UpdateSubscriptionItems,
SubscriptionId, SubscriptionItem, SubscriptionItemId, UpdateSubscriptionItems,
UpdateSubscriptionTrialSettings, UpdateSubscriptionTrialSettingsEndBehavior,
UpdateSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod,
};
@@ -25,8 +25,7 @@ use crate::stripe_client::{
StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId,
StripeSubscriptionItem, StripeSubscriptionItemId, StripeSubscriptionTrialSettings,
StripeSubscriptionTrialSettingsEndBehavior,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateCustomerParams,
UpdateSubscriptionParams,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionParams,
};
pub struct RealStripeClient {
@@ -79,24 +78,6 @@ impl StripeClient for RealStripeClient {
Ok(StripeCustomer::from(customer))
}
async fn update_customer(
&self,
customer_id: &StripeCustomerId,
params: UpdateCustomerParams<'_>,
) -> Result<StripeCustomer> {
let customer = Customer::update(
&self.client,
&customer_id.try_into()?,
UpdateCustomer {
email: params.email,
..Default::default()
},
)
.await?;
Ok(StripeCustomer::from(customer))
}
async fn list_subscriptions_for_customer(
&self,
customer_id: &StripeCustomerId,

View File

@@ -193,10 +193,10 @@ impl MessageEditor {
let highlights = editor.text_highlights::<Self>(cx);
let text = editor.text(cx);
let snapshot = editor.buffer().read(cx).snapshot(cx);
let mentions = if let Some(ranges) = highlights {
let mentions = if let Some((_, ranges)) = highlights {
ranges
.iter()
.map(|(range, _)| range.to_offset(&snapshot))
.map(|range| range.to_offset(&snapshot))
.zip(self.mentions.iter().copied())
.collect()
} else {
@@ -483,19 +483,20 @@ impl MessageEditor {
let end = multi_buffer.anchor_after(range.end);
mentioned_user_ids.push(user.id);
anchor_ranges.push((
start..end,
HighlightStyle {
font_weight: Some(FontWeight::BOLD),
..Default::default()
},
));
anchor_ranges.push(start..end);
}
}
}
editor.clear_highlights::<Self>(cx);
editor.highlight_text::<Self>(anchor_ranges, cx)
editor.highlight_text::<Self>(
anchor_ranges,
HighlightStyle {
font_weight: Some(FontWeight::BOLD),
..Default::default()
},
cx,
)
});
this.mentions = mentioned_user_ids;

View File

@@ -24,7 +24,6 @@ use lsp::{LanguageServer, LanguageServerBinary, LanguageServerId, LanguageServer
use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use request::StatusNotification;
use serde_json::json;
use settings::SettingsStore;
use sign_in::{reinstall_and_sign_in_within_workspace, sign_out_within_workspace};
use std::collections::hash_map::Entry;
@@ -62,15 +61,7 @@ pub fn init(
node_runtime: NodeRuntime,
cx: &mut App,
) {
let language_settings = all_language_settings(None, cx);
let configuration = copilot_chat::CopilotChatConfiguration {
enterprise_uri: language_settings
.edit_predictions
.copilot
.enterprise_uri
.clone(),
};
copilot_chat::init(fs.clone(), http.clone(), configuration, cx);
copilot_chat::init(fs.clone(), http.clone(), cx);
let copilot = cx.new({
let node_runtime = node_runtime.clone();
@@ -356,11 +347,8 @@ impl Copilot {
_subscription: cx.on_app_quit(Self::shutdown_language_server),
};
this.start_copilot(true, false, cx);
cx.observe_global::<SettingsStore>(move |this, cx| {
this.start_copilot(true, false, cx);
this.send_configuration_update(cx);
})
.detach();
cx.observe_global::<SettingsStore>(move |this, cx| this.start_copilot(true, false, cx))
.detach();
this
}
@@ -447,43 +435,6 @@ impl Copilot {
if env.is_empty() { None } else { Some(env) }
}
fn send_configuration_update(&mut self, cx: &mut Context<Self>) {
let copilot_settings = all_language_settings(None, cx)
.edit_predictions
.copilot
.clone();
let settings = json!({
"http": {
"proxy": copilot_settings.proxy,
"proxyStrictSSL": !copilot_settings.proxy_no_verify.unwrap_or(false)
},
"github-enterprise": {
"uri": copilot_settings.enterprise_uri
}
});
if let Some(copilot_chat) = copilot_chat::CopilotChat::global(cx) {
copilot_chat.update(cx, |chat, cx| {
chat.set_configuration(
copilot_chat::CopilotChatConfiguration {
enterprise_uri: copilot_settings.enterprise_uri.clone(),
},
cx,
);
});
}
if let Ok(server) = self.server.as_running() {
server
.lsp
.notify::<lsp::notification::DidChangeConfiguration>(
&lsp::DidChangeConfigurationParams { settings },
)
.log_err();
}
}
#[cfg(any(test, feature = "test-support"))]
pub fn fake(cx: &mut gpui::TestAppContext) -> (Entity<Self>, lsp::FakeLanguageServer) {
use fs::FakeFs;
@@ -590,6 +541,12 @@ impl Copilot {
.into_response()
.context("copilot: check status")?;
server
.request::<request::SetEditorInfo>(editor_info)
.await
.into_response()
.context("copilot: set editor info")?;
anyhow::Ok((server, status))
};
@@ -607,8 +564,6 @@ impl Copilot {
});
cx.emit(Event::CopilotLanguageServerStarted);
this.update_sign_in_status(status, cx);
// Send configuration now that the LSP is fully started
this.send_configuration_update(cx);
}
Err(error) => {
this.server = CopilotServer::Error(error.to_string().into());

View File

@@ -19,47 +19,10 @@ use settings::watch_config_dir;
pub const COPILOT_OAUTH_ENV_VAR: &str = "GH_COPILOT_TOKEN";
#[derive(Default, Clone, Debug, PartialEq)]
pub struct CopilotChatConfiguration {
pub enterprise_uri: Option<String>,
}
impl CopilotChatConfiguration {
pub fn token_url(&self) -> String {
if let Some(enterprise_uri) = &self.enterprise_uri {
let domain = Self::parse_domain(enterprise_uri);
format!("https://api.{}/copilot_internal/v2/token", domain)
} else {
"https://api.github.com/copilot_internal/v2/token".to_string()
}
}
pub fn oauth_domain(&self) -> String {
if let Some(enterprise_uri) = &self.enterprise_uri {
Self::parse_domain(enterprise_uri)
} else {
"github.com".to_string()
}
}
pub fn api_url_from_endpoint(&self, endpoint: &str) -> String {
format!("{}/chat/completions", endpoint)
}
pub fn models_url_from_endpoint(&self, endpoint: &str) -> String {
format!("{}/models", endpoint)
}
fn parse_domain(enterprise_uri: &str) -> String {
let uri = enterprise_uri.trim_end_matches('/');
if let Some(domain) = uri.strip_prefix("https://") {
domain.split('/').next().unwrap_or(domain).to_string()
} else if let Some(domain) = uri.strip_prefix("http://") {
domain.split('/').next().unwrap_or(domain).to_string()
} else {
uri.split('/').next().unwrap_or(uri).to_string()
}
}
pub struct CopilotChatSettings {
pub api_url: Arc<str>,
pub auth_url: Arc<str>,
pub models_url: Arc<str>,
}
// Copilot's base model; defined by Microsoft in premium requests table
@@ -346,19 +309,12 @@ pub struct FunctionChunk {
struct ApiTokenResponse {
token: String,
expires_at: i64,
endpoints: ApiTokenResponseEndpoints,
}
#[derive(Deserialize)]
struct ApiTokenResponseEndpoints {
api: String,
}
#[derive(Clone)]
struct ApiToken {
api_key: String,
expires_at: DateTime<chrono::Utc>,
api_endpoint: String,
}
impl ApiToken {
@@ -379,7 +335,6 @@ impl TryFrom<ApiTokenResponse> for ApiToken {
Ok(Self {
api_key: response.token,
expires_at,
api_endpoint: response.endpoints.api,
})
}
}
@@ -391,18 +346,13 @@ impl Global for GlobalCopilotChat {}
pub struct CopilotChat {
oauth_token: Option<String>,
api_token: Option<ApiToken>,
configuration: CopilotChatConfiguration,
settings: CopilotChatSettings,
models: Option<Vec<Model>>,
client: Arc<dyn HttpClient>,
}
pub fn init(
fs: Arc<dyn Fs>,
client: Arc<dyn HttpClient>,
configuration: CopilotChatConfiguration,
cx: &mut App,
) {
let copilot_chat = cx.new(|cx| CopilotChat::new(fs, client, configuration, cx));
pub fn init(fs: Arc<dyn Fs>, client: Arc<dyn HttpClient>, cx: &mut App) {
let copilot_chat = cx.new(|cx| CopilotChat::new(fs, client, cx));
cx.set_global(GlobalCopilotChat(copilot_chat));
}
@@ -430,15 +380,10 @@ impl CopilotChat {
.map(|model| model.0.clone())
}
fn new(
fs: Arc<dyn Fs>,
client: Arc<dyn HttpClient>,
configuration: CopilotChatConfiguration,
cx: &mut Context<Self>,
) -> Self {
fn new(fs: Arc<dyn Fs>, client: Arc<dyn HttpClient>, cx: &mut Context<Self>) -> Self {
let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
let dir_path = copilot_chat_config_dir();
let settings = CopilotChatSettings::default();
cx.spawn(async move |this, cx| {
let mut parent_watch_rx = watch_config_dir(
cx.background_executor(),
@@ -447,9 +392,7 @@ impl CopilotChat {
config_paths,
);
while let Some(contents) = parent_watch_rx.next().await {
let oauth_domain =
this.read_with(cx, |this, _| this.configuration.oauth_domain())?;
let oauth_token = extract_oauth_token(contents, &oauth_domain);
let oauth_token = extract_oauth_token(contents);
this.update(cx, |this, cx| {
this.oauth_token = oauth_token.clone();
@@ -468,10 +411,9 @@ impl CopilotChat {
oauth_token: std::env::var(COPILOT_OAUTH_ENV_VAR).ok(),
api_token: None,
models: None,
configuration,
settings,
client,
};
if this.oauth_token.is_some() {
cx.spawn(async move |this, mut cx| Self::update_models(&this, &mut cx).await)
.detach_and_log_err(cx);
@@ -481,26 +423,30 @@ impl CopilotChat {
}
async fn update_models(this: &WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
let (oauth_token, client, configuration) = this.read_with(cx, |this, _| {
let (oauth_token, client, auth_url) = this.read_with(cx, |this, _| {
(
this.oauth_token.clone(),
this.client.clone(),
this.configuration.clone(),
this.settings.auth_url.clone(),
)
})?;
let api_token = request_api_token(
&oauth_token.ok_or_else(|| {
anyhow!("OAuth token is missing while updating Copilot Chat models")
})?,
auth_url,
client.clone(),
)
.await?;
let oauth_token = oauth_token
.ok_or_else(|| anyhow!("OAuth token is missing while updating Copilot Chat models"))?;
let token_url = configuration.token_url();
let api_token = request_api_token(&oauth_token, token_url.into(), client.clone()).await?;
let models_url = configuration.models_url_from_endpoint(&api_token.api_endpoint);
let models =
get_models(models_url.into(), api_token.api_key.clone(), client.clone()).await?;
let models_url = this.update(cx, |this, cx| {
this.api_token = Some(api_token.clone());
cx.notify();
this.settings.models_url.clone()
})?;
let models = get_models(models_url, api_token.api_key, client.clone()).await?;
this.update(cx, |this, cx| {
this.api_token = Some(api_token);
this.models = Some(models);
cx.notify();
})?;
@@ -525,23 +471,23 @@ impl CopilotChat {
.flatten()
.context("Copilot chat is not enabled")?;
let (oauth_token, api_token, client, configuration) = this.read_with(&cx, |this, _| {
(
this.oauth_token.clone(),
this.api_token.clone(),
this.client.clone(),
this.configuration.clone(),
)
})?;
let (oauth_token, api_token, client, api_url, auth_url) =
this.read_with(&cx, |this, _| {
(
this.oauth_token.clone(),
this.api_token.clone(),
this.client.clone(),
this.settings.api_url.clone(),
this.settings.auth_url.clone(),
)
})?;
let oauth_token = oauth_token.context("No OAuth token available")?;
let token = match api_token {
Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
_ => {
let token_url = configuration.token_url();
let token =
request_api_token(&oauth_token, token_url.into(), client.clone()).await?;
let token = request_api_token(&oauth_token, auth_url, client.clone()).await?;
this.update(&mut cx, |this, cx| {
this.api_token = Some(token.clone());
cx.notify();
@@ -550,19 +496,13 @@ impl CopilotChat {
}
};
let api_url = configuration.api_url_from_endpoint(&token.api_endpoint);
stream_completion(client.clone(), token.api_key, api_url.into(), request).await
stream_completion(client.clone(), token.api_key, api_url, request).await
}
pub fn set_configuration(
&mut self,
configuration: CopilotChatConfiguration,
cx: &mut Context<Self>,
) {
let same_configuration = self.configuration == configuration;
self.configuration = configuration;
if !same_configuration {
self.api_token = None;
pub fn set_settings(&mut self, settings: CopilotChatSettings, cx: &mut Context<Self>) {
let same_settings = self.settings == settings;
self.settings = settings;
if !same_settings {
cx.spawn(async move |this, cx| {
Self::update_models(&this, cx).await?;
Ok::<_, anyhow::Error>(())
@@ -582,12 +522,16 @@ async fn get_models(
let mut models: Vec<Model> = all_models
.into_iter()
.filter(|model| {
// Ensure user has access to the model; Policy is present only for models that must be
// enabled in the GitHub dashboard
model.model_picker_enabled
&& model
.policy
.as_ref()
.is_none_or(|policy| policy.state == "enabled")
})
// The first model from the API response, in any given family, appear to be the non-tagged
// models, which are likely the best choice (e.g. gpt-4o rather than gpt-4o-2024-11-20)
.dedup_by(|a, b| a.capabilities.family == b.capabilities.family)
.collect();
@@ -664,12 +608,12 @@ async fn request_api_token(
}
}
fn extract_oauth_token(contents: String, domain: &str) -> Option<String> {
fn extract_oauth_token(contents: String) -> Option<String> {
serde_json::from_str::<serde_json::Value>(&contents)
.map(|v| {
v.as_object().and_then(|obj| {
obj.iter().find_map(|(key, value)| {
if key.starts_with(domain) {
if key.starts_with("github.com") {
value["oauth_token"].as_str().map(|v| v.to_string())
} else {
None

View File

@@ -337,7 +337,7 @@ pub async fn download_adapter_from_github(
pub trait DebugAdapter: 'static + Send + Sync {
fn name(&self) -> DebugAdapterName;
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario>;
fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario>;
async fn get_binary(
&self,
@@ -355,7 +355,7 @@ pub trait DebugAdapter: 'static + Send + Sync {
/// Extracts the kind (attach/launch) of debug configuration from the given JSON config.
/// This method should only return error when the kind cannot be determined for a given configuration;
/// in particular, it *should not* validate whether the request as a whole is valid, because that's best left to the debug adapter itself to decide.
async fn request_kind(
fn request_kind(
&self,
config: &serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
@@ -368,7 +368,7 @@ pub trait DebugAdapter: 'static + Send + Sync {
}
}
fn dap_schema(&self) -> serde_json::Value;
async fn dap_schema(&self) -> serde_json::Value;
fn label_for_child_session(&self, _args: &StartDebuggingRequestArguments) -> Option<String> {
None
@@ -394,11 +394,11 @@ impl DebugAdapter for FakeAdapter {
DebugAdapterName(Self::ADAPTER_NAME.into())
}
fn dap_schema(&self) -> serde_json::Value {
async fn dap_schema(&self) -> serde_json::Value {
serde_json::Value::Null
}
async fn request_kind(
fn request_kind(
&self,
config: &serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
@@ -417,7 +417,7 @@ impl DebugAdapter for FakeAdapter {
None
}
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
let config = serde_json::to_value(zed_scenario.request).unwrap();
Ok(DebugScenario {
@@ -443,7 +443,7 @@ impl DebugAdapter for FakeAdapter {
envs: HashMap::default(),
cwd: None,
request_args: StartDebuggingRequestArguments {
request: self.request_kind(&task_definition.config).await?,
request: self.request_kind(&task_definition.config)?,
configuration: task_definition.config.clone(),
},
})

View File

@@ -8,7 +8,8 @@ use dap_types::{
requests::Request,
};
use futures::channel::oneshot;
use gpui::AsyncApp;
use gpui::{AppContext, AsyncApp};
use smol::channel::{Receiver, Sender};
use std::{
hash::Hash,
sync::atomic::{AtomicU64, Ordering},
@@ -43,56 +44,99 @@ impl DebugAdapterClient {
id: SessionId,
binary: DebugAdapterBinary,
message_handler: DapMessageHandler,
cx: &mut AsyncApp,
cx: AsyncApp,
) -> Result<Self> {
let transport_delegate = TransportDelegate::start(&binary, cx).await?;
let ((server_rx, server_tx), transport_delegate) =
TransportDelegate::start(&binary, cx.clone()).await?;
let this = Self {
id,
binary,
transport_delegate,
sequence_count: AtomicU64::new(1),
};
this.connect(message_handler, cx).await?;
log::info!("Successfully connected to debug adapter");
let client_id = this.id;
// start handling events/reverse requests
cx.background_spawn(Self::handle_receive_messages(
client_id,
server_rx,
server_tx.clone(),
message_handler,
))
.detach();
Ok(this)
}
pub fn should_reconnect_for_ssh(&self) -> bool {
self.transport_delegate.tcp_arguments().is_some()
&& self.binary.command.as_deref() == Some("ssh")
}
pub async fn connect(
&self,
message_handler: DapMessageHandler,
cx: &mut AsyncApp,
) -> Result<()> {
self.transport_delegate.connect(message_handler, cx).await
}
pub async fn create_child_connection(
pub async fn reconnect(
&self,
session_id: SessionId,
binary: DebugAdapterBinary,
message_handler: DapMessageHandler,
cx: &mut AsyncApp,
cx: AsyncApp,
) -> Result<Self> {
let binary = if let Some(connection) = self.transport_delegate.tcp_arguments() {
DebugAdapterBinary {
command: None,
arguments: Default::default(),
envs: Default::default(),
cwd: Default::default(),
connection: Some(connection),
let binary = match self.transport_delegate.transport() {
crate::transport::Transport::Tcp(tcp_transport) => DebugAdapterBinary {
command: binary.command,
arguments: binary.arguments,
envs: binary.envs,
cwd: binary.cwd,
connection: Some(crate::adapters::TcpArguments {
host: tcp_transport.host,
port: tcp_transport.port,
timeout: Some(tcp_transport.timeout),
}),
request_args: binary.request_args,
}
} else {
self.binary.clone()
},
_ => self.binary.clone(),
};
Self::start(session_id, binary, message_handler, cx).await
}
async fn handle_receive_messages(
client_id: SessionId,
server_rx: Receiver<Message>,
client_tx: Sender<Message>,
mut message_handler: DapMessageHandler,
) -> Result<()> {
let result = loop {
let message = match server_rx.recv().await {
Ok(message) => message,
Err(e) => break Err(e.into()),
};
match message {
Message::Event(ev) => {
log::debug!("Client {} received event `{}`", client_id.0, &ev);
message_handler(Message::Event(ev))
}
Message::Request(req) => {
log::debug!(
"Client {} received reverse request `{}`",
client_id.0,
&req.command
);
message_handler(Message::Request(req))
}
Message::Response(response) => {
log::debug!("Received response after request timeout: {:#?}", response);
}
}
smol::future::yield_now().await;
};
drop(client_tx);
log::debug!("Handle receive messages dropped");
result
}
/// Send a request to an adapter and get a response back
/// Note: This function will block until a response is sent back from the adapter
pub async fn request<R: Request>(&self, arguments: R::Arguments) -> Result<R::Response> {
@@ -108,7 +152,8 @@ impl DebugAdapterClient {
arguments: Some(serialized_arguments),
};
self.transport_delegate
.add_pending_request(sequence_id, callback_tx);
.add_pending_request(sequence_id, callback_tx)
.await;
log::debug!(
"Client {} send `{}` request with sequence_id: {}",
@@ -185,11 +230,8 @@ impl DebugAdapterClient {
+ Send
+ FnMut(u64, R::Arguments) -> Result<R::Response, dap_types::ErrorResponse>,
{
self.transport_delegate
.transport
.lock()
.as_fake()
.on_request::<R, F>(handler);
let transport = self.transport_delegate.transport().as_fake();
transport.on_request::<R, F>(handler);
}
#[cfg(any(test, feature = "test-support"))]
@@ -208,11 +250,8 @@ impl DebugAdapterClient {
where
F: 'static + Send + Fn(Response),
{
self.transport_delegate
.transport
.lock()
.as_fake()
.on_response::<R, F>(handler);
let transport = self.transport_delegate.transport().as_fake();
transport.on_response::<R, F>(handler).await;
}
#[cfg(any(test, feature = "test-support"))]
@@ -269,7 +308,7 @@ mod tests {
},
},
Box::new(|_| panic!("Did not expect to hit this code path")),
&mut cx.to_async(),
cx.to_async(),
)
.await
.unwrap();
@@ -351,7 +390,7 @@ mod tests {
);
}
}),
&mut cx.to_async(),
cx.to_async(),
)
.await
.unwrap();
@@ -409,7 +448,7 @@ mod tests {
);
}
}),
&mut cx.to_async(),
cx.to_async(),
)
.await
.unwrap();

View File

@@ -51,26 +51,18 @@ pub fn send_telemetry(scenario: &DebugScenario, location: TelemetrySpawnLocation
let Some(adapter) = cx.global::<DapRegistry>().adapter(&scenario.adapter) else {
return;
};
let kind = adapter
.request_kind(&scenario.config)
.ok()
.map(serde_json::to_value)
.and_then(Result::ok);
let dock = DebuggerSettings::get_global(cx).dock;
let config = scenario.config.clone();
let with_build_task = scenario.build.is_some();
let adapter_name = scenario.adapter.clone();
cx.spawn(async move |_| {
let kind = adapter
.request_kind(&config)
.await
.ok()
.map(serde_json::to_value)
.and_then(Result::ok);
telemetry::event!(
"Debugger Session Started",
spawn_location = location,
with_build_task = with_build_task,
kind = kind,
adapter = adapter_name,
dock_position = dock,
);
})
.detach();
telemetry::event!(
"Debugger Session Started",
spawn_location = location,
with_build_task = scenario.build.is_some(),
kind = kind,
adapter = scenario.adapter.as_ref(),
dock_position = dock,
);
}

View File

@@ -14,7 +14,7 @@ use crate::{
};
use std::{collections::BTreeMap, sync::Arc};
/// Given a user build configuration, locator creates a fill-in debug target ([DebugScenario]) on behalf of the user.
/// Given a user build configuration, locator creates a fill-in debug target ([DebugRequest]) on behalf of the user.
#[async_trait]
pub trait DapLocator: Send + Sync {
fn name(&self) -> SharedString;
@@ -50,32 +50,30 @@ impl DapRegistry {
let name = adapter.name();
let _previous_value = self.0.write().adapters.insert(name, adapter);
}
pub fn add_locator(&self, locator: Arc<dyn DapLocator>) {
self.0.write().locators.insert(locator.name(), locator);
}
pub fn remove_adapter(&self, name: &str) {
self.0.write().adapters.remove(name);
}
pub fn remove_locator(&self, locator: &str) {
self.0.write().locators.remove(locator);
}
pub fn adapter_language(&self, adapter_name: &str) -> Option<LanguageName> {
self.adapter(adapter_name)
.and_then(|adapter| adapter.adapter_language_name())
}
pub fn add_locator(&self, locator: Arc<dyn DapLocator>) {
let _previous_value = self.0.write().locators.insert(locator.name(), locator);
debug_assert!(
_previous_value.is_none(),
"Attempted to insert a new debug locator when one is already registered"
);
}
pub async fn adapters_schema(&self) -> task::AdapterSchemas {
let mut schemas = AdapterSchemas(vec![]);
// Clone to avoid holding lock over await points
let adapters = self.0.read().adapters.clone();
for (name, adapter) in adapters.into_iter() {
schemas.0.push(AdapterSchema {
adapter: name.into(),
schema: adapter.dap_schema(),
schema: adapter.dap_schema().await,
});
}

View File

@@ -1,19 +1,16 @@
use anyhow::{Context as _, Result, anyhow, bail};
#[cfg(any(test, feature = "test-support"))]
use async_pipe::{PipeReader, PipeWriter};
use dap_types::{
ErrorResponse,
messages::{Message, Response},
};
use futures::{AsyncRead, AsyncReadExt as _, AsyncWrite, FutureExt as _, channel::oneshot, select};
use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, Task};
use parking_lot::Mutex;
use proto::ErrorExt;
use gpui::{AppContext as _, AsyncApp, Task};
use settings::Settings as _;
use smallvec::SmallVec;
use smol::{
channel::{Receiver, Sender, unbounded},
io::{AsyncBufReadExt as _, AsyncWriteExt, BufReader},
lock::Mutex,
net::{TcpListener, TcpStream},
};
use std::{
@@ -26,11 +23,7 @@ use std::{
use task::TcpArgumentsTemplate;
use util::ConnectionResult;
use crate::{
adapters::{DebugAdapterBinary, TcpArguments},
client::DapMessageHandler,
debugger_settings::DebuggerSettings,
};
use crate::{adapters::DebugAdapterBinary, debugger_settings::DebuggerSettings};
pub(crate) type IoMessage = str;
pub(crate) type Command = str;
@@ -42,152 +35,232 @@ pub enum LogKind {
Rpc,
}
#[derive(Clone, Copy)]
pub enum IoKind {
StdIn,
StdOut,
StdErr,
}
type Requests = Arc<Mutex<HashMap<u64, oneshot::Sender<Result<Response>>>>>;
type LogHandlers = Arc<Mutex<SmallVec<[(LogKind, IoHandler); 2]>>>;
pub struct TransportPipe {
input: Box<dyn AsyncWrite + Unpin + Send + 'static>,
output: Box<dyn AsyncRead + Unpin + Send + 'static>,
stdout: Option<Box<dyn AsyncRead + Unpin + Send + 'static>>,
stderr: Option<Box<dyn AsyncRead + Unpin + Send + 'static>>,
}
pub trait Transport: Send + Sync {
fn has_adapter_logs(&self) -> bool;
fn tcp_arguments(&self) -> Option<TcpArguments>;
fn connect(
&mut self,
) -> Task<
Result<(
Box<dyn AsyncWrite + Unpin + Send + 'static>,
Box<dyn AsyncRead + Unpin + Send + 'static>,
)>,
>;
fn kill(&self);
#[cfg(any(test, feature = "test-support"))]
fn as_fake(&self) -> &FakeTransport {
unreachable!()
impl TransportPipe {
pub fn new(
input: Box<dyn AsyncWrite + Unpin + Send + 'static>,
output: Box<dyn AsyncRead + Unpin + Send + 'static>,
stdout: Option<Box<dyn AsyncRead + Unpin + Send + 'static>>,
stderr: Option<Box<dyn AsyncRead + Unpin + Send + 'static>>,
) -> Self {
TransportPipe {
input,
output,
stdout,
stderr,
}
}
}
async fn start(
binary: &DebugAdapterBinary,
log_handlers: LogHandlers,
cx: &mut AsyncApp,
) -> Result<Box<dyn Transport>> {
type Requests = Arc<Mutex<HashMap<u64, oneshot::Sender<Result<Response>>>>>;
type LogHandlers = Arc<parking_lot::Mutex<SmallVec<[(LogKind, IoHandler); 2]>>>;
pub enum Transport {
Stdio(StdioTransport),
Tcp(TcpTransport),
#[cfg(any(test, feature = "test-support"))]
if cfg!(any(test, feature = "test-support")) {
return Ok(Box::new(FakeTransport::start(cx).await?));
Fake(FakeTransport),
}
impl Transport {
async fn start(binary: &DebugAdapterBinary, cx: AsyncApp) -> Result<(TransportPipe, Self)> {
#[cfg(any(test, feature = "test-support"))]
if cfg!(any(test, feature = "test-support")) {
return FakeTransport::start(cx)
.await
.map(|(transports, fake)| (transports, Self::Fake(fake)));
}
if binary.connection.is_some() {
TcpTransport::start(binary, cx)
.await
.map(|(transports, tcp)| (transports, Self::Tcp(tcp)))
.context("Tried to connect to a debug adapter via TCP transport layer")
} else {
StdioTransport::start(binary, cx)
.await
.map(|(transports, stdio)| (transports, Self::Stdio(stdio)))
.context("Tried to connect to a debug adapter via stdin/stdout transport layer")
}
}
if binary.connection.is_some() {
Ok(Box::new(
TcpTransport::start(binary, log_handlers, cx).await?,
))
} else {
Ok(Box::new(
StdioTransport::start(binary, log_handlers, cx).await?,
))
fn has_adapter_logs(&self) -> bool {
match self {
Transport::Stdio(stdio_transport) => stdio_transport.has_adapter_logs(),
Transport::Tcp(tcp_transport) => tcp_transport.has_adapter_logs(),
#[cfg(any(test, feature = "test-support"))]
Transport::Fake(fake_transport) => fake_transport.has_adapter_logs(),
}
}
async fn kill(&self) {
match self {
Transport::Stdio(stdio_transport) => stdio_transport.kill().await,
Transport::Tcp(tcp_transport) => tcp_transport.kill().await,
#[cfg(any(test, feature = "test-support"))]
Transport::Fake(fake_transport) => fake_transport.kill().await,
}
}
#[cfg(any(test, feature = "test-support"))]
pub(crate) fn as_fake(&self) -> &FakeTransport {
match self {
Transport::Fake(fake_transport) => fake_transport,
_ => panic!("Not a fake transport layer"),
}
}
}
pub(crate) struct TransportDelegate {
log_handlers: LogHandlers,
current_requests: Requests,
pending_requests: Requests,
pub(crate) transport: Mutex<Box<dyn Transport>>,
server_tx: smol::lock::Mutex<Option<Sender<Message>>>,
tasks: Mutex<Vec<Task<()>>>,
transport: Transport,
server_tx: Arc<Mutex<Option<Sender<Message>>>>,
_tasks: Vec<Task<()>>,
}
impl TransportDelegate {
pub(crate) async fn start(binary: &DebugAdapterBinary, cx: &mut AsyncApp) -> Result<Self> {
let log_handlers: LogHandlers = Default::default();
let transport = start(binary, log_handlers.clone(), cx).await?;
Ok(Self {
transport: Mutex::new(transport),
log_handlers,
pub(crate) async fn start(
binary: &DebugAdapterBinary,
cx: AsyncApp,
) -> Result<((Receiver<Message>, Sender<Message>), Self)> {
let (transport_pipes, transport) = Transport::start(binary, cx.clone()).await?;
let mut this = Self {
transport,
server_tx: Default::default(),
log_handlers: Default::default(),
current_requests: Default::default(),
pending_requests: Default::default(),
tasks: Default::default(),
})
_tasks: Vec::new(),
};
let messages = this.start_handlers(transport_pipes, cx).await?;
Ok((messages, this))
}
pub async fn connect(
&self,
message_handler: DapMessageHandler,
cx: &mut AsyncApp,
) -> Result<()> {
async fn start_handlers(
&mut self,
mut params: TransportPipe,
cx: AsyncApp,
) -> Result<(Receiver<Message>, Sender<Message>)> {
let (client_tx, server_rx) = unbounded::<Message>();
let (server_tx, client_rx) = unbounded::<Message>();
self.tasks.lock().clear();
let log_dap_communications =
cx.update(|cx| DebuggerSettings::get_global(cx).log_dap_communications)
.with_context(|| "Failed to get Debugger Setting log dap communications error in transport::start_handlers. Defaulting to false")
.unwrap_or(false);
let connect = self.transport.lock().connect();
let (input, output) = connect.await?;
let log_handler = if log_dap_communications {
Some(self.log_handlers.clone())
} else {
None
};
let pending_requests = self.pending_requests.clone();
let output_log_handler = log_handler.clone();
{
let mut tasks = self.tasks.lock();
tasks.push(cx.background_spawn(async move {
match Self::recv_from_server(
output,
message_handler,
let adapter_log_handler = log_handler.clone();
cx.update(|cx| {
if let Some(stdout) = params.stdout.take() {
self._tasks.push(cx.background_spawn(async move {
match Self::handle_adapter_log(stdout, adapter_log_handler).await {
ConnectionResult::Timeout => {
log::error!("Timed out when handling debugger log");
}
ConnectionResult::ConnectionReset => {
log::info!("Debugger logs connection closed");
}
ConnectionResult::Result(Ok(())) => {}
ConnectionResult::Result(Err(e)) => {
log::error!("Error handling debugger log: {e}");
}
}
}));
}
let pending_requests = self.pending_requests.clone();
let output_log_handler = log_handler.clone();
self._tasks.push(cx.background_spawn(async move {
match Self::handle_output(
params.output,
client_tx,
pending_requests.clone(),
output_log_handler,
)
.await
{
Ok(()) => {
pending_requests.lock().drain().for_each(|(_, request)| {
request
.send(Err(anyhow!("debugger shutdown unexpectedly")))
.ok();
});
}
Err(e) => {
pending_requests.lock().drain().for_each(|(_, request)| {
request.send(Err(e.cloned())).ok();
});
}
Ok(()) => {}
Err(e) => log::error!("Error handling debugger output: {e}"),
}
let mut pending_requests = pending_requests.lock().await;
pending_requests.drain().for_each(|(_, request)| {
request
.send(Err(anyhow!("debugger shutdown unexpectedly")))
.ok();
});
}));
tasks.push(cx.background_spawn(async move {
match Self::send_to_server(input, client_rx, log_handler).await {
if let Some(stderr) = params.stderr.take() {
let log_handlers = self.log_handlers.clone();
self._tasks.push(cx.background_spawn(async move {
match Self::handle_error(stderr, log_handlers).await {
ConnectionResult::Timeout => {
log::error!("Timed out reading debugger error stream")
}
ConnectionResult::ConnectionReset => {
log::info!("Debugger closed its error stream")
}
ConnectionResult::Result(Ok(())) => {}
ConnectionResult::Result(Err(e)) => {
log::error!("Error handling debugger error: {e}")
}
}
}));
}
let current_requests = self.current_requests.clone();
let pending_requests = self.pending_requests.clone();
let log_handler = log_handler.clone();
self._tasks.push(cx.background_spawn(async move {
match Self::handle_input(
params.input,
client_rx,
current_requests,
pending_requests,
log_handler,
)
.await
{
Ok(()) => {}
Err(e) => log::error!("Error handling debugger input: {e}"),
}
}));
}
})?;
{
let mut lock = self.server_tx.lock().await;
*lock = Some(server_tx.clone());
}
Ok(())
Ok((server_rx, server_tx))
}
pub(crate) fn tcp_arguments(&self) -> Option<TcpArguments> {
self.transport.lock().tcp_arguments()
}
pub(crate) fn add_pending_request(
pub(crate) async fn add_pending_request(
&self,
sequence_id: u64,
request: oneshot::Sender<Result<Response>>,
) {
let mut pending_requests = self.pending_requests.lock();
let mut pending_requests = self.pending_requests.lock().await;
pending_requests.insert(sequence_id, request);
}
@@ -199,41 +272,52 @@ impl TransportDelegate {
}
}
async fn handle_adapter_log(
stdout: impl AsyncRead + Unpin + Send + 'static,
iokind: IoKind,
log_handlers: LogHandlers,
) {
async fn handle_adapter_log<Stdout>(
stdout: Stdout,
log_handlers: Option<LogHandlers>,
) -> ConnectionResult<()>
where
Stdout: AsyncRead + Unpin + Send + 'static,
{
let mut reader = BufReader::new(stdout);
let mut line = String::new();
loop {
let result = loop {
line.truncate(0);
match reader.read_line(&mut line).await {
Ok(0) => break,
match reader
.read_line(&mut line)
.await
.context("reading adapter log line")
{
Ok(0) => break ConnectionResult::ConnectionReset,
Ok(_) => {}
Err(e) => {
log::debug!("handle_adapter_log: {}", e);
break;
}
Err(e) => break ConnectionResult::Result(Err(e)),
}
for (kind, handler) in log_handlers.lock().iter_mut() {
if matches!(kind, LogKind::Adapter) {
handler(iokind, None, line.as_str());
if let Some(log_handlers) = log_handlers.as_ref() {
for (kind, handler) in log_handlers.lock().iter_mut() {
if matches!(kind, LogKind::Adapter) {
handler(IoKind::StdOut, None, line.as_str());
}
}
}
}
};
log::debug!("Handle adapter log dropped");
result
}
fn build_rpc_message(message: String) -> String {
format!("Content-Length: {}\r\n\r\n{}", message.len(), message)
}
async fn send_to_server<Stdin>(
async fn handle_input<Stdin>(
mut server_stdin: Stdin,
client_rx: Receiver<Message>,
current_requests: Requests,
pending_requests: Requests,
log_handlers: Option<LogHandlers>,
) -> Result<()>
where
@@ -242,6 +326,12 @@ impl TransportDelegate {
let result = loop {
match client_rx.recv().await {
Ok(message) => {
if let Message::Request(request) = &message {
if let Some(sender) = current_requests.lock().await.remove(&request.seq) {
pending_requests.lock().await.insert(request.seq, sender);
}
}
let command = match &message {
Message::Request(request) => Some(request.command.as_str()),
Message::Response(response) => Some(response.command.as_str()),
@@ -281,9 +371,9 @@ impl TransportDelegate {
result
}
async fn recv_from_server<Stdout>(
async fn handle_output<Stdout>(
server_stdout: Stdout,
mut message_handler: DapMessageHandler,
client_tx: Sender<Message>,
pending_requests: Requests,
log_handlers: Option<LogHandlers>,
) -> Result<()>
@@ -303,25 +393,59 @@ impl TransportDelegate {
return Ok(());
}
ConnectionResult::Result(Ok(Message::Response(res))) => {
let tx = pending_requests.lock().remove(&res.request_seq);
if let Some(tx) = tx {
if let Some(tx) = pending_requests.lock().await.remove(&res.request_seq) {
if let Err(e) = tx.send(Self::process_response(res)) {
log::trace!("Did not send response `{:?}` for a cancelled", e);
}
} else {
message_handler(Message::Response(res))
client_tx.send(Message::Response(res)).await?;
}
}
ConnectionResult::Result(Ok(message)) => message_handler(message),
ConnectionResult::Result(Ok(message)) => client_tx.send(message).await?,
ConnectionResult::Result(Err(e)) => break Err(e),
}
};
drop(client_tx);
log::debug!("Handle adapter output dropped");
result
}
async fn handle_error<Stderr>(stderr: Stderr, log_handlers: LogHandlers) -> ConnectionResult<()>
where
Stderr: AsyncRead + Unpin + Send + 'static,
{
log::debug!("Handle error started");
let mut buffer = String::new();
let mut reader = BufReader::new(stderr);
let result = loop {
match reader
.read_line(&mut buffer)
.await
.context("reading error log line")
{
Ok(0) => break ConnectionResult::ConnectionReset,
Ok(_) => {
for (kind, log_handler) in log_handlers.lock().iter_mut() {
if matches!(kind, LogKind::Adapter) {
log_handler(IoKind::StdErr, None, buffer.as_str());
}
}
buffer.truncate(0);
}
Err(error) => break ConnectionResult::Result(Err(error)),
}
};
log::debug!("Handle adapter error dropped");
result
}
fn process_response(response: Response) -> Result<Response> {
if response.success {
Ok(response)
@@ -355,10 +479,14 @@ impl TransportDelegate {
loop {
buffer.truncate(0);
match reader.read_line(buffer).await {
match reader
.read_line(buffer)
.await
.with_context(|| "reading a message from server")
{
Ok(0) => return ConnectionResult::ConnectionReset,
Ok(_) => {}
Err(e) => return ConnectionResult::Result(Err(e.into())),
Err(e) => return ConnectionResult::Result(Err(e)),
};
if buffer == "\r\n" {
@@ -419,8 +547,16 @@ impl TransportDelegate {
server_tx.close();
}
self.pending_requests.lock().clear();
self.transport.lock().kill();
let mut current_requests = self.current_requests.lock().await;
let mut pending_requests = self.pending_requests.lock().await;
current_requests.clear();
pending_requests.clear();
self.transport.kill().await;
drop(current_requests);
drop(pending_requests);
log::debug!("Shutdown client completed");
@@ -428,7 +564,11 @@ impl TransportDelegate {
}
pub fn has_adapter_logs(&self) -> bool {
self.transport.lock().has_adapter_logs()
self.transport.has_adapter_logs()
}
pub fn transport(&self) -> &Transport {
&self.transport
}
pub fn add_log_handler<F>(&self, f: F, kind: LogKind)
@@ -441,13 +581,10 @@ impl TransportDelegate {
}
pub struct TcpTransport {
executor: BackgroundExecutor,
pub port: u16,
pub host: Ipv4Addr,
pub timeout: u64,
process: Arc<Mutex<Option<Child>>>,
_stderr_task: Option<Task<()>>,
_stdout_task: Option<Task<()>>,
process: Option<Mutex<Child>>,
}
impl TcpTransport {
@@ -467,11 +604,7 @@ impl TcpTransport {
.port())
}
async fn start(
binary: &DebugAdapterBinary,
log_handlers: LogHandlers,
cx: &mut AsyncApp,
) -> Result<Self> {
async fn start(binary: &DebugAdapterBinary, cx: AsyncApp) -> Result<(TransportPipe, Self)> {
let connection_args = binary
.connection
.as_ref()
@@ -480,11 +613,7 @@ impl TcpTransport {
let host = connection_args.host;
let port = connection_args.port;
let mut process = None;
let mut stdout_task = None;
let mut stderr_task = None;
if let Some(command) = &binary.command {
let mut process = if let Some(command) = &binary.command {
let mut command = util::command::new_std_command(&command);
if let Some(cwd) = &binary.cwd {
@@ -494,142 +623,101 @@ impl TcpTransport {
command.args(&binary.arguments);
command.envs(&binary.envs);
let mut p = Child::spawn(command, Stdio::null())
.with_context(|| "failed to start debug adapter.")?;
stdout_task = p.stdout.take().map(|stdout| {
cx.background_executor()
.spawn(TransportDelegate::handle_adapter_log(
stdout,
IoKind::StdOut,
log_handlers.clone(),
))
});
stderr_task = p.stderr.take().map(|stderr| {
cx.background_executor()
.spawn(TransportDelegate::handle_adapter_log(
stderr,
IoKind::StdErr,
log_handlers,
))
});
process = Some(p);
Some(
Child::spawn(command, Stdio::null())
.with_context(|| "failed to start debug adapter.")?,
)
} else {
None
};
let address = SocketAddrV4::new(host, port);
let timeout = connection_args.timeout.unwrap_or_else(|| {
cx.update(|cx| DebuggerSettings::get_global(cx).timeout)
.unwrap_or(20000u64)
.unwrap_or(2000u64)
});
let (mut process, (rx, tx)) = select! {
_ = cx.background_executor().timer(Duration::from_millis(timeout)).fuse() => {
anyhow::bail!("Connection to TCP DAP timeout {host}:{port}");
},
result = cx.spawn(async move |cx| {
loop {
match TcpStream::connect(address).await {
Ok(stream) => return Ok((process, stream.split())),
Err(_) => {
if let Some(p) = &mut process {
if let Ok(Some(_)) = p.try_status() {
let output = process.take().unwrap().into_inner().output().await?;
let output = if output.stderr.is_empty() {
String::from_utf8_lossy(&output.stdout).to_string()
} else {
String::from_utf8_lossy(&output.stderr).to_string()
};
anyhow::bail!("{output}\nerror: process exited before debugger attached.");
}
}
cx.background_executor().timer(Duration::from_millis(100)).await;
}
}
}
}).fuse() => result?
};
log::info!(
"Debug adapter has connected to TCP server {}:{}",
host,
port
);
let stdout = process.as_mut().and_then(|p| p.stdout.take());
let stderr = process.as_mut().and_then(|p| p.stderr.take());
let this = Self {
executor: cx.background_executor().clone(),
port,
host,
process: Arc::new(Mutex::new(process)),
process: process.map(Mutex::new),
timeout,
_stdout_task: stdout_task,
_stderr_task: stderr_task,
};
Ok(this)
}
}
let pipe = TransportPipe::new(
Box::new(tx),
Box::new(BufReader::new(rx)),
stdout.map(|s| Box::new(s) as Box<dyn AsyncRead + Unpin + Send>),
stderr.map(|s| Box::new(s) as Box<dyn AsyncRead + Unpin + Send>),
);
Ok((pipe, this))
}
impl Transport for TcpTransport {
fn has_adapter_logs(&self) -> bool {
true
}
fn kill(&self) {
if let Some(process) = &mut *self.process.lock() {
process.kill();
async fn kill(&self) {
if let Some(process) = &self.process {
let mut process = process.lock().await;
Child::kill(&mut process);
}
}
fn tcp_arguments(&self) -> Option<TcpArguments> {
Some(TcpArguments {
host: self.host,
port: self.port,
timeout: Some(self.timeout),
})
}
fn connect(
&mut self,
) -> Task<
Result<(
Box<dyn AsyncWrite + Unpin + Send + 'static>,
Box<dyn AsyncRead + Unpin + Send + 'static>,
)>,
> {
let executor = self.executor.clone();
let timeout = self.timeout;
let address = SocketAddrV4::new(self.host, self.port);
let process = self.process.clone();
executor.clone().spawn(async move {
select! {
_ = executor.timer(Duration::from_millis(timeout)).fuse() => {
anyhow::bail!("Connection to TCP DAP timeout {address}");
},
result = executor.clone().spawn(async move {
loop {
match TcpStream::connect(address).await {
Ok(stream) => {
let (read, write) = stream.split();
return Ok((Box::new(write) as _, Box::new(read) as _))
},
Err(_) => {
let has_process = process.lock().is_some();
if has_process {
let status = process.lock().as_mut().unwrap().try_status();
if let Ok(Some(_)) = status {
let process = process.lock().take().unwrap().into_inner();
let output = process.output().await?;
let output = if output.stderr.is_empty() {
String::from_utf8_lossy(&output.stdout).to_string()
} else {
String::from_utf8_lossy(&output.stderr).to_string()
};
anyhow::bail!("{output}\nerror: process exited before debugger attached.");
}
}
executor.timer(Duration::from_millis(100)).await;
}
}
}
}).fuse() => result
}
})
}
}
impl Drop for TcpTransport {
fn drop(&mut self) {
if let Some(mut p) = self.process.lock().take() {
p.kill();
if let Some(mut p) = self.process.take() {
p.get_mut().kill();
}
}
}
pub struct StdioTransport {
process: Mutex<Child>,
_stderr_task: Option<Task<()>>,
}
impl StdioTransport {
// #[allow(dead_code, reason = "This is used in non test builds of Zed")]
async fn start(
binary: &DebugAdapterBinary,
log_handlers: LogHandlers,
cx: &mut AsyncApp,
) -> Result<Self> {
#[allow(dead_code, reason = "This is used in non test builds of Zed")]
async fn start(binary: &DebugAdapterBinary, _: AsyncApp) -> Result<(TransportPipe, Self)> {
let Some(binary_command) = &binary.command else {
bail!(
"When using the `stdio` transport, the path to a debug adapter binary must be set by Zed."
@@ -652,52 +740,42 @@ impl StdioTransport {
)
})?;
let err_task = process.stderr.take().map(|stderr| {
cx.background_spawn(TransportDelegate::handle_adapter_log(
stderr,
IoKind::StdErr,
log_handlers,
))
});
let stdin = process.stdin.take().context("Failed to open stdin")?;
let stdout = process.stdout.take().context("Failed to open stdout")?;
let stderr = process
.stderr
.take()
.map(|io_err| Box::new(io_err) as Box<dyn AsyncRead + Unpin + Send>);
if stderr.is_none() {
bail!(
"Failed to connect to stderr for debug adapter command {}",
&binary_command
);
}
log::info!("Debug adapter has connected to stdio adapter");
let process = Mutex::new(process);
Ok(Self {
process,
_stderr_task: err_task,
})
Ok((
TransportPipe::new(
Box::new(stdin),
Box::new(BufReader::new(stdout)),
None,
stderr,
),
Self { process },
))
}
}
impl Transport for StdioTransport {
fn has_adapter_logs(&self) -> bool {
false
}
fn kill(&self) {
self.process.lock().kill()
}
fn connect(
&mut self,
) -> Task<
Result<(
Box<dyn AsyncWrite + Unpin + Send + 'static>,
Box<dyn AsyncRead + Unpin + Send + 'static>,
)>,
> {
let mut process = self.process.lock();
let result = util::maybe!({
Ok((
Box::new(process.stdin.take().context("Cannot reconnect")?) as _,
Box::new(process.stdout.take().context("Cannot reconnect")?) as _,
))
});
Task::ready(result)
}
fn tcp_arguments(&self) -> Option<TcpArguments> {
None
async fn kill(&self) {
let mut process = self.process.lock().await;
Child::kill(&mut process);
}
}
@@ -717,12 +795,9 @@ type ResponseHandler = Box<dyn Send + Fn(Response)>;
#[cfg(any(test, feature = "test-support"))]
pub struct FakeTransport {
// for sending fake response back from adapter side
request_handlers: Arc<Mutex<HashMap<&'static str, RequestHandler>>>,
request_handlers: Arc<parking_lot::Mutex<HashMap<&'static str, RequestHandler>>>,
// for reverse request responses
response_handlers: Arc<Mutex<HashMap<&'static str, ResponseHandler>>>,
stdin_writer: Option<PipeWriter>,
stdout_reader: Option<PipeReader>,
response_handlers: Arc<parking_lot::Mutex<HashMap<&'static str, ResponseHandler>>>,
}
#[cfg(any(test, feature = "test-support"))]
@@ -758,7 +833,7 @@ impl FakeTransport {
);
}
pub fn on_response<R: dap_types::requests::Request, F>(&self, handler: F)
pub async fn on_response<R: dap_types::requests::Request, F>(&self, handler: F)
where
F: 'static + Send + Fn(Response),
{
@@ -767,23 +842,20 @@ impl FakeTransport {
.insert(R::COMMAND, Box::new(handler));
}
async fn start(cx: &mut AsyncApp) -> Result<Self> {
async fn start(cx: AsyncApp) -> Result<(TransportPipe, Self)> {
let this = Self {
request_handlers: Arc::new(parking_lot::Mutex::new(HashMap::default())),
response_handlers: Arc::new(parking_lot::Mutex::new(HashMap::default())),
};
use dap_types::requests::{Request, RunInTerminal, StartDebugging};
use serde_json::json;
let (stdin_writer, stdin_reader) = async_pipe::pipe();
let (stdout_writer, stdout_reader) = async_pipe::pipe();
let this = Self {
request_handlers: Arc::new(Mutex::new(HashMap::default())),
response_handlers: Arc::new(Mutex::new(HashMap::default())),
stdin_writer: Some(stdin_writer),
stdout_reader: Some(stdout_reader),
};
let request_handlers = this.request_handlers.clone();
let response_handlers = this.response_handlers.clone();
let stdout_writer = Arc::new(smol::lock::Mutex::new(stdout_writer));
let stdout_writer = Arc::new(Mutex::new(stdout_writer));
cx.background_spawn(async move {
let mut reader = BufReader::new(stdin_reader);
@@ -873,43 +945,17 @@ impl FakeTransport {
})
.detach();
Ok(this)
}
}
#[cfg(any(test, feature = "test-support"))]
impl Transport for FakeTransport {
fn tcp_arguments(&self) -> Option<TcpArguments> {
None
}
fn connect(
&mut self,
) -> Task<
Result<(
Box<dyn AsyncWrite + Unpin + Send + 'static>,
Box<dyn AsyncRead + Unpin + Send + 'static>,
)>,
> {
let result = util::maybe!({
Ok((
Box::new(self.stdin_writer.take().context("Cannot reconnect")?) as _,
Box::new(self.stdout_reader.take().context("Cannot reconnect")?) as _,
))
});
Task::ready(result)
Ok((
TransportPipe::new(Box::new(stdin_writer), Box::new(stdout_reader), None, None),
this,
))
}
fn has_adapter_logs(&self) -> bool {
false
}
fn kill(&self) {}
#[cfg(any(test, feature = "test-support"))]
fn as_fake(&self) -> &FakeTransport {
self
}
async fn kill(&self) {}
}
struct Child {

View File

@@ -19,7 +19,7 @@ pub(crate) struct CodeLldbDebugAdapter {
impl CodeLldbDebugAdapter {
const ADAPTER_NAME: &'static str = "CodeLLDB";
async fn request_args(
fn request_args(
&self,
delegate: &Arc<dyn DapDelegate>,
task_definition: &DebugTaskDefinition,
@@ -37,7 +37,7 @@ impl CodeLldbDebugAdapter {
obj.entry("cwd")
.or_insert(delegate.worktree_root_path().to_string_lossy().into());
let request = self.request_kind(&configuration).await?;
let request = self.request_kind(&configuration)?;
Ok(dap::StartDebuggingRequestArguments {
request,
@@ -89,7 +89,7 @@ impl DebugAdapter for CodeLldbDebugAdapter {
DebugAdapterName(Self::ADAPTER_NAME.into())
}
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
let mut configuration = json!({
"request": match zed_scenario.request {
DebugRequest::Launch(_) => "launch",
@@ -133,7 +133,7 @@ impl DebugAdapter for CodeLldbDebugAdapter {
})
}
fn dap_schema(&self) -> serde_json::Value {
async fn dap_schema(&self) -> serde_json::Value {
json!({
"properties": {
"request": {
@@ -368,7 +368,7 @@ impl DebugAdapter for CodeLldbDebugAdapter {
"--settings".into(),
json!({"sourceLanguages": ["cpp", "rust"]}).to_string(),
],
request_args: self.request_args(delegate, &config).await?,
request_args: self.request_args(delegate, &config)?,
envs: HashMap::default(),
connection: None,
})

View File

@@ -21,7 +21,7 @@ impl DebugAdapter for GdbDebugAdapter {
DebugAdapterName(Self::ADAPTER_NAME.into())
}
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
let mut obj = serde_json::Map::default();
match &zed_scenario.request {
@@ -63,7 +63,7 @@ impl DebugAdapter for GdbDebugAdapter {
})
}
fn dap_schema(&self) -> serde_json::Value {
async fn dap_schema(&self) -> serde_json::Value {
json!({
"oneOf": [
{
@@ -191,7 +191,7 @@ impl DebugAdapter for GdbDebugAdapter {
cwd: Some(delegate.worktree_root_path().to_path_buf()),
connection: None,
request_args: StartDebuggingRequestArguments {
request: self.request_kind(&config.config).await?,
request: self.request_kind(&config.config)?,
configuration,
},
})

View File

@@ -1,5 +1,4 @@
use anyhow::{Context as _, bail};
use collections::HashMap;
use dap::{
StartDebuggingRequestArguments,
adapters::{
@@ -10,7 +9,7 @@ use dap::{
use gpui::{AsyncApp, SharedString};
use language::LanguageName;
use std::{env::consts, ffi::OsStr, path::PathBuf, sync::OnceLock};
use std::{collections::HashMap, env::consts, ffi::OsStr, path::PathBuf, sync::OnceLock};
use task::TcpArgumentsTemplate;
use util;
@@ -96,7 +95,7 @@ impl DebugAdapter for GoDebugAdapter {
Some(SharedString::new_static("Go").into())
}
fn dap_schema(&self) -> serde_json::Value {
async fn dap_schema(&self) -> serde_json::Value {
// Create common properties shared between launch and attach
let common_properties = json!({
"debugAdapter": {
@@ -352,7 +351,7 @@ impl DebugAdapter for GoDebugAdapter {
})
}
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
let mut args = match &zed_scenario.request {
dap::DebugRequest::Attach(attach_config) => {
json!({
@@ -495,7 +494,7 @@ impl DebugAdapter for GoDebugAdapter {
connection,
request_args: StartDebuggingRequestArguments {
configuration,
request: self.request_kind(&task_definition.config).await?,
request: self.request_kind(&task_definition.config)?,
},
})
}

View File

@@ -124,7 +124,7 @@ impl JsDebugAdapter {
}),
request_args: StartDebuggingRequestArguments {
configuration,
request: self.request_kind(&task_definition.config).await?,
request: self.request_kind(&task_definition.config)?,
},
})
}
@@ -136,7 +136,7 @@ impl DebugAdapter for JsDebugAdapter {
DebugAdapterName(Self::ADAPTER_NAME.into())
}
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
let mut args = json!({
"type": "pwa-node",
"request": match zed_scenario.request {
@@ -182,7 +182,7 @@ impl DebugAdapter for JsDebugAdapter {
})
}
fn dap_schema(&self) -> serde_json::Value {
async fn dap_schema(&self) -> serde_json::Value {
json!({
"oneOf": [
{

View File

@@ -102,8 +102,7 @@ impl PhpDebugAdapter {
envs: HashMap::default(),
request_args: StartDebuggingRequestArguments {
configuration,
request: <Self as DebugAdapter>::request_kind(self, &task_definition.config)
.await?,
request: <Self as DebugAdapter>::request_kind(self, &task_definition.config)?,
},
})
}
@@ -111,7 +110,7 @@ impl PhpDebugAdapter {
#[async_trait(?Send)]
impl DebugAdapter for PhpDebugAdapter {
fn dap_schema(&self) -> serde_json::Value {
async fn dap_schema(&self) -> serde_json::Value {
json!({
"properties": {
"request": {
@@ -291,14 +290,11 @@ impl DebugAdapter for PhpDebugAdapter {
Some(SharedString::new_static("PHP").into())
}
async fn request_kind(
&self,
_: &serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
fn request_kind(&self, _: &serde_json::Value) -> Result<StartDebuggingRequestArgumentsRequest> {
Ok(StartDebuggingRequestArgumentsRequest::Launch)
}
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
let obj = match &zed_scenario.request {
dap::DebugRequest::Attach(_) => {
bail!("Php adapter doesn't support attaching")

View File

@@ -81,12 +81,12 @@ impl PythonDebugAdapter {
}
}
async fn request_args(
fn request_args(
&self,
delegate: &Arc<dyn DapDelegate>,
task_definition: &DebugTaskDefinition,
) -> Result<StartDebuggingRequestArguments> {
let request = self.request_kind(&task_definition.config).await?;
let request = self.request_kind(&task_definition.config)?;
let mut configuration = task_definition.config.clone();
if let Ok(console) = configuration.dot_get_mut("console") {
@@ -202,7 +202,7 @@ impl PythonDebugAdapter {
}),
cwd: Some(delegate.worktree_root_path().to_path_buf()),
envs: HashMap::default(),
request_args: self.request_args(delegate, config).await?,
request_args: self.request_args(delegate, config)?,
})
}
}
@@ -217,7 +217,7 @@ impl DebugAdapter for PythonDebugAdapter {
Some(SharedString::new_static("Python").into())
}
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
let mut args = json!({
"request": match zed_scenario.request {
DebugRequest::Launch(_) => "launch",
@@ -257,7 +257,7 @@ impl DebugAdapter for PythonDebugAdapter {
})
}
fn dap_schema(&self) -> serde_json::Value {
async fn dap_schema(&self) -> serde_json::Value {
json!({
"properties": {
"request": {

View File

@@ -45,14 +45,11 @@ impl DebugAdapter for RubyDebugAdapter {
Some(SharedString::new_static("Ruby").into())
}
async fn request_kind(
&self,
_: &serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
fn request_kind(&self, _: &serde_json::Value) -> Result<StartDebuggingRequestArgumentsRequest> {
Ok(StartDebuggingRequestArgumentsRequest::Launch)
}
fn dap_schema(&self) -> serde_json::Value {
async fn dap_schema(&self) -> serde_json::Value {
json!({
"type": "object",
"properties": {
@@ -86,7 +83,7 @@ impl DebugAdapter for RubyDebugAdapter {
})
}
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
match zed_scenario.request {
DebugRequest::Launch(launch) => {
let config = RubyDebugConfig {
@@ -199,7 +196,7 @@ impl DebugAdapter for RubyDebugAdapter {
),
envs: ruby_config.env.into_iter().collect(),
request_args: StartDebuggingRequestArguments {
request: self.request_kind(&definition.config).await?,
request: self.request_kind(&definition.config)?,
configuration,
},
})

View File

@@ -12,7 +12,6 @@ dap.workspace = true
extension.workspace = true
gpui.workspace = true
serde_json.workspace = true
util.workspace = true
task.workspace = true
workspace-hack = { version = "0.1", path = "../../tooling/workspace-hack" }

View File

@@ -1,15 +1,11 @@
mod extension_dap_adapter;
mod extension_locator_adapter;
use std::{path::Path, sync::Arc};
use std::sync::Arc;
use dap::DapRegistry;
use extension::{ExtensionDebugAdapterProviderProxy, ExtensionHostProxy};
use extension_dap_adapter::ExtensionDapAdapter;
use gpui::App;
use util::ResultExt;
use crate::extension_locator_adapter::ExtensionLocatorAdapter;
pub fn init(extension_host_proxy: Arc<ExtensionHostProxy>, cx: &mut App) {
let language_server_registry_proxy = DebugAdapterRegistryProxy::new(cx);
@@ -34,33 +30,11 @@ impl ExtensionDebugAdapterProviderProxy for DebugAdapterRegistryProxy {
&self,
extension: Arc<dyn extension::Extension>,
debug_adapter_name: Arc<str>,
schema_path: &Path,
) {
if let Some(adapter) =
ExtensionDapAdapter::new(extension, debug_adapter_name, schema_path).log_err()
{
self.debug_adapter_registry.add_adapter(Arc::new(adapter));
}
}
fn register_debug_locator(
&self,
extension: Arc<dyn extension::Extension>,
locator_name: Arc<str>,
) {
self.debug_adapter_registry
.add_locator(Arc::new(ExtensionLocatorAdapter::new(
.add_adapter(Arc::new(ExtensionDapAdapter::new(
extension,
locator_name,
debug_adapter_name,
)));
}
fn unregister_debug_adapter(&self, debug_adapter_name: Arc<str>) {
self.debug_adapter_registry
.remove_adapter(&debug_adapter_name);
}
fn unregister_debug_locator(&self, locator_name: Arc<str>) {
self.debug_adapter_registry.remove_locator(&locator_name);
}
}

View File

@@ -1,16 +1,9 @@
use std::{
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
};
use std::{path::PathBuf, sync::Arc};
use anyhow::{Context, Result};
use anyhow::Result;
use async_trait::async_trait;
use dap::{
StartDebuggingRequestArgumentsRequest,
adapters::{
DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName, DebugTaskDefinition,
},
use dap::adapters::{
DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName, DebugTaskDefinition,
};
use extension::{Extension, WorktreeDelegate};
use gpui::AsyncApp;
@@ -19,28 +12,17 @@ use task::{DebugScenario, ZedDebugConfig};
pub(crate) struct ExtensionDapAdapter {
extension: Arc<dyn Extension>,
debug_adapter_name: Arc<str>,
schema: serde_json::Value,
}
impl ExtensionDapAdapter {
pub(crate) fn new(
extension: Arc<dyn extension::Extension>,
debug_adapter_name: Arc<str>,
schema_path: &Path,
) -> Result<Self> {
let schema = std::fs::read_to_string(&schema_path).with_context(|| {
format!(
"Failed to read debug adapter schema for {debug_adapter_name} (from path: `{schema_path:?}`)"
)
})?;
let schema = serde_json::Value::from_str(&schema).with_context(|| {
format!("Debug adapter schema for {debug_adapter_name} is not a valid JSON")
})?;
Ok(Self {
) -> Self {
Self {
extension,
debug_adapter_name,
schema,
})
}
}
}
@@ -79,8 +61,8 @@ impl DebugAdapter for ExtensionDapAdapter {
self.debug_adapter_name.as_ref().into()
}
fn dap_schema(&self) -> serde_json::Value {
self.schema.clone()
async fn dap_schema(&self) -> serde_json::Value {
self.extension.get_dap_schema().await.unwrap_or_default()
}
async fn get_binary(
@@ -100,16 +82,7 @@ impl DebugAdapter for ExtensionDapAdapter {
.await
}
async fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
self.extension.dap_config_to_scenario(zed_scenario).await
}
async fn request_kind(
&self,
config: &serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
self.extension
.dap_request_kind(self.debug_adapter_name.clone(), config.clone())
.await
fn config_from_zed_format(&self, _zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
Err(anyhow::anyhow!("DAP extensions are not implemented yet"))
}
}

View File

@@ -1,50 +0,0 @@
use anyhow::Result;
use async_trait::async_trait;
use dap::{DapLocator, DebugRequest, adapters::DebugAdapterName};
use extension::Extension;
use gpui::SharedString;
use std::sync::Arc;
use task::{DebugScenario, SpawnInTerminal, TaskTemplate};
pub(crate) struct ExtensionLocatorAdapter {
extension: Arc<dyn Extension>,
locator_name: SharedString,
}
impl ExtensionLocatorAdapter {
pub(crate) fn new(extension: Arc<dyn extension::Extension>, locator_name: Arc<str>) -> Self {
Self {
extension,
locator_name: SharedString::from(locator_name),
}
}
}
#[async_trait]
impl DapLocator for ExtensionLocatorAdapter {
fn name(&self) -> SharedString {
self.locator_name.clone()
}
/// Determines whether this locator can generate debug target for given task.
async fn create_scenario(
&self,
build_config: &TaskTemplate,
resolved_label: &str,
adapter: &DebugAdapterName,
) -> Option<DebugScenario> {
self.extension
.dap_locator_create_scenario(
self.locator_name.as_ref().to_owned(),
build_config.clone(),
resolved_label.to_owned(),
adapter.0.as_ref().to_owned(),
)
.await
.ok()
.flatten()
}
async fn run(&self, _build_config: SpawnInTerminal) -> Result<DebugRequest> {
Err(anyhow::anyhow!("Not implemented"))
}
}

View File

@@ -26,7 +26,6 @@ test-support = [
]
[dependencies]
alacritty_terminal.workspace = true
anyhow.workspace = true
client.workspace = true
collections.workspace = true

View File

@@ -228,36 +228,26 @@ impl PickerDelegate for AttachModalDelegate {
}
}
let Some(adapter) = cx.read_global::<DapRegistry, _>(|registry, _| {
registry.adapter(&self.definition.adapter)
let Some(scenario) = cx.read_global::<DapRegistry, _>(|registry, _| {
registry
.adapter(&self.definition.adapter)
.and_then(|adapter| adapter.config_from_zed_format(self.definition.clone()).ok())
}) else {
return;
};
let workspace = self.workspace.clone();
let definition = self.definition.clone();
cx.spawn_in(window, async move |this, cx| {
let Ok(scenario) = adapter.config_from_zed_format(definition).await else {
return;
};
let panel = self
.workspace
.update(cx, |workspace, cx| workspace.panel::<DebugPanel>(cx))
.ok()
.flatten();
if let Some(panel) = panel {
panel.update(cx, |panel, cx| {
panel.start_session(scenario, Default::default(), None, None, window, cx);
});
}
let panel = workspace
.update(cx, |workspace, cx| workspace.panel::<DebugPanel>(cx))
.ok()
.flatten();
if let Some(panel) = panel {
panel
.update_in(cx, |panel, window, cx| {
panel.start_session(scenario, Default::default(), None, None, window, cx);
})
.ok();
}
this.update(cx, |_, cx| {
cx.emit(DismissEvent);
})
.ok();
})
.detach();
cx.emit(DismissEvent);
}
fn dismissed(&mut self, _window: &mut Window, cx: &mut Context<Picker<Self>>) {

View File

@@ -176,7 +176,6 @@ impl DebugPanel {
dap_store.new_session(
scenario.label.clone(),
DebugAdapterName(scenario.adapter.clone()),
task_context.clone(),
None,
cx,
)
@@ -339,13 +338,12 @@ impl DebugPanel {
let adapter = curr_session.read(cx).adapter().clone();
let binary = curr_session.read(cx).binary().cloned().unwrap();
let task = curr_session.update(cx, |session, cx| session.shutdown(cx));
let task_context = curr_session.read(cx).task_context().clone();
cx.spawn_in(window, async move |this, cx| {
task.await;
let (session, task) = dap_store_handle.update(cx, |dap_store, cx| {
let session = dap_store.new_session(label, adapter, task_context, None, cx);
let session = dap_store.new_session(label, adapter, None, cx);
let task = session.update(cx, |session, cx| {
session.boot(binary, worktree, dap_store_handle.downgrade(), cx)
@@ -395,17 +393,11 @@ impl DebugPanel {
log::error!("Attempted to start a child-session without a binary");
return;
};
let task_context = parent_session.read(cx).task_context().clone();
binary.request_args = request.clone();
cx.spawn_in(window, async move |this, cx| {
let (session, task) = dap_store_handle.update(cx, |dap_store, cx| {
let session = dap_store.new_session(
label,
adapter,
task_context,
Some(parent_session.clone()),
cx,
);
let session =
dap_store.new_session(label, adapter, Some(parent_session.clone()), cx);
let task = session.update(cx, |session, cx| {
session.boot(binary, worktree, dap_store_handle.downgrade(), cx)
@@ -942,6 +934,9 @@ impl DebugPanel {
cx: &mut Context<Self>,
) {
debug_assert!(self.sessions.contains(&session_item));
self.project.read(cx).dap_store().update(cx, |this, cx| {
this.set_active_session(&session_item.read(cx).session(cx), cx);
});
session_item.focus_handle(cx).focus(window);
session_item.update(cx, |this, cx| {
this.running_state().update(cx, |this, cx| {

View File

@@ -1,4 +1,3 @@
use anyhow::bail;
use collections::{FxHashMap, HashMap};
use language::LanguageRegistry;
use paths::local_debug_file_relative_path;
@@ -308,16 +307,16 @@ impl NewProcessModal {
}
}
fn debug_scenario(&self, debugger: &str, cx: &App) -> Task<Option<DebugScenario>> {
fn debug_scenario(&self, debugger: &str, cx: &App) -> Option<DebugScenario> {
let request = match self.mode {
NewProcessMode::Launch => {
DebugRequest::Launch(self.configure_mode.read(cx).debug_request(cx))
}
NewProcessMode::Attach => {
DebugRequest::Attach(self.attach_mode.read(cx).debug_request())
}
_ => return Task::ready(None),
};
NewProcessMode::Launch => Some(DebugRequest::Launch(
self.configure_mode.read(cx).debug_request(cx),
)),
NewProcessMode::Attach => Some(DebugRequest::Attach(
self.attach_mode.read(cx).debug_request(),
)),
_ => None,
}?;
let label = suggested_label(&request, debugger);
let stop_on_entry = if let NewProcessMode::Launch = &self.mode {
@@ -329,15 +328,13 @@ impl NewProcessModal {
let session_scenario = ZedDebugConfig {
adapter: debugger.to_owned().into(),
label,
request,
request: request,
stop_on_entry,
};
let adapter = cx
.global::<DapRegistry>()
.adapter(&session_scenario.adapter);
cx.spawn(async move |_| adapter?.config_from_zed_format(session_scenario).await.ok())
cx.global::<DapRegistry>()
.adapter(&session_scenario.adapter)
.and_then(|adapter| adapter.config_from_zed_format(session_scenario).ok())
}
fn start_new_session(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@@ -359,7 +356,12 @@ impl NewProcessModal {
// }
// }
let Some(debugger) = self.debugger.clone() else {
let Some(debugger) = self.debugger.as_ref() else {
return;
};
let Some(config) = self.debug_scenario(debugger, cx) else {
log::error!("debug config not found in mode: {}", self.mode);
return;
};
@@ -367,20 +369,11 @@ impl NewProcessModal {
let Some(task_contexts) = self.task_contexts(cx) else {
return;
};
send_telemetry(&config, TelemetrySpawnLocation::Custom, cx);
let task_context = task_contexts.active_context().cloned().unwrap_or_default();
let worktree_id = task_contexts.worktree();
let mode = self.mode;
cx.spawn_in(window, async move |this, cx| {
let Some(config) = this
.update(cx, |this, cx| this.debug_scenario(&debugger, cx))?
.await
else {
bail!("debug config not found in mode: {mode}");
};
debug_panel.update_in(cx, |debug_panel, window, cx| {
send_telemetry(&config, TelemetrySpawnLocation::Custom, cx);
debug_panel.start_session(config, task_context, None, worktree_id, window, cx)
})?;
this.update(cx, |_, cx| {
@@ -593,7 +586,7 @@ impl NewProcessModal {
static SELECT_DEBUGGER_LABEL: SharedString = SharedString::new_static("Select Debugger");
#[derive(Clone, Copy)]
#[derive(Clone)]
pub(crate) enum NewProcessMode {
Task,
Launch,

View File

@@ -816,13 +816,10 @@ impl RunningState {
Self::relativize_paths(None, &mut config, &task_context);
Self::substitute_variables_in_config(&mut config, &task_context);
let request_type = match dap_registry
let request_type = dap_registry
.adapter(&adapter)
.with_context(|| format!("{}: is not a valid adapter name", &adapter)) {
Ok(adapter) => adapter.request_kind(&config).await,
Err(e) => Err(e)
};
.with_context(|| format!("{}: is not a valid adapter name", &adapter))
.and_then(|adapter| adapter.request_kind(&config));
let config_is_valid = request_type.is_ok();
@@ -850,18 +847,9 @@ impl RunningState {
(task, None)
}
};
let Some(task) = task_template.resolve_task_and_check_cwd("debug-build-task", &task_context, cx.background_executor().clone()) else {
let Some(task) = task_template.resolve_task("debug-build-task", &task_context) else {
anyhow::bail!("Could not resolve task variables within a debug scenario");
};
let task = match task.await {
Ok(task) => task,
Err(e) => {
workspace.update(cx, |workspace, cx| {
workspace.show_error(&e, cx);
}).ok();
return Err(e)
}
};
let locator_name = if let Some(locator_name) = locator_name {
debug_assert!(!config_is_valid);
@@ -970,8 +958,8 @@ impl RunningState {
let scenario = dap_registry
.adapter(&adapter)
.with_context(|| anyhow!("{}: is not a valid adapter name", &adapter))?.config_from_zed_format(zed_config)
.await?;
.with_context(|| anyhow!("{}: is not a valid adapter name", &adapter))
.map(|adapter| adapter.config_from_zed_format(zed_config))??;
config = scenario.config;
Self::substitute_variables_in_config(&mut config, &task_context);
} else {
@@ -1024,8 +1012,7 @@ impl RunningState {
None
};
let mut envs: HashMap<String, String> =
self.session.read(cx).task_context().project_env.clone();
let mut envs: HashMap<String, String> = Default::default();
if let Some(Value::Object(env)) = &request.env {
for (key, value) in env {
let value_str = match (key.as_str(), value) {
@@ -1413,7 +1400,9 @@ impl RunningState {
if self.thread_id.is_some_and(|id| id == thread_id) {
return;
}
self.session.update(cx, |this, cx| {
this.set_active_thread(thread_id, cx);
});
self.thread_id = Some(thread_id);
self.stack_frame_list
@@ -1495,18 +1484,6 @@ impl RunningState {
}
pub(crate) fn shutdown(&mut self, cx: &mut Context<Self>) {
self.workspace
.update(cx, |workspace, cx| {
workspace
.project()
.read(cx)
.breakpoint_store()
.update(cx, |store, cx| {
store.remove_active_position(Some(self.session_id), cx)
})
})
.log_err();
self.session.update(cx, |session, cx| {
session.shutdown(cx).detach();
})
@@ -1517,18 +1494,6 @@ impl RunningState {
return;
};
self.workspace
.update(cx, |workspace, cx| {
workspace
.project()
.read(cx)
.breakpoint_store()
.update(cx, |store, cx| {
store.remove_active_position(Some(self.session_id), cx)
})
})
.log_err();
self.session().update(cx, |state, cx| {
state.terminate_threads(Some(vec![thread_id; 1]), cx);
});

View File

@@ -506,48 +506,44 @@ impl LineBreakpoint {
cx.stop_propagation();
})
.end_hover_slot(
h_flex()
.child(
IconButton::new(
SharedString::from(format!(
"breakpoint-ui-on-click-go-to-line-remove-{:?}/{}:{}",
self.dir, self.name, self.line
)),
IconName::Close,
)
.on_click({
let weak = weak.clone();
let path = path.clone();
move |_, _, cx| {
weak.update(cx, |breakpoint_list, cx| {
breakpoint_list.edit_line_breakpoint(
path.clone(),
row,
BreakpointEditAction::Toggle,
cx,
);
})
.ok();
}
})
.tooltip(move |window, cx| {
Tooltip::for_action_in(
"Unset Breakpoint",
&UnsetBreakpoint,
&focus_handle,
window,
IconButton::new(
SharedString::from(format!(
"breakpoint-ui-on-click-go-to-line-remove-{:?}/{}:{}",
self.dir, self.name, self.line
)),
IconName::Close,
)
.on_click({
let weak = weak.clone();
let path = path.clone();
move |_, _, cx| {
weak.update(cx, |breakpoint_list, cx| {
breakpoint_list.edit_line_breakpoint(
path.clone(),
row,
BreakpointEditAction::Toggle,
cx,
)
);
})
.icon_size(ui::IconSize::XSmall),
.ok();
}
})
.tooltip(move |window, cx| {
Tooltip::for_action_in(
"Unset Breakpoint",
&UnsetBreakpoint,
&focus_handle,
window,
cx,
)
.right_4(),
})
.icon_size(ui::IconSize::Indicator),
)
.child(
v_flex()
.py_1()
.gap_1()
.min_h(px(26.))
.min_h(px(22.))
.justify_center()
.id(SharedString::from(format!(
"breakpoint-ui-on-click-go-to-line-{:?}/{}:{}",
@@ -654,7 +650,7 @@ impl ExceptionBreakpoint {
v_flex()
.py_1()
.gap_1()
.min_h(px(26.))
.min_h(px(22.))
.justify_center()
.id(("exception-breakpoint-label", ix))
.child(

View File

@@ -2,17 +2,13 @@ use super::{
stack_frame_list::{StackFrameList, StackFrameListEvent},
variable_list::VariableList,
};
use alacritty_terminal::vte::ansi;
use anyhow::Result;
use collections::HashMap;
use dap::OutputEvent;
use editor::{
BackgroundHighlight, Bias, CompletionProvider, Editor, EditorElement, EditorStyle, ExcerptId,
};
use editor::{Bias, CompletionProvider, Editor, EditorElement, EditorStyle, ExcerptId};
use fuzzy::StringMatchCandidate;
use gpui::{
Context, Entity, FocusHandle, Focusable, HighlightStyle, Hsla, Render, Subscription, Task,
TextStyle, WeakEntity,
Context, Entity, FocusHandle, Focusable, Render, Subscription, Task, TextStyle, WeakEntity,
};
use language::{Buffer, CodeLabel, ToOffset};
use menu::Confirm;
@@ -21,8 +17,8 @@ use project::{
debugger::session::{CompletionsQuery, OutputToken, Session, SessionEvent},
};
use settings::Settings;
use std::{cell::RefCell, ops::Range, rc::Rc, usize};
use theme::{Theme, ThemeSettings};
use std::{cell::RefCell, rc::Rc, usize};
use theme::ThemeSettings;
use ui::{Divider, prelude::*};
pub struct Console {
@@ -34,8 +30,6 @@ pub struct Console {
stack_frame_list: Entity<StackFrameList>,
last_token: OutputToken,
update_output_task: Task<()>,
ansi_handler: ConsoleHandler,
ansi_processor: ansi::Processor<ansi::StdSyncHandler>,
focus_handle: FocusHandle,
}
@@ -106,8 +100,6 @@ impl Console {
stack_frame_list,
update_output_task: Task::ready(()),
last_token: OutputToken(0),
ansi_handler: Default::default(),
ansi_processor: Default::default(),
focus_handle,
}
}
@@ -143,185 +135,17 @@ impl Console {
window: &mut Window,
cx: &mut App,
) {
let mut to_insert = String::default();
for event in events {
use std::fmt::Write;
_ = write!(to_insert, "{}\n", event.output.trim_end());
}
let len = self.ansi_handler.pos;
self.ansi_processor
.advance(&mut self.ansi_handler, to_insert.as_bytes());
let output = std::mem::take(&mut self.ansi_handler.output);
let mut spans = std::mem::take(&mut self.ansi_handler.spans);
let mut background_spans = std::mem::take(&mut self.ansi_handler.background_spans);
if self.ansi_handler.current_range_start < len + output.len() {
spans.push((
self.ansi_handler.current_range_start..len + output.len(),
self.ansi_handler.current_color,
));
self.ansi_handler.current_range_start = len + output.len();
}
if self.ansi_handler.current_background_range_start < len + output.len() {
background_spans.push((
self.ansi_handler.current_background_range_start..len + output.len(),
self.ansi_handler.current_background_color,
));
self.ansi_handler.current_background_range_start = len + output.len();
}
self.console.update(cx, |console, cx| {
struct ConsoleAnsiHighlight;
let mut to_insert = String::default();
for event in events {
use std::fmt::Write;
_ = write!(to_insert, "{}\n", event.output.trim_end());
}
console.set_read_only(false);
console.move_to_end(&editor::actions::MoveToEnd, window, cx);
console.insert(&output, window, cx);
let buffer = console.buffer().read(cx).snapshot(cx);
let mut highlights = console
.remove_text_highlights::<ConsoleAnsiHighlight>(cx)
.unwrap_or_default();
for (range, color) in spans {
let Some(color) = color else { continue };
let start = range.start + len;
let range = start..range.end + len;
let range = buffer.anchor_after(range.start)..buffer.anchor_before(range.end);
let style = HighlightStyle {
color: Some(terminal_view::terminal_element::convert_color(
&color,
cx.theme(),
)),
..Default::default()
};
highlights.push((range, style));
}
console.highlight_text::<ConsoleAnsiHighlight>(highlights, cx);
let mut background_highlights = console
.clear_background_highlights::<ConsoleAnsiHighlight>(cx)
.unwrap_or_default();
for (range, color) in background_spans {
let Some(color) = color else { continue };
let start = range.start + len;
let range = start..range.end + len;
let range = buffer.anchor_after(range.start)..buffer.anchor_before(range.end);
let color_fetcher: fn(&Theme) -> Hsla = match color {
// Named and theme defined colors
ansi::Color::Named(n) => match n {
ansi::NamedColor::Black => |theme| theme.colors().terminal_ansi_black,
ansi::NamedColor::Red => |theme| theme.colors().terminal_ansi_red,
ansi::NamedColor::Green => |theme| theme.colors().terminal_ansi_green,
ansi::NamedColor::Yellow => |theme| theme.colors().terminal_ansi_yellow,
ansi::NamedColor::Blue => |theme| theme.colors().terminal_ansi_blue,
ansi::NamedColor::Magenta => |theme| theme.colors().terminal_ansi_magenta,
ansi::NamedColor::Cyan => |theme| theme.colors().terminal_ansi_cyan,
ansi::NamedColor::White => |theme| theme.colors().terminal_ansi_white,
ansi::NamedColor::BrightBlack => {
|theme| theme.colors().terminal_ansi_bright_black
}
ansi::NamedColor::BrightRed => {
|theme| theme.colors().terminal_ansi_bright_red
}
ansi::NamedColor::BrightGreen => {
|theme| theme.colors().terminal_ansi_bright_green
}
ansi::NamedColor::BrightYellow => {
|theme| theme.colors().terminal_ansi_bright_yellow
}
ansi::NamedColor::BrightBlue => {
|theme| theme.colors().terminal_ansi_bright_blue
}
ansi::NamedColor::BrightMagenta => {
|theme| theme.colors().terminal_ansi_bright_magenta
}
ansi::NamedColor::BrightCyan => {
|theme| theme.colors().terminal_ansi_bright_cyan
}
ansi::NamedColor::BrightWhite => {
|theme| theme.colors().terminal_ansi_bright_white
}
ansi::NamedColor::Foreground => |theme| theme.colors().terminal_foreground,
ansi::NamedColor::Background => |theme| theme.colors().terminal_background,
ansi::NamedColor::Cursor => |theme| theme.players().local().cursor,
ansi::NamedColor::DimBlack => {
|theme| theme.colors().terminal_ansi_dim_black
}
ansi::NamedColor::DimRed => |theme| theme.colors().terminal_ansi_dim_red,
ansi::NamedColor::DimGreen => {
|theme| theme.colors().terminal_ansi_dim_green
}
ansi::NamedColor::DimYellow => {
|theme| theme.colors().terminal_ansi_dim_yellow
}
ansi::NamedColor::DimBlue => |theme| theme.colors().terminal_ansi_dim_blue,
ansi::NamedColor::DimMagenta => {
|theme| theme.colors().terminal_ansi_dim_magenta
}
ansi::NamedColor::DimCyan => |theme| theme.colors().terminal_ansi_dim_cyan,
ansi::NamedColor::DimWhite => {
|theme| theme.colors().terminal_ansi_dim_white
}
ansi::NamedColor::BrightForeground => {
|theme| theme.colors().terminal_bright_foreground
}
ansi::NamedColor::DimForeground => {
|theme| theme.colors().terminal_dim_foreground
}
},
// 'True' colors
ansi::Color::Spec(_) => |theme| theme.colors().editor_background,
// 8 bit, indexed colors
ansi::Color::Indexed(i) => {
match i {
// 0-15 are the same as the named colors above
0 => |theme| theme.colors().terminal_ansi_black,
1 => |theme| theme.colors().terminal_ansi_red,
2 => |theme| theme.colors().terminal_ansi_green,
3 => |theme| theme.colors().terminal_ansi_yellow,
4 => |theme| theme.colors().terminal_ansi_blue,
5 => |theme| theme.colors().terminal_ansi_magenta,
6 => |theme| theme.colors().terminal_ansi_cyan,
7 => |theme| theme.colors().terminal_ansi_white,
8 => |theme| theme.colors().terminal_ansi_bright_black,
9 => |theme| theme.colors().terminal_ansi_bright_red,
10 => |theme| theme.colors().terminal_ansi_bright_green,
11 => |theme| theme.colors().terminal_ansi_bright_yellow,
12 => |theme| theme.colors().terminal_ansi_bright_blue,
13 => |theme| theme.colors().terminal_ansi_bright_magenta,
14 => |theme| theme.colors().terminal_ansi_bright_cyan,
15 => |theme| theme.colors().terminal_ansi_bright_white,
// 16-231 are a 6x6x6 RGB color cube, mapped to 0-255 using steps defined by XTerm.
// See: https://github.com/xterm-x11/xterm-snapshots/blob/master/256colres.pl
// 16..=231 => {
// let (r, g, b) = rgb_for_index(index as u8);
// rgba_color(
// if r == 0 { 0 } else { r * 40 + 55 },
// if g == 0 { 0 } else { g * 40 + 55 },
// if b == 0 { 0 } else { b * 40 + 55 },
// )
// }
// 232-255 are a 24-step grayscale ramp from (8, 8, 8) to (238, 238, 238).
// 232..=255 => {
// let i = index as u8 - 232; // Align index to 0..24
// let value = i * 10 + 8;
// rgba_color(value, value, value)
// }
// For compatibility with the alacritty::Colors interface
// See: https://github.com/alacritty/alacritty/blob/master/alacritty_terminal/src/term/color.rs
_ => |_| gpui::black(),
}
}
};
background_highlights.push(BackgroundHighlight {
range,
color_fetcher,
});
}
console.highlight_background_ranges::<ConsoleAnsiHighlight>(background_highlights, cx);
console.insert(&to_insert, window, cx);
console.set_read_only(true);
cx.notify();
@@ -635,69 +459,3 @@ impl ConsoleQueryBarCompletionProvider {
})
}
}
#[derive(Default)]
struct ConsoleHandler {
output: String,
spans: Vec<(Range<usize>, Option<ansi::Color>)>,
background_spans: Vec<(Range<usize>, Option<ansi::Color>)>,
current_range_start: usize,
current_background_range_start: usize,
current_color: Option<ansi::Color>,
current_background_color: Option<ansi::Color>,
pos: usize,
}
impl ConsoleHandler {
fn break_span(&mut self, color: Option<ansi::Color>) {
self.spans.push((
self.current_range_start..self.output.len(),
self.current_color,
));
self.current_color = color;
self.current_range_start = self.pos;
}
fn break_background_span(&mut self, color: Option<ansi::Color>) {
self.background_spans.push((
self.current_background_range_start..self.output.len(),
self.current_background_color,
));
self.current_background_color = color;
self.current_background_range_start = self.pos;
}
}
impl ansi::Handler for ConsoleHandler {
fn input(&mut self, c: char) {
self.output.push(c);
self.pos += 1;
}
fn linefeed(&mut self) {
self.output.push('\n');
self.pos += 1;
}
fn put_tab(&mut self, count: u16) {
self.output
.extend(std::iter::repeat('\t').take(count as usize));
self.pos += count as usize;
}
fn terminal_attribute(&mut self, attr: ansi::Attr) {
match attr {
ansi::Attr::Foreground(color) => {
self.break_span(Some(color));
}
ansi::Attr::Background(color) => {
self.break_background_span(Some(color));
}
ansi::Attr::Reset => {
self.break_span(None);
self.break_background_span(None);
}
_ => {}
}
}
}

View File

@@ -353,22 +353,10 @@ impl StackFrameList {
state.thread_id.context("No selected thread ID found")
})??;
this.workspace.update(cx, |workspace, cx| {
let breakpoint_store = workspace.project().read(cx).breakpoint_store();
breakpoint_store.update(cx, |store, cx| {
store.set_active_position(
ActiveStackFrame {
session_id: this.session.read(cx).session_id(),
thread_id,
stack_frame_id,
path: abs_path,
position,
},
cx,
);
})
})
this.session.update(cx, |this, cx| {
this.set_active_stack_frame(stack_frame_id, cx);
});
Ok(())
})?
})
}

View File

@@ -110,7 +110,7 @@ async fn test_handle_output_event(executor: BackgroundExecutor, cx: &mut TestApp
client
.fake_event(dap::messages::Events::Output(dap::OutputEvent {
category: Some(dap::OutputEventCategory::Stdout),
output: "\tSecond output line after thread stopped!".to_string(),
output: "Second output line after thread stopped!".to_string(),
data: None,
variables_reference: None,
source: None,
@@ -124,7 +124,7 @@ async fn test_handle_output_event(executor: BackgroundExecutor, cx: &mut TestApp
client
.fake_event(dap::messages::Events::Output(dap::OutputEvent {
category: Some(dap::OutputEventCategory::Console),
output: "\tSecond console output line after thread stopped!".to_string(),
output: "Second console output line after thread stopped!".to_string(),
data: None,
variables_reference: None,
source: None,
@@ -150,7 +150,7 @@ async fn test_handle_output_event(executor: BackgroundExecutor, cx: &mut TestApp
.unwrap();
assert_eq!(
"First console output line before thread stopped!\nFirst output line before thread stopped!\n\tSecond output line after thread stopped!\n\tSecond console output line after thread stopped!\n",
"First console output line before thread stopped!\nFirst output line before thread stopped!\nSecond output line after thread stopped!\nSecond console output line after thread stopped!\n",
active_session_panel.read(cx).running_state().read(cx).console().read(cx).editor().read(cx).text(cx).as_str()
);
})

View File

@@ -14,7 +14,7 @@ use dap::{
},
};
use editor::{
ActiveDebugLine, Editor, EditorMode, MultiBuffer,
Editor, EditorMode, MultiBuffer, StackFrameMemberLine,
actions::{self},
};
use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
@@ -1596,7 +1596,8 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T
cx.run_until_parked();
main_editor.update_in(cx, |editor, window, cx| {
let active_debug_lines: Vec<_> = editor.highlighted_rows::<ActiveDebugLine>().collect();
let active_debug_lines: Vec<_> =
editor.highlighted_rows::<StackFrameMemberLine>().collect();
assert_eq!(
active_debug_lines.len(),
@@ -1613,7 +1614,8 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T
});
second_editor.update(cx, |editor, _| {
let active_debug_lines: Vec<_> = editor.highlighted_rows::<ActiveDebugLine>().collect();
let active_debug_lines: Vec<_> =
editor.highlighted_rows::<StackFrameMemberLine>().collect();
assert!(
active_debug_lines.is_empty(),
@@ -1671,7 +1673,8 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T
cx.run_until_parked();
second_editor.update_in(cx, |editor, window, cx| {
let active_debug_lines: Vec<_> = editor.highlighted_rows::<ActiveDebugLine>().collect();
let active_debug_lines: Vec<_> =
editor.highlighted_rows::<StackFrameMemberLine>().collect();
assert_eq!(
active_debug_lines.len(),
@@ -1688,7 +1691,8 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T
});
main_editor.update(cx, |editor, _| {
let active_debug_lines: Vec<_> = editor.highlighted_rows::<ActiveDebugLine>().collect();
let active_debug_lines: Vec<_> =
editor.highlighted_rows::<StackFrameMemberLine>().collect();
assert!(
active_debug_lines.is_empty(),
@@ -1711,7 +1715,8 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T
cx.run_until_parked();
second_editor.update(cx, |editor, _| {
let active_debug_lines: Vec<_> = editor.highlighted_rows::<ActiveDebugLine>().collect();
let active_debug_lines: Vec<_> =
editor.highlighted_rows::<StackFrameMemberLine>().collect();
assert!(
active_debug_lines.is_empty(),
@@ -1720,7 +1725,8 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T
});
main_editor.update(cx, |editor, _| {
let active_debug_lines: Vec<_> = editor.highlighted_rows::<ActiveDebugLine>().collect();
let active_debug_lines: Vec<_> =
editor.highlighted_rows::<StackFrameMemberLine>().collect();
assert!(
active_debug_lines.is_empty(),
@@ -1738,7 +1744,8 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T
shutdown_session.await.unwrap();
main_editor.update(cx, |editor, _| {
let active_debug_lines: Vec<_> = editor.highlighted_rows::<ActiveDebugLine>().collect();
let active_debug_lines: Vec<_> =
editor.highlighted_rows::<StackFrameMemberLine>().collect();
assert!(
active_debug_lines.is_empty(),
@@ -1747,7 +1754,8 @@ async fn test_active_debug_line_setting(executor: BackgroundExecutor, cx: &mut T
});
second_editor.update(cx, |editor, _| {
let active_debug_lines: Vec<_> = editor.highlighted_rows::<ActiveDebugLine>().collect();
let active_debug_lines: Vec<_> =
editor.highlighted_rows::<StackFrameMemberLine>().collect();
assert!(
active_debug_lines.is_empty(),

View File

@@ -308,7 +308,6 @@ async fn test_dap_adapter_config_conversion_and_validation(cx: &mut TestAppConte
let debug_scenario = adapter
.config_from_zed_format(adapter_specific_config)
.await
.unwrap_or_else(|_| {
panic!(
"Adapter {} should successfully convert from Zed format",
@@ -324,7 +323,6 @@ async fn test_dap_adapter_config_conversion_and_validation(cx: &mut TestAppConte
let request_type = adapter
.request_kind(&debug_scenario.config)
.await
.unwrap_or_else(|_| {
panic!(
"Adapter {} should validate the config successfully",

View File

@@ -344,7 +344,7 @@ async fn test_select_stack_frame(executor: BackgroundExecutor, cx: &mut TestAppC
let snapshot = editor.snapshot(window, cx);
editor
.highlighted_rows::<editor::ActiveDebugLine>()
.highlighted_rows::<editor::StackFrameMemberLine>()
.map(|(range, _)| {
let start = range.start.to_point(&snapshot.buffer_snapshot);
let end = range.end.to_point(&snapshot.buffer_snapshot);
@@ -412,7 +412,7 @@ async fn test_select_stack_frame(executor: BackgroundExecutor, cx: &mut TestAppC
let snapshot = editor.snapshot(window, cx);
editor
.highlighted_rows::<editor::ActiveDebugLine>()
.highlighted_rows::<editor::StackFrameMemberLine>()
.map(|(range, _)| {
let start = range.start.to_point(&snapshot.buffer_snapshot);
let end = range.end.to_point(&snapshot.buffer_snapshot);

View File

@@ -80,7 +80,7 @@ pub trait ToDisplayPoint {
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint;
}
type TextHighlights = TreeMap<TypeId, Vec<(Range<Anchor>, HighlightStyle)>>;
type TextHighlights = TreeMap<TypeId, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>;
type InlayHighlights = TreeMap<TypeId, TreeMap<InlayId, (HighlightStyle, InlayHighlight)>>;
/// Decides how text in a [`MultiBuffer`] should be displayed in a buffer, handling inlay hints,
@@ -474,9 +474,11 @@ impl DisplayMap {
pub fn highlight_text(
&mut self,
type_id: TypeId,
ranges: Vec<(Range<Anchor>, HighlightStyle)>,
ranges: Vec<Range<Anchor>>,
style: HighlightStyle,
) {
self.text_highlights.insert(type_id, ranges);
self.text_highlights
.insert(type_id, Arc::new((style, ranges)));
}
pub(crate) fn highlight_inlays(
@@ -498,25 +500,16 @@ impl DisplayMap {
}
}
pub fn text_highlights(&self, type_id: TypeId) -> Option<&[(Range<Anchor>, HighlightStyle)]> {
self.text_highlights
.get(&type_id)
.map(|highlights| highlights.as_slice())
pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[Range<Anchor>])> {
let highlights = self.text_highlights.get(&type_id)?;
Some((highlights.0, &highlights.1))
}
pub fn clear_highlights(&mut self, type_id: TypeId) -> bool {
let mut cleared = self.text_highlights.remove(&type_id).is_some();
cleared |= self.inlay_highlights.remove(&type_id).is_some();
cleared
}
pub fn remove_text_highlights(
&mut self,
type_id: TypeId,
) -> Option<Vec<(Range<Anchor>, HighlightStyle)>> {
self.text_highlights.remove(&type_id)
}
pub fn set_font(&self, font: Font, font_size: Pixels, cx: &mut Context<Self>) -> bool {
self.wrap_map
.update(cx, |map, cx| map.set_font_with_size(font, font_size, cx))
@@ -1338,7 +1331,7 @@ impl DisplaySnapshot {
#[cfg(any(test, feature = "test-support"))]
pub fn text_highlight_ranges<Tag: ?Sized + 'static>(
&self,
) -> Option<Vec<(Range<Anchor>, HighlightStyle)>> {
) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> {
let type_id = TypeId::of::<Tag>();
self.text_highlights.get(&type_id).cloned()
}
@@ -2303,17 +2296,12 @@ pub mod tests {
map.highlight_text(
TypeId::of::<usize>(),
vec![
(
buffer_snapshot.anchor_before(Point::new(3, 9))
..buffer_snapshot.anchor_after(Point::new(3, 14)),
red.into(),
),
(
buffer_snapshot.anchor_before(Point::new(3, 17))
..buffer_snapshot.anchor_after(Point::new(3, 18)),
red.into(),
),
buffer_snapshot.anchor_before(Point::new(3, 9))
..buffer_snapshot.anchor_after(Point::new(3, 14)),
buffer_snapshot.anchor_before(Point::new(3, 17))
..buffer_snapshot.anchor_after(Point::new(3, 18)),
],
red.into(),
);
map.insert_blocks(
[BlockProperties {
@@ -2632,13 +2620,11 @@ pub mod tests {
highlighted_ranges
.into_iter()
.map(|range| {
(
buffer_snapshot.anchor_before(range.start)
..buffer_snapshot.anchor_before(range.end),
style,
)
buffer_snapshot.anchor_before(range.start)
..buffer_snapshot.anchor_before(range.end)
})
.collect(),
style,
);
});

View File

@@ -1,16 +1,16 @@
use collections::BTreeMap;
use gpui::HighlightStyle;
use language::Chunk;
use multi_buffer::{MultiBufferChunks, MultiBufferSnapshot, ToOffset as _};
use multi_buffer::{Anchor, MultiBufferChunks, MultiBufferSnapshot, ToOffset as _};
use std::{
any::TypeId,
cmp,
iter::{self, Peekable},
ops::Range,
sync::Arc,
vec,
};
use crate::display_map::TextHighlights;
use sum_tree::TreeMap;
pub struct CustomHighlightsChunks<'a> {
buffer_chunks: MultiBufferChunks<'a>,
@@ -19,15 +19,15 @@ pub struct CustomHighlightsChunks<'a> {
multibuffer_snapshot: &'a MultiBufferSnapshot,
highlight_endpoints: Peekable<vec::IntoIter<HighlightEndpoint>>,
active_highlights: BTreeMap<(TypeId, usize), HighlightStyle>,
text_highlights: Option<&'a TextHighlights>,
active_highlights: BTreeMap<TypeId, HighlightStyle>,
text_highlights: Option<&'a TreeMap<TypeId, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>>,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
struct HighlightEndpoint {
offset: usize,
is_start: bool,
tag: (TypeId, usize),
tag: TypeId,
style: HighlightStyle,
}
@@ -35,7 +35,7 @@ impl<'a> CustomHighlightsChunks<'a> {
pub fn new(
range: Range<usize>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
text_highlights: Option<&'a TreeMap<TypeId, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>>,
multibuffer_snapshot: &'a MultiBufferSnapshot,
) -> Self {
Self {
@@ -66,7 +66,7 @@ impl<'a> CustomHighlightsChunks<'a> {
fn create_highlight_endpoints(
range: &Range<usize>,
text_highlights: Option<&TextHighlights>,
text_highlights: Option<&TreeMap<TypeId, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>>,
buffer: &MultiBufferSnapshot,
) -> iter::Peekable<vec::IntoIter<HighlightEndpoint>> {
let mut highlight_endpoints = Vec::new();
@@ -74,7 +74,10 @@ fn create_highlight_endpoints(
let start = buffer.anchor_after(range.start);
let end = buffer.anchor_after(range.end);
for (&tag, text_highlights) in text_highlights.iter() {
let start_ix = match text_highlights.binary_search_by(|(probe, _)| {
let style = text_highlights.0;
let ranges = &text_highlights.1;
let start_ix = match ranges.binary_search_by(|probe| {
let cmp = probe.end.cmp(&start, &buffer);
if cmp.is_gt() {
cmp::Ordering::Greater
@@ -85,7 +88,7 @@ fn create_highlight_endpoints(
Ok(i) | Err(i) => i,
};
for (ix, (range, style)) in text_highlights[start_ix..].iter().enumerate() {
for range in &ranges[start_ix..] {
if range.start.cmp(&end, &buffer).is_ge() {
break;
}
@@ -93,14 +96,14 @@ fn create_highlight_endpoints(
highlight_endpoints.push(HighlightEndpoint {
offset: range.start.to_offset(&buffer),
is_start: true,
tag: (tag, ix),
style: *style,
tag,
style,
});
highlight_endpoints.push(HighlightEndpoint {
offset: range.end.to_offset(&buffer),
is_start: false,
tag: (tag, ix),
style: *style,
tag,
style,
});
}
}

View File

@@ -1085,7 +1085,7 @@ mod tests {
use project::{InlayHint, InlayHintLabel, ResolveState};
use rand::prelude::*;
use settings::SettingsStore;
use std::{any::TypeId, cmp::Reverse, env};
use std::{any::TypeId, cmp::Reverse, env, sync::Arc};
use sum_tree::TreeMap;
use text::Patch;
use util::post_inc;
@@ -1593,16 +1593,16 @@ mod tests {
log::info!("highlighting text ranges {text_highlight_ranges:?}");
text_highlights.insert(
TypeId::of::<()>(),
text_highlight_ranges
.into_iter()
.map(|range| {
(
Arc::new((
HighlightStyle::default(),
text_highlight_ranges
.into_iter()
.map(|range| {
buffer_snapshot.anchor_before(range.start)
..buffer_snapshot.anchor_after(range.end),
HighlightStyle::default(),
)
})
.collect(),
..buffer_snapshot.anchor_after(range.end)
})
.collect(),
)),
);
let mut inlay_highlights = InlayHighlights::default();

File diff suppressed because it is too large Load Diff

View File

@@ -13697,7 +13697,7 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) {
let mut highlighted_ranges = editor.background_highlights_in_range(
anchor_range(Point::new(3, 4)..Point::new(7, 4)),
&snapshot,
cx.theme(),
cx.theme().colors(),
);
// Enforce a consistent ordering based on color without relying on the ordering of the
// highlight's `TypeId` which is non-executor.
@@ -13727,7 +13727,7 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) {
editor.background_highlights_in_range(
anchor_range(Point::new(5, 6)..Point::new(6, 4)),
&snapshot,
cx.theme(),
cx.theme().colors(),
),
&[(
DisplayPoint::new(DisplayRow(6), 3)..DisplayPoint::new(DisplayRow(6), 5),
@@ -19392,10 +19392,8 @@ async fn test_folding_buffer_when_multibuffer_has_only_one_excerpt(cx: &mut Test
let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
let highlight_range = selection_range.clone().to_anchors(&multi_buffer_snapshot);
editor.highlight_text::<TestHighlight>(
vec![(
highlight_range.clone(),
HighlightStyle::color(Hsla::green()),
)],
vec![highlight_range.clone()],
HighlightStyle::color(Hsla::green()),
cx,
);
editor.change_selections(None, window, cx, |s| s.select_ranges(Some(highlight_range)));
@@ -20336,7 +20334,7 @@ async fn test_rename_with_duplicate_edits(cx: &mut TestAppContext) {
let highlight_range = highlight_range.to_anchors(&editor.buffer().read(cx).snapshot(cx));
editor.highlight_background::<DocumentHighlightRead>(
&[highlight_range],
|c| c.colors().editor_document_highlight_read_background,
|c| c.editor_document_highlight_read_background,
cx,
);
});
@@ -20414,7 +20412,7 @@ async fn test_rename_without_prepare(cx: &mut TestAppContext) {
let highlight_range = highlight_range.to_anchors(&editor.buffer().read(cx).snapshot(cx));
editor.highlight_background::<DocumentHighlightRead>(
&[highlight_range],
|c| c.colors().editor_document_highlight_read_background,
|c| c.editor_document_highlight_read_background,
cx,
);
});

View File

@@ -1085,7 +1085,7 @@ impl EditorElement {
let text_hovered = text_hitbox.is_hovered(window);
let gutter_hovered = gutter_hitbox.is_hovered(window);
editor.set_gutter_hovered(gutter_hovered, cx);
editor.show_mouse_cursor(cx);
editor.mouse_cursor_hidden = false;
let point_for_position = position_map.point_for_position(event.position);
let valid_point = point_for_position.previous_valid;
@@ -6105,7 +6105,7 @@ impl EditorElement {
);
}
for (background_highlight_id, background_highlights) in
for (background_highlight_id, (_, background_ranges)) in
background_highlights.iter()
{
let is_search_highlights = *background_highlight_id
@@ -6124,22 +6124,18 @@ impl EditorElement {
if is_symbol_occurrences {
color.fade_out(0.5);
}
let marker_row_ranges =
background_highlights.iter().map(|highlight| {
let display_start = highlight
.range
.start
.to_display_point(&snapshot.display_snapshot);
let display_end = highlight
.range
.end
.to_display_point(&snapshot.display_snapshot);
ColoredRange {
start: display_start.row(),
end: display_end.row(),
color,
}
});
let marker_row_ranges = background_ranges.iter().map(|range| {
let display_start = range
.start
.to_display_point(&snapshot.display_snapshot);
let display_end =
range.end.to_display_point(&snapshot.display_snapshot);
ColoredRange {
start: display_start.row(),
end: display_end.row(),
color,
}
});
marker_quads.extend(
scrollbar_layout
.marker_quads_for_ranges(marker_row_ranges, Some(1)),
@@ -8037,7 +8033,7 @@ impl Element for EditorElement {
editor.read(cx).background_highlights_in_range(
start_anchor..end_anchor,
&snapshot.display_snapshot,
cx.theme(),
cx.theme().colors(),
)
})
.unwrap_or_default();

View File

@@ -19,11 +19,6 @@ pub fn refresh_matching_bracket_highlights(
let snapshot = editor.snapshot(window, cx);
let head = newest_selection.head();
if head > snapshot.buffer_snapshot.len() {
log::error!("bug: cursor offset is out of range while refreshing bracket highlights");
return;
}
let mut tail = head;
if (editor.cursor_shape == CursorShape::Block || editor.cursor_shape == CursorShape::Hollow)
&& head < snapshot.buffer_snapshot.len()
@@ -40,7 +35,7 @@ pub fn refresh_matching_bracket_highlights(
opening_range.to_anchors(&snapshot.buffer_snapshot),
closing_range.to_anchors(&snapshot.buffer_snapshot),
],
|theme| theme.colors().editor_document_highlight_bracket_background,
|theme| theme.editor_document_highlight_bracket_background,
cx,
)
}

View File

@@ -635,7 +635,7 @@ pub fn show_link_definition(
match highlight_range {
RangeInEditor::Text(text_range) => editor
.highlight_text::<HoveredLinkState>(vec![(text_range, style)], cx),
.highlight_text::<HoveredLinkState>(vec![text_range], style, cx),
RangeInEditor::Inlay(highlight) => editor
.highlight_inlays::<HoveredLinkState>(vec![highlight], style, cx),
}
@@ -1403,6 +1403,7 @@ mod tests {
let snapshot = editor.snapshot(window, cx);
let actual_ranges = snapshot
.text_highlight_ranges::<HoveredLinkState>()
.map(|ranges| ranges.as_ref().clone().1)
.unwrap_or_default();
assert!(actual_ranges.is_empty(), "When no cmd is pressed, should have no hint label selected, but got: {actual_ranges:?}");
@@ -1634,6 +1635,7 @@ mod tests {
.snapshot(window, cx)
.text_highlight_ranges::<HoveredLinkState>()
.unwrap_or_default()
.1
.is_empty()
);
});
@@ -1840,6 +1842,7 @@ mod tests {
.snapshot(window, cx)
.text_highlight_ranges::<HoveredLinkState>()
.unwrap_or_default()
.1
.is_empty()
);
});

View File

@@ -520,7 +520,7 @@ fn show_hover(
// Highlight the selected symbol using a background highlight
editor.highlight_background::<HoverState>(
&hover_highlights,
|theme| theme.colors().element_hover, // todo update theme
|theme| theme.element_hover, // todo update theme
cx,
);
}

View File

@@ -1432,11 +1432,8 @@ impl SearchableItem for Editor {
fn get_matches(&self, _window: &mut Window, _: &mut App) -> Vec<Range<Anchor>> {
self.background_highlights
.get(&TypeId::of::<BufferSearchHighlights>())
.map_or(Vec::new(), |highlights| {
highlights
.iter()
.map(|highlight| highlight.range.clone())
.collect()
.map_or(Vec::new(), |(_color, ranges)| {
ranges.iter().cloned().collect()
})
}
@@ -1455,14 +1452,14 @@ impl SearchableItem for Editor {
_: &mut Window,
cx: &mut Context<Self>,
) {
let existing_ranges = self
let existing_range = self
.background_highlights
.get(&TypeId::of::<BufferSearchHighlights>())
.map(|highlights| highlights.iter().map(|highlight| &highlight.range));
let updated = !existing_ranges.is_some_and(|existing_ranges| existing_ranges.eq(matches));
.map(|(_, range)| range.as_ref());
let updated = existing_range != Some(matches);
self.highlight_background::<BufferSearchHighlights>(
matches,
|theme| theme.colors().search_match_background,
|theme| theme.search_match_background,
cx,
);
if updated {
@@ -1483,12 +1480,7 @@ impl SearchableItem for Editor {
if self.has_filtered_search_ranges() {
self.previous_search_ranges = self
.clear_background_highlights::<SearchWithinRange>(cx)
.map(|highlights| {
highlights
.iter()
.map(|highlight| highlight.range.clone())
.collect()
})
.map(|(_, ranges)| ranges)
}
if !enabled {
@@ -1710,11 +1702,8 @@ impl SearchableItem for Editor {
let search_within_ranges = self
.background_highlights
.get(&TypeId::of::<SearchWithinRange>())
.map_or(vec![], |highlights| {
highlights
.iter()
.map(|highlight| highlight.range.clone())
.collect::<Vec<_>>()
.map_or(vec![], |(_color, ranges)| {
ranges.iter().cloned().collect::<Vec<_>>()
});
cx.background_spawn(async move {

View File

@@ -81,9 +81,9 @@ impl SelectionsCollection {
count
}
/// The non-pending, non-overlapping selections. There could be a pending selection that
/// overlaps these if the mouse is being dragged, etc. This could also be empty if there is a
/// pending selection. Returned as selections over Anchors.
/// The non-pending, non-overlapping selections. There could still be a pending
/// selection that overlaps these if the mouse is being dragged, etc. Returned as
/// selections over Anchors.
pub fn disjoint_anchors(&self) -> Arc<[Selection<Anchor>]> {
self.disjoint.clone()
}
@@ -94,20 +94,6 @@ impl SelectionsCollection {
(0..disjoint.len()).map(move |ix| disjoint[ix].range())
}
/// Non-overlapping selections using anchors, including the pending selection.
pub fn all_anchors(&self, cx: &mut App) -> Arc<[Selection<Anchor>]> {
if self.pending.is_none() {
self.disjoint_anchors()
} else {
let all_offset_selections = self.all::<usize>(cx);
let buffer = self.buffer(cx);
all_offset_selections
.into_iter()
.map(|selection| selection_to_anchor_selection(selection, &buffer))
.collect()
}
}
pub fn pending_anchor(&self) -> Option<Selection<Anchor>> {
self.pending
.as_ref()
@@ -548,11 +534,21 @@ impl<'a> MutableSelectionsCollection<'a> {
}
}
self.collection.disjoint = Arc::from_iter(
selections
.into_iter()
.map(|selection| selection_to_anchor_selection(selection, &buffer)),
);
self.collection.disjoint = Arc::from_iter(selections.into_iter().map(|selection| {
let end_bias = if selection.end > selection.start {
Bias::Left
} else {
Bias::Right
};
Selection {
id: selection.id,
start: buffer.anchor_after(selection.start),
end: buffer.anchor_at(selection.end, end_bias),
reversed: selection.reversed,
goal: selection.goal,
}
}));
self.collection.pending = None;
self.selections_changed = true;
}
@@ -884,27 +880,6 @@ impl DerefMut for MutableSelectionsCollection<'_> {
}
}
fn selection_to_anchor_selection<T>(
selection: Selection<T>,
buffer: &MultiBufferSnapshot,
) -> Selection<Anchor>
where
T: ToOffset + Ord,
{
let end_bias = if selection.end > selection.start {
Bias::Left
} else {
Bias::Right
};
Selection {
id: selection.id,
start: buffer.anchor_after(selection.start),
end: buffer.anchor_at(selection.end, end_bias),
reversed: selection.reversed,
goal: selection.goal,
}
}
// Panics if passed selections are not in order
fn resolve_selections_display<'a>(
selections: impl 'a + IntoIterator<Item = &'a Selection<Anchor>>,

View File

@@ -510,9 +510,10 @@ impl EditorTestContext {
editor
.background_highlights
.get(&TypeId::of::<Tag>())
.into_iter()
.flat_map(|highlights| highlights.as_slice())
.map(|highlight| highlight.range.to_offset(&snapshot.buffer_snapshot))
.map(|h| h.1.clone())
.unwrap_or_default()
.iter()
.map(|range| range.to_offset(&snapshot.buffer_snapshot))
.collect()
});
assert_set_eq!(actual_ranges, expected_ranges);
@@ -524,12 +525,7 @@ impl EditorTestContext {
let snapshot = self.update_editor(|editor, window, cx| editor.snapshot(window, cx));
let actual_ranges: Vec<Range<usize>> = snapshot
.text_highlight_ranges::<Tag>()
.map(|ranges| {
ranges
.iter()
.map(|(range, _)| range.clone())
.collect::<Vec<_>>()
})
.map(|ranges| ranges.as_ref().clone().1)
.unwrap_or_default()
.into_iter()
.map(|range| range.to_offset(&snapshot.buffer_snapshot))

View File

@@ -1,74 +0,0 @@
use agent_settings::AgentProfileId;
use anyhow::Result;
use async_trait::async_trait;
use crate::example::{Example, ExampleContext, ExampleMetadata, JudgeAssertion};
pub struct FileChangeNotificationExample;
#[async_trait(?Send)]
impl Example for FileChangeNotificationExample {
fn meta(&self) -> ExampleMetadata {
ExampleMetadata {
name: "file_change_notification".to_string(),
url: "https://github.com/octocat/hello-world".to_string(),
revision: "7fd1a60b01f91b314f59955a4e4d4e80d8edf11d".to_string(),
language_server: None,
max_assertions: Some(1),
profile_id: AgentProfileId::default(),
existing_thread_json: None,
max_turns: Some(3),
}
}
async fn conversation(&self, cx: &mut ExampleContext) -> Result<()> {
// Track README so that the model gets notified of its changes
let project_path = cx.agent_thread().read_with(cx, |thread, cx| {
thread
.project()
.read(cx)
.find_project_path("README", cx)
.expect("README file should exist in this repo")
})?;
let buffer = {
cx.agent_thread()
.update(cx, |thread, cx| {
thread
.project()
.update(cx, |project, cx| project.open_buffer(project_path, cx))
})?
.await?
};
cx.agent_thread().update(cx, |thread, cx| {
thread.action_log().update(cx, |action_log, cx| {
action_log.buffer_read(buffer.clone(), cx);
});
})?;
// Start conversation (specific message is not important)
cx.push_user_message("Find all files in this repo");
cx.run_turn().await?;
// Edit the README buffer - the model should get a notification on next turn
buffer.update(cx, |buffer, cx| {
buffer.edit([(0..buffer.len(), "Surprise!")], None, cx);
})?;
// Run for some more turns.
// The model shouldn't thank us for letting it know about the file change.
cx.run_turns(3).await?;
Ok(())
}
fn thread_assertions(&self) -> Vec<JudgeAssertion> {
vec![JudgeAssertion {
id: "change-file-notification".into(),
description:
"Agent should not acknowledge or mention anything about files that have been changed"
.into(),
}]
}
}

View File

@@ -15,7 +15,6 @@ use crate::example::{Example, ExampleContext, ExampleMetadata, JudgeAssertion};
mod add_arg_to_trait_method;
mod code_block_citations;
mod comment_translation;
mod file_change_notification;
mod file_search;
mod grep_params_escapement;
mod overwrite_file;
@@ -29,7 +28,6 @@ pub fn all(examples_dir: &Path) -> Vec<Rc<dyn Example>> {
Rc::new(planets::Planets),
Rc::new(comment_translation::CommentTranslation),
Rc::new(overwrite_file::FileOverwriteExample),
Rc::new(file_change_notification::FileChangeNotificationExample),
Rc::new(grep_params_escapement::GrepParamsEscapementExample),
];

View File

@@ -367,13 +367,7 @@ impl ExampleInstance {
});
})?;
let mut example_cx = ExampleContext::new(
meta.clone(),
this.log_prefix.clone(),
thread.clone(),
model.clone(),
cx.clone(),
);
let mut example_cx = ExampleContext::new(meta.clone(), this.log_prefix.clone(), thread.clone(), model.clone(), cx.clone());
let result = this.thread.conversation(&mut example_cx).await;
if let Err(err) = result {

View File

@@ -14,7 +14,6 @@ use fs::normalize_path;
use gpui::{App, Task};
use language::LanguageName;
use semantic_version::SemanticVersion;
use task::{SpawnInTerminal, ZedDebugConfig};
pub use crate::extension_events::*;
pub use crate::extension_host_proxy::*;
@@ -145,26 +144,7 @@ pub trait Extension: Send + Sync + 'static {
worktree: Arc<dyn WorktreeDelegate>,
) -> Result<DebugAdapterBinary>;
async fn dap_request_kind(
&self,
dap_name: Arc<str>,
config: serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest>;
async fn dap_config_to_scenario(&self, config: ZedDebugConfig) -> Result<DebugScenario>;
async fn dap_locator_create_scenario(
&self,
locator_name: String,
build_config_template: BuildTaskTemplate,
resolved_label: String,
debug_adapter_name: String,
) -> Result<Option<DebugScenario>>;
async fn run_dap_locator(
&self,
locator_name: String,
config: SpawnInTerminal,
) -> Result<DebugRequest>;
async fn get_dap_schema(&self) -> Result<serde_json::Value>;
}
pub fn parse_wasm_extension_version(

View File

@@ -12,7 +12,6 @@ use std::{
env, fs, mem,
path::{Path, PathBuf},
process::Stdio,
str::FromStr,
sync::Arc,
};
use wasm_encoder::{ComponentSectionId, Encode as _, RawSection, Section as _};
@@ -98,22 +97,6 @@ impl ExtensionBuilder {
log::info!("compiled Rust extension {}", extension_dir.display());
}
for (debug_adapter_name, meta) in &mut extension_manifest.debug_adapters {
let debug_adapter_relative_schema_path =
meta.schema_path.clone().unwrap_or_else(|| {
Path::new("debug_adapter_schemas")
.join(Path::new(debug_adapter_name.as_ref()).with_extension("json"))
});
let debug_adapter_schema_path = extension_dir.join(debug_adapter_relative_schema_path);
let debug_adapter_schema = fs::read_to_string(&debug_adapter_schema_path)
.with_context(|| {
format!("failed to read debug adapter schema for `{debug_adapter_name}` from `{debug_adapter_schema_path:?}`")
})?;
_ = serde_json::Value::from_str(&debug_adapter_schema).with_context(|| {
format!("Debug adapter schema for `{debug_adapter_name}` (path: `{debug_adapter_schema_path:?}`) is not a valid JSON")
})?;
}
for (grammar_name, grammar_metadata) in &extension_manifest.grammars {
let snake_cased_grammar_name = grammar_name.to_snake_case();
if grammar_name.as_ref() != snake_cased_grammar_name.as_str() {

View File

@@ -1,4 +1,4 @@
use std::path::{Path, PathBuf};
use std::path::PathBuf;
use std::sync::Arc;
use anyhow::Result;
@@ -411,50 +411,15 @@ impl ExtensionIndexedDocsProviderProxy for ExtensionHostProxy {
}
pub trait ExtensionDebugAdapterProviderProxy: Send + Sync + 'static {
fn register_debug_adapter(
&self,
extension: Arc<dyn Extension>,
debug_adapter_name: Arc<str>,
schema_path: &Path,
);
fn register_debug_locator(&self, extension: Arc<dyn Extension>, locator_name: Arc<str>);
fn unregister_debug_adapter(&self, debug_adapter_name: Arc<str>);
fn unregister_debug_locator(&self, locator_name: Arc<str>);
fn register_debug_adapter(&self, extension: Arc<dyn Extension>, debug_adapter_name: Arc<str>);
}
impl ExtensionDebugAdapterProviderProxy for ExtensionHostProxy {
fn register_debug_adapter(
&self,
extension: Arc<dyn Extension>,
debug_adapter_name: Arc<str>,
schema_path: &Path,
) {
fn register_debug_adapter(&self, extension: Arc<dyn Extension>, debug_adapter_name: Arc<str>) {
let Some(proxy) = self.debug_adapter_provider_proxy.read().clone() else {
return;
};
proxy.register_debug_adapter(extension, debug_adapter_name, schema_path)
}
fn register_debug_locator(&self, extension: Arc<dyn Extension>, locator_name: Arc<str>) {
let Some(proxy) = self.debug_adapter_provider_proxy.read().clone() else {
return;
};
proxy.register_debug_locator(extension, locator_name)
}
fn unregister_debug_adapter(&self, debug_adapter_name: Arc<str>) {
let Some(proxy) = self.debug_adapter_provider_proxy.read().clone() else {
return;
};
proxy.unregister_debug_adapter(debug_adapter_name)
}
fn unregister_debug_locator(&self, locator_name: Arc<str>) {
let Some(proxy) = self.debug_adapter_provider_proxy.read().clone() else {
return;
};
proxy.unregister_debug_locator(locator_name)
proxy.register_debug_adapter(extension, debug_adapter_name)
}
}

View File

@@ -88,9 +88,7 @@ pub struct ExtensionManifest {
#[serde(default)]
pub capabilities: Vec<ExtensionCapability>,
#[serde(default)]
pub debug_adapters: BTreeMap<Arc<str>, DebugAdapterManifestEntry>,
#[serde(default)]
pub debug_locators: BTreeMap<Arc<str>, DebugLocatorManifestEntry>,
pub debug_adapters: Vec<Arc<str>>,
}
impl ExtensionManifest {
@@ -210,14 +208,6 @@ pub struct SlashCommandManifestEntry {
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct IndexedDocsProviderEntry {}
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct DebugAdapterManifestEntry {
pub schema_path: Option<PathBuf>,
}
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct DebugLocatorManifestEntry {}
impl ExtensionManifest {
pub async fn load(fs: Arc<dyn Fs>, extension_dir: &Path) -> Result<Self> {
let extension_name = extension_dir
@@ -286,8 +276,7 @@ fn manifest_from_old_manifest(
indexed_docs_providers: BTreeMap::default(),
snippets: None,
capabilities: Vec::new(),
debug_adapters: Default::default(),
debug_locators: Default::default(),
debug_adapters: vec![],
}
}
@@ -316,7 +305,6 @@ mod tests {
snippets: None,
capabilities: vec![],
debug_adapters: Default::default(),
debug_locators: Default::default(),
}
}

View File

@@ -2,7 +2,4 @@ pub use dap::{
StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
adapters::{DebugAdapterBinary, DebugTaskDefinition, TcpArguments},
};
pub use task::{
AttachRequest, BuildTaskDefinition, DebugRequest, DebugScenario, LaunchRequest,
TaskTemplate as BuildTaskTemplate, TcpArgumentsTemplate,
};
pub use task::{AttachRequest, DebugRequest, LaunchRequest, TcpArgumentsTemplate};

View File

@@ -20,10 +20,9 @@ pub use wit::{
make_file_executable,
zed::extension::context_server::ContextServerConfiguration,
zed::extension::dap::{
AttachRequest, BuildTaskDefinition, BuildTaskDefinitionTemplatePayload, BuildTaskTemplate,
DebugAdapterBinary, DebugConfig, DebugRequest, DebugScenario, DebugTaskDefinition,
LaunchRequest, StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
TaskTemplate, TcpArguments, TcpArgumentsTemplate, resolve_tcp_template,
DebugAdapterBinary, DebugTaskDefinition, StartDebuggingRequestArguments,
StartDebuggingRequestArgumentsRequest, TcpArguments, TcpArgumentsTemplate,
resolve_tcp_template,
},
zed::extension::github::{
GithubRelease, GithubReleaseAsset, GithubReleaseOptions, github_release_by_tag_name,
@@ -199,65 +198,14 @@ pub trait Extension: Send + Sync {
&mut self,
_adapter_name: String,
_config: DebugTaskDefinition,
_user_provided_debug_adapter_path: Option<String>,
_user_provided_path: Option<String>,
_worktree: &Worktree,
) -> Result<DebugAdapterBinary, String> {
Err("`get_dap_binary` not implemented".to_string())
}
/// Determines whether the specified adapter configuration should *launch* a new debuggee process
/// or *attach* to an existing one. This function should not perform any further validation (outside of determining the kind of a request).
/// This function should return an error when the kind cannot be determined (rather than fall back to a known default).
fn dap_request_kind(
&mut self,
_adapter_name: String,
_config: serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest, String> {
Err("`dap_request_kind` not implemented".to_string())
}
/// Converts a high-level definition of a debug scenario (originating in a new session UI) to a "low-level" configuration suitable for a particular adapter.
///
/// In layman's terms: given a program, list of arguments, current working directory and environment variables,
/// create a configuration that can be used to start a debug session.
fn dap_config_to_scenario(&mut self, _config: DebugConfig) -> Result<DebugScenario, String> {
Err("`dap_config_to_scenario` not implemented".to_string())
}
/// Locators are entities that convert a Zed task into a debug scenario.
///
/// They can be provided even by extensions that don't provide a debug adapter.
/// For all tasks applicable to a given buffer, Zed will query all locators to find one that can turn the task into a debug scenario.
/// A converted debug scenario can include a build task (it shouldn't contain any configuration in such case); a build task result will later
/// be resolved with [`Extension::run_dap_locator`].
///
/// To work through a real-world example, take a `cargo run` task and a hypothetical `cargo` locator:
/// 1. We may need to modify the task; in this case, it is problematic that `cargo run` spawns a binary. We should turn `cargo run` into a debug scenario with
/// `cargo build` task. This is the decision we make at `dap_locator_create_scenario` scope.
/// 2. Then, after the build task finishes, we will run `run_dap_locator` of the locator that produced the build task to find the program to be debugged. This function
/// should give us a debugger-agnostic configuration for launching a debug target (that we end up resolving with [`Extension::dap_config_to_scenario`]). It's almost as if the user
/// found the artifact path by themselves.
///
/// Note that you're not obliged to use build tasks with locators. Specifically, it is sufficient to provide a debug configuration directly in the return value of
/// `dap_locator_create_scenario` if you're able to do that. Make sure to not fill out `build` field in that case, as that will prevent Zed from running second phase of resolution in such case.
/// This might be of particular relevance to interpreted languages.
fn dap_locator_create_scenario(
&mut self,
_locator_name: String,
_build_task: TaskTemplate,
_resolved_label: String,
_debug_adapter_name: String,
) -> Option<DebugScenario> {
None
}
/// Runs the second phase of locator resolution.
/// See [`Extension::dap_locator_create_scenario`] for a hefty comment on locators.
fn run_dap_locator(
&mut self,
_locator_name: String,
_build_task: TaskTemplate,
) -> Result<DebugRequest, String> {
Err("`run_dap_locator` not implemented".to_string())
fn dap_schema(&mut self) -> Result<serde_json::Value, String> {
Err("`dap_schema` not implemented".to_string())
}
}
@@ -453,36 +401,8 @@ impl wit::Guest for Component {
extension().get_dap_binary(adapter_name, config, user_installed_path, worktree)
}
fn dap_request_kind(
adapter_name: String,
config: String,
) -> Result<StartDebuggingRequestArgumentsRequest, String> {
extension().dap_request_kind(
adapter_name,
serde_json::from_str(&config).map_err(|e| format!("Failed to parse config: {e}"))?,
)
}
fn dap_config_to_scenario(config: DebugConfig) -> Result<DebugScenario, String> {
extension().dap_config_to_scenario(config)
}
fn dap_locator_create_scenario(
locator_name: String,
build_task: TaskTemplate,
resolved_label: String,
debug_adapter_name: String,
) -> Option<DebugScenario> {
extension().dap_locator_create_scenario(
locator_name,
build_task,
resolved_label,
debug_adapter_name,
)
}
fn run_dap_locator(
locator_name: String,
build_task: TaskTemplate,
) -> Result<DebugRequest, String> {
extension().run_dap_locator(locator_name, build_task)
fn dap_schema() -> Result<String, String> {
extension().dap_schema().map(|schema| schema.to_string())
}
}

View File

@@ -32,55 +32,10 @@ interface dap {
timeout: option<u64>,
}
/// Debug Config is the "highest-level" configuration for a debug session.
/// It comes from a new session modal UI; thus, it is essentially debug-adapter-agnostic.
/// It is expected of the extension to translate this generic configuration into something that can be debugged by the adapter (debug scenario).
record debug-config {
/// Name of the debug task
record debug-task-definition {
label: string,
/// The debug adapter to use
adapter: string,
request: debug-request,
stop-on-entry: option<bool>,
}
record task-template {
/// Human readable name of the task to display in the UI.
label: string,
/// Executable command to spawn.
command: string,
args: list<string>,
env: env-vars,
cwd: option<string>,
}
/// A task template with substituted task variables.
type resolved-task = task-template;
/// A task template for building a debug target.
type build-task-template = task-template;
variant build-task-definition {
by-name(string),
template(build-task-definition-template-payload )
}
record build-task-definition-template-payload {
locator-name: option<string>,
template: build-task-template
}
/// Debug Scenario is the user-facing configuration type (used in debug.json). It is still concerned with what to debug and not necessarily how to do it (except for any
/// debug-adapter-specific configuration options).
record debug-scenario {
/// Unsubstituted label for the task.DebugAdapterBinary
label: string,
/// Name of the Debug Adapter this configuration is intended for.
adapter: string,
/// An optional build step to be ran prior to starting a debug session. Build steps are used by Zed's locators to locate the executable to debug.
build: option<build-task-definition>,
/// JSON-encoded configuration for a given debug adapter.
config: string,
/// TCP connection parameters (if they were specified by user)
tcp-connection: option<tcp-arguments-template>,
}
@@ -89,34 +44,16 @@ interface dap {
attach,
}
record debug-task-definition {
/// Unsubstituted label for the task.DebugAdapterBinary
label: string,
/// Name of the Debug Adapter this configuration is intended for.
adapter: string,
/// JSON-encoded configuration for a given debug adapter.
config: string,
/// TCP connection parameters (if they were specified by user)
tcp-connection: option<tcp-arguments-template>,
}
record start-debugging-request-arguments {
/// JSON-encoded configuration for a given debug adapter. It is specific to each debug adapter.
/// `configuration` will have it's Zed variable references substituted prior to being passed to the debug adapter.
configuration: string,
request: start-debugging-request-arguments-request,
}
/// The lowest-level representation of a debug session, which specifies:
/// - How to start a debug adapter process
/// - How to start a debug session with it (using DAP protocol)
/// for a given debug scenario.
record debug-adapter-binary {
command: option<string>,
arguments: list<string>,
envs: env-vars,
cwd: option<string>,
/// Zed will use TCP transport if `connection` is specified.
connection: option<tcp-arguments>,
request-args: start-debugging-request-arguments
}

View File

@@ -11,7 +11,7 @@ world extension {
use common.{env-vars, range};
use context-server.{context-server-configuration};
use dap.{attach-request, build-task-template, debug-config, debug-adapter-binary, debug-task-definition, debug-request, debug-scenario, launch-request, resolved-task, start-debugging-request-arguments-request};
use dap.{debug-adapter-binary, debug-task-definition, debug-request};
use lsp.{completion, symbol};
use process.{command};
use slash-command.{slash-command, slash-command-argument-completion, slash-command-output};
@@ -159,9 +159,6 @@ world extension {
/// Returns a configured debug adapter binary for a given debug task.
export get-dap-binary: func(adapter-name: string, config: debug-task-definition, user-installed-path: option<string>, worktree: borrow<worktree>) -> result<debug-adapter-binary, string>;
/// Returns the kind of a debug scenario (launch or attach).
export dap-request-kind: func(adapter-name: string, config: string) -> result<start-debugging-request-arguments-request, string>;
export dap-config-to-scenario: func(config: debug-config) -> result<debug-scenario, string>;
export dap-locator-create-scenario: func(locator-name: string, build-config-template: build-task-template, resolved-label: string, debug-adapter-name: string) -> option<debug-scenario>;
export run-dap-locator: func(locator-name: string, config: resolved-task) -> result<debug-request, string>;
/// Get a debug adapter's configuration schema
export dap-schema: func() -> result<string, string>;
}

View File

@@ -139,7 +139,6 @@ fn manifest() -> ExtensionManifest {
args: vec!["hello!".into()],
}],
debug_adapters: Default::default(),
debug_locators: Default::default(),
}
}

View File

@@ -1149,12 +1149,6 @@ impl ExtensionStore {
for (server_id, _) in extension.manifest.context_servers.iter() {
self.proxy.unregister_context_server(server_id.clone(), cx);
}
for (adapter, _) in extension.manifest.debug_adapters.iter() {
self.proxy.unregister_debug_adapter(adapter.clone());
}
for (locator, _) in extension.manifest.debug_locators.iter() {
self.proxy.unregister_debug_locator(locator.clone());
}
}
self.wasm_extensions
@@ -1350,26 +1344,9 @@ impl ExtensionStore {
.register_indexed_docs_provider(extension.clone(), provider_id.clone());
}
for (debug_adapter, meta) in &manifest.debug_adapters {
let mut path = root_dir.clone();
path.push(Path::new(manifest.id.as_ref()));
if let Some(schema_path) = &meta.schema_path {
path.push(schema_path);
} else {
path.push("debug_adapter_schemas");
path.push(Path::new(debug_adapter.as_ref()).with_extension("json"));
}
this.proxy.register_debug_adapter(
extension.clone(),
debug_adapter.clone(),
&path,
);
}
for debug_adapter in manifest.debug_locators.keys() {
for debug_adapter in &manifest.debug_adapters {
this.proxy
.register_debug_locator(extension.clone(), debug_adapter.clone());
.register_debug_adapter(extension.clone(), debug_adapter.clone());
}
}

View File

@@ -163,7 +163,6 @@ async fn test_extension_store(cx: &mut TestAppContext) {
snippets: None,
capabilities: Vec::new(),
debug_adapters: Default::default(),
debug_locators: Default::default(),
}),
dev: false,
},
@@ -194,7 +193,6 @@ async fn test_extension_store(cx: &mut TestAppContext) {
snippets: None,
capabilities: Vec::new(),
debug_adapters: Default::default(),
debug_locators: Default::default(),
}),
dev: false,
},
@@ -370,7 +368,6 @@ async fn test_extension_store(cx: &mut TestAppContext) {
snippets: None,
capabilities: Vec::new(),
debug_adapters: Default::default(),
debug_locators: Default::default(),
}),
dev: false,
},

View File

@@ -3,7 +3,6 @@ pub mod wit;
use crate::ExtensionManifest;
use anyhow::{Context as _, Result, anyhow, bail};
use async_trait::async_trait;
use dap::{DebugRequest, StartDebuggingRequestArgumentsRequest};
use extension::{
CodeLabel, Command, Completion, ContextServerConfiguration, DebugAdapterBinary,
DebugTaskDefinition, ExtensionHostProxy, KeyValueStoreDelegate, ProjectDelegate, SlashCommand,
@@ -33,7 +32,6 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
use task::{DebugScenario, SpawnInTerminal, TaskTemplate, ZedDebugConfig};
use wasmtime::{
CacheStore, Engine, Store,
component::{Component, ResourceTable},
@@ -401,71 +399,14 @@ impl extension::Extension for WasmExtension {
})
.await
}
async fn dap_request_kind(
&self,
dap_name: Arc<str>,
config: serde_json::Value,
) -> Result<StartDebuggingRequestArgumentsRequest> {
self.call(|extension, store| {
async move {
let kind = extension
.call_dap_request_kind(store, dap_name, config)
.await?
.map_err(|err| store.data().extension_error(err))?;
Ok(kind.into())
}
.boxed()
})
.await
}
async fn dap_config_to_scenario(&self, config: ZedDebugConfig) -> Result<DebugScenario> {
self.call(|extension, store| {
async move {
let kind = extension
.call_dap_config_to_scenario(store, config)
.await?
.map_err(|err| store.data().extension_error(err))?;
Ok(kind)
}
.boxed()
})
.await
}
async fn dap_locator_create_scenario(
&self,
locator_name: String,
build_config_template: TaskTemplate,
resolved_label: String,
debug_adapter_name: String,
) -> Result<Option<DebugScenario>> {
async fn get_dap_schema(&self) -> Result<serde_json::Value> {
self.call(|extension, store| {
async move {
extension
.call_dap_locator_create_scenario(
store,
locator_name,
build_config_template,
resolved_label,
debug_adapter_name,
)
.call_dap_schema(store)
.await
}
.boxed()
})
.await
}
async fn run_dap_locator(
&self,
locator_name: String,
config: SpawnInTerminal,
) -> Result<DebugRequest> {
self.call(|extension, store| {
async move {
extension
.call_run_dap_locator(store, locator_name, config)
.await?
.and_then(|schema| serde_json::to_value(schema).map_err(|err| err.to_string()))
.map_err(|err| store.data().extension_error(err))
}
.boxed()

View File

@@ -7,14 +7,10 @@ mod since_v0_3_0;
mod since_v0_4_0;
mod since_v0_5_0;
mod since_v0_6_0;
use dap::DebugRequest;
use extension::{DebugTaskDefinition, KeyValueStoreDelegate, WorktreeDelegate};
use language::LanguageName;
use lsp::LanguageServerName;
use release_channel::ReleaseChannel;
use task::{DebugScenario, SpawnInTerminal, TaskTemplate, ZedDebugConfig};
use crate::wasm_host::wit::since_v0_6_0::dap::StartDebuggingRequestArgumentsRequest;
use super::{WasmState, wasm_engine};
use anyhow::{Context as _, Result, anyhow};
@@ -926,87 +922,18 @@ impl Extension {
_ => anyhow::bail!("`get_dap_binary` not available prior to v0.6.0"),
}
}
pub async fn call_dap_request_kind(
&self,
store: &mut Store<WasmState>,
adapter_name: Arc<str>,
config: serde_json::Value,
) -> Result<Result<StartDebuggingRequestArgumentsRequest, String>> {
pub async fn call_dap_schema(&self, store: &mut Store<WasmState>) -> Result<String, String> {
match self {
Extension::V0_6_0(ext) => {
let config =
serde_json::to_string(&config).context("Adapter config is not a valid JSON")?;
let dap_binary = ext
.call_dap_request_kind(store, &adapter_name, &config)
.await?
.map_err(|e| anyhow!("{e:?}"))?;
let schema = ext
.call_dap_schema(store)
.await
.map_err(|err| err.to_string())?;
Ok(Ok(dap_binary))
schema
}
_ => anyhow::bail!("`dap_request_kind` not available prior to v0.6.0"),
}
}
pub async fn call_dap_config_to_scenario(
&self,
store: &mut Store<WasmState>,
config: ZedDebugConfig,
) -> Result<Result<DebugScenario, String>> {
match self {
Extension::V0_6_0(ext) => {
let config = config.into();
let dap_binary = ext
.call_dap_config_to_scenario(store, &config)
.await?
.map_err(|e| anyhow!("{e:?}"))?;
Ok(Ok(dap_binary.try_into()?))
}
_ => anyhow::bail!("`dap_config_to_scenario` not available prior to v0.6.0"),
}
}
pub async fn call_dap_locator_create_scenario(
&self,
store: &mut Store<WasmState>,
locator_name: String,
build_config_template: TaskTemplate,
resolved_label: String,
debug_adapter_name: String,
) -> Result<Option<DebugScenario>> {
match self {
Extension::V0_6_0(ext) => {
let build_config_template = build_config_template.into();
let dap_binary = ext
.call_dap_locator_create_scenario(
store,
&locator_name,
&build_config_template,
&resolved_label,
&debug_adapter_name,
)
.await?;
Ok(dap_binary.map(TryInto::try_into).transpose()?)
}
_ => anyhow::bail!("`dap_locator_create_scenario` not available prior to v0.6.0"),
}
}
pub async fn call_run_dap_locator(
&self,
store: &mut Store<WasmState>,
locator_name: String,
resolved_build_task: SpawnInTerminal,
) -> Result<Result<DebugRequest, String>> {
match self {
Extension::V0_6_0(ext) => {
let build_config_template = resolved_build_task.into();
let dap_request = ext
.call_run_dap_locator(store, &locator_name, &build_config_template)
.await?
.map_err(|e| anyhow!("{e:?}"))?;
Ok(Ok(dap_request.into()))
}
_ => anyhow::bail!("`dap_locator_create_scenario` not available prior to v0.6.0"),
_ => Err("`get_dap_binary` not available prior to v0.6.0".to_string()),
}
}
}

View File

@@ -1,7 +1,7 @@
use crate::wasm_host::wit::since_v0_6_0::{
dap::{
AttachRequest, BuildTaskDefinition, BuildTaskDefinitionTemplatePayload, LaunchRequest,
StartDebuggingRequestArguments, TcpArguments, TcpArgumentsTemplate,
StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest, TcpArguments,
TcpArgumentsTemplate,
},
slash_command::SlashCommandOutputSection,
};
@@ -18,7 +18,6 @@ use extension::{
};
use futures::{AsyncReadExt, lock::Mutex};
use futures::{FutureExt as _, io::BufReader};
use gpui::SharedString;
use language::{BinaryStatus, LanguageName, language_settings::AllLanguageSettings};
use project::project_settings::ProjectSettings;
use semantic_version::SemanticVersion;
@@ -26,10 +25,8 @@ use std::{
env,
net::Ipv4Addr,
path::{Path, PathBuf},
str::FromStr,
sync::{Arc, OnceLock},
};
use task::{SpawnInTerminal, ZedDebugConfig};
use util::{archive::extract_zip, maybe};
use wasmtime::component::{Linker, Resource};
@@ -122,16 +119,6 @@ impl From<extension::TcpArgumentsTemplate> for TcpArgumentsTemplate {
}
}
impl From<TcpArgumentsTemplate> for extension::TcpArgumentsTemplate {
fn from(value: TcpArgumentsTemplate) -> Self {
Self {
host: value.host.map(Ipv4Addr::from_bits),
port: value.port,
timeout: value.timeout,
}
}
}
impl TryFrom<extension::DebugTaskDefinition> for DebugTaskDefinition {
type Error = anyhow::Error;
fn try_from(value: extension::DebugTaskDefinition) -> Result<Self, Self::Error> {
@@ -144,71 +131,6 @@ impl TryFrom<extension::DebugTaskDefinition> for DebugTaskDefinition {
}
}
impl From<task::DebugRequest> for DebugRequest {
fn from(value: task::DebugRequest) -> Self {
match value {
task::DebugRequest::Launch(launch_request) => Self::Launch(launch_request.into()),
task::DebugRequest::Attach(attach_request) => Self::Attach(attach_request.into()),
}
}
}
impl From<DebugRequest> for task::DebugRequest {
fn from(value: DebugRequest) -> Self {
match value {
DebugRequest::Launch(launch_request) => Self::Launch(launch_request.into()),
DebugRequest::Attach(attach_request) => Self::Attach(attach_request.into()),
}
}
}
impl From<task::LaunchRequest> for LaunchRequest {
fn from(value: task::LaunchRequest) -> Self {
Self {
program: value.program,
cwd: value.cwd.map(|p| p.to_string_lossy().into_owned()),
args: value.args,
envs: value.env.into_iter().collect(),
}
}
}
impl From<task::AttachRequest> for AttachRequest {
fn from(value: task::AttachRequest) -> Self {
Self {
process_id: value.process_id,
}
}
}
impl From<LaunchRequest> for task::LaunchRequest {
fn from(value: LaunchRequest) -> Self {
Self {
program: value.program,
cwd: value.cwd.map(|p| p.into()),
args: value.args,
env: value.envs.into_iter().collect(),
}
}
}
impl From<AttachRequest> for task::AttachRequest {
fn from(value: AttachRequest) -> Self {
Self {
process_id: value.process_id,
}
}
}
impl From<ZedDebugConfig> for DebugConfig {
fn from(value: ZedDebugConfig) -> Self {
Self {
label: value.label.into(),
adapter: value.adapter.into(),
request: value.request.into(),
stop_on_entry: value.stop_on_entry,
}
}
}
impl TryFrom<DebugAdapterBinary> for extension::DebugAdapterBinary {
type Error = anyhow::Error;
fn try_from(value: DebugAdapterBinary) -> Result<Self, Self::Error> {
@@ -223,94 +145,6 @@ impl TryFrom<DebugAdapterBinary> for extension::DebugAdapterBinary {
}
}
impl From<BuildTaskDefinition> for extension::BuildTaskDefinition {
fn from(value: BuildTaskDefinition) -> Self {
match value {
BuildTaskDefinition::ByName(name) => Self::ByName(name.into()),
BuildTaskDefinition::Template(build_task_template) => Self::Template {
task_template: build_task_template.template.into(),
locator_name: build_task_template.locator_name.map(SharedString::from),
},
}
}
}
impl From<extension::BuildTaskDefinition> for BuildTaskDefinition {
fn from(value: extension::BuildTaskDefinition) -> Self {
match value {
extension::BuildTaskDefinition::ByName(name) => Self::ByName(name.into()),
extension::BuildTaskDefinition::Template {
task_template,
locator_name,
} => Self::Template(BuildTaskDefinitionTemplatePayload {
template: task_template.into(),
locator_name: locator_name.map(String::from),
}),
}
}
}
impl From<BuildTaskTemplate> for extension::BuildTaskTemplate {
fn from(value: BuildTaskTemplate) -> Self {
Self {
label: value.label,
command: value.command,
args: value.args,
env: value.env.into_iter().collect(),
cwd: value.cwd,
..Default::default()
}
}
}
impl From<extension::BuildTaskTemplate> for BuildTaskTemplate {
fn from(value: extension::BuildTaskTemplate) -> Self {
Self {
label: value.label,
command: value.command,
args: value.args,
env: value.env.into_iter().collect(),
cwd: value.cwd,
}
}
}
impl TryFrom<DebugScenario> for extension::DebugScenario {
type Error = anyhow::Error;
fn try_from(value: DebugScenario) -> std::result::Result<Self, Self::Error> {
Ok(Self {
adapter: value.adapter.into(),
label: value.label.into(),
build: value.build.map(Into::into),
config: serde_json::Value::from_str(&value.config)?,
tcp_connection: value.tcp_connection.map(Into::into),
})
}
}
impl From<extension::DebugScenario> for DebugScenario {
fn from(value: extension::DebugScenario) -> Self {
Self {
adapter: value.adapter.into(),
label: value.label.into(),
build: value.build.map(Into::into),
config: value.config.to_string(),
tcp_connection: value.tcp_connection.map(Into::into),
}
}
}
impl From<SpawnInTerminal> for ResolvedTask {
fn from(value: SpawnInTerminal) -> Self {
Self {
label: value.label,
command: value.command,
args: value.args,
env: value.env.into_iter().collect(),
cwd: value.cwd.map(|s| s.to_string_lossy().into_owned()),
}
}
}
impl From<CodeLabel> for extension::CodeLabel {
fn from(value: CodeLabel) -> Self {
Self {
@@ -938,33 +772,24 @@ impl ExtensionImports for WasmState {
})?)
}
"context_servers" => {
let settings = key
let configuration = key
.and_then(|key| {
ProjectSettings::get(location, cx)
.context_servers
.get(key.as_str())
})
.cloned()
.context("Failed to get context server configuration")?;
match settings {
project::project_settings::ContextServerSettings::Custom {
command,
} => Ok(serde_json::to_string(&settings::ContextServerSettings {
command: Some(settings::CommandSettings {
.unwrap_or_default();
Ok(serde_json::to_string(&settings::ContextServerSettings {
command: configuration.command.map(|command| {
settings::CommandSettings {
path: Some(command.path),
arguments: Some(command.args),
env: command.env.map(|env| env.into_iter().collect()),
}),
settings: None,
})?),
project::project_settings::ContextServerSettings::Extension {
settings,
} => Ok(serde_json::to_string(&settings::ContextServerSettings {
command: None,
settings: Some(settings),
})?),
}
}
}),
settings: configuration.settings,
})?)
}
_ => {
bail!("Unknown settings category: {}", category);

View File

@@ -647,60 +647,44 @@ pub(super) unsafe fn read_fd(mut fd: filedescriptor::FileDescriptor) -> Result<V
Ok(buffer)
}
#[cfg(any(feature = "wayland", feature = "x11"))]
pub(super) const DEFAULT_CURSOR_ICON_NAME: &str = "left_ptr";
impl CursorStyle {
#[cfg(any(feature = "wayland", feature = "x11"))]
pub(super) fn to_icon_names(&self) -> &'static [&'static str] {
// Based on cursor names from chromium:
// https://github.com/chromium/chromium/blob/d3069cf9c973dc3627fa75f64085c6a86c8f41bf/ui/base/cursor/cursor_factory.cc#L113
pub(super) fn to_icon_name(&self) -> &'static str {
// Based on cursor names from https://gitlab.gnome.org/GNOME/adwaita-icon-theme (GNOME)
// and https://github.com/KDE/breeze (KDE). Both of them seem to be also derived from
// Web CSS cursor names: https://developer.mozilla.org/en-US/docs/Web/CSS/cursor#values
match self {
CursorStyle::Arrow => &[DEFAULT_CURSOR_ICON_NAME],
CursorStyle::IBeam => &["text", "xterm"],
CursorStyle::Crosshair => &["crosshair", "cross"],
CursorStyle::ClosedHand => &["closedhand", "grabbing", "hand2"],
CursorStyle::OpenHand => &["openhand", "grab", "hand1"],
CursorStyle::PointingHand => &["pointer", "hand", "hand2"],
CursorStyle::ResizeLeft => &["w-resize", "left_side"],
CursorStyle::ResizeRight => &["e-resize", "right_side"],
CursorStyle::ResizeLeftRight => &["ew-resize", "sb_h_double_arrow"],
CursorStyle::ResizeUp => &["n-resize", "top_side"],
CursorStyle::ResizeDown => &["s-resize", "bottom_side"],
CursorStyle::ResizeUpDown => &["sb_v_double_arrow", "ns-resize"],
CursorStyle::ResizeUpLeftDownRight => &["size_fdiag", "bd_double_arrow", "nwse-resize"],
CursorStyle::ResizeUpRightDownLeft => &["size_bdiag", "nesw-resize", "fd_double_arrow"],
CursorStyle::ResizeColumn => &["col-resize", "sb_h_double_arrow"],
CursorStyle::ResizeRow => &["row-resize", "sb_v_double_arrow"],
CursorStyle::IBeamCursorForVerticalLayout => &["vertical-text"],
CursorStyle::OperationNotAllowed => &["not-allowed", "crossed_circle"],
CursorStyle::DragLink => &["alias"],
CursorStyle::DragCopy => &["copy"],
CursorStyle::ContextualMenu => &["context-menu"],
CursorStyle::Arrow => "left_ptr",
CursorStyle::IBeam => "text",
CursorStyle::Crosshair => "crosshair",
CursorStyle::ClosedHand => "grabbing",
CursorStyle::OpenHand => "grab",
CursorStyle::PointingHand => "pointer",
CursorStyle::ResizeLeft => "w-resize",
CursorStyle::ResizeRight => "e-resize",
CursorStyle::ResizeLeftRight => "ew-resize",
CursorStyle::ResizeUp => "n-resize",
CursorStyle::ResizeDown => "s-resize",
CursorStyle::ResizeUpDown => "ns-resize",
CursorStyle::ResizeUpLeftDownRight => "nwse-resize",
CursorStyle::ResizeUpRightDownLeft => "nesw-resize",
CursorStyle::ResizeColumn => "col-resize",
CursorStyle::ResizeRow => "row-resize",
CursorStyle::IBeamCursorForVerticalLayout => "vertical-text",
CursorStyle::OperationNotAllowed => "not-allowed",
CursorStyle::DragLink => "alias",
CursorStyle::DragCopy => "copy",
CursorStyle::ContextualMenu => "context-menu",
CursorStyle::None => {
#[cfg(debug_assertions)]
panic!("CursorStyle::None should be handled separately in the client");
#[cfg(not(debug_assertions))]
&[DEFAULT_CURSOR_ICON_NAME]
"default"
}
}
}
}
#[cfg(any(feature = "wayland", feature = "x11"))]
pub(super) fn log_cursor_icon_warning(message: impl std::fmt::Display) {
if let Ok(xcursor_path) = env::var("XCURSOR_PATH") {
log::warn!(
"{:#}\ncursor icon loading may be failing if XCURSOR_PATH environment variable is invalid. \
XCURSOR_PATH overrides the default icon search. Its current value is '{}'",
message,
xcursor_path
);
} else {
log::warn!("{:#}", message);
}
}
#[cfg(any(feature = "wayland", feature = "x11"))]
impl crate::Keystroke {
pub(super) fn from_xkb(

View File

@@ -537,7 +537,7 @@ impl WaylandClient {
XDPEvent::CursorTheme(theme) => {
if let Some(client) = client.0.upgrade() {
let mut client = client.borrow_mut();
client.cursor.set_theme(theme);
client.cursor.set_theme(theme.as_str());
}
}
XDPEvent::CursorSize(size) => {
@@ -730,7 +730,7 @@ impl LinuxClient for WaylandClient {
let scale = focused_window.primary_output_scale();
state
.cursor
.set_icon(&wl_pointer, serial, style.to_icon_names(), scale);
.set_icon(&wl_pointer, serial, style.to_icon_name(), scale);
}
}
}
@@ -1252,12 +1252,12 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
keymap_state.update_mask(mods_depressed, mods_latched, mods_locked, 0, 0, group);
state.modifiers = Modifiers::from_xkb(keymap_state);
let input = PlatformInput::ModifiersChanged(ModifiersChangedEvent {
modifiers: state.modifiers,
});
drop(state);
if let Some(focused_window) = focused_window {
let input = PlatformInput::ModifiersChanged(ModifiersChangedEvent {
modifiers: state.modifiers,
});
drop(state);
focused_window.handle_input(input);
}
@@ -1530,12 +1530,9 @@ impl Dispatch<wl_pointer::WlPointer, ()> for WaylandClientStatePtr {
cursor_shape_device.set_shape(serial, style.to_shape());
} else {
let scale = window.primary_output_scale();
state.cursor.set_icon(
&wl_pointer,
serial,
style.to_icon_names(),
scale,
);
state
.cursor
.set_icon(&wl_pointer, serial, style.to_icon_name(), scale);
}
}
drop(state);

View File

@@ -1,6 +1,4 @@
use crate::Globals;
use crate::platform::linux::{DEFAULT_CURSOR_ICON_NAME, log_cursor_icon_warning};
use anyhow::{Context as _, anyhow};
use util::ResultExt;
use wayland_client::Connection;
@@ -9,143 +7,122 @@ use wayland_client::protocol::{wl_pointer::WlPointer, wl_shm::WlShm};
use wayland_cursor::{CursorImageBuffer, CursorTheme};
pub(crate) struct Cursor {
loaded_theme: Option<LoadedTheme>,
size: u32,
scaled_size: u32,
theme: Option<CursorTheme>,
theme_name: Option<String>,
theme_size: u32,
surface: WlSurface,
size: u32,
shm: WlShm,
connection: Connection,
}
pub(crate) struct LoadedTheme {
theme: CursorTheme,
name: Option<String>,
scaled_size: u32,
}
impl Drop for Cursor {
fn drop(&mut self) {
self.loaded_theme.take();
self.theme.take();
self.surface.destroy();
}
}
impl Cursor {
pub fn new(connection: &Connection, globals: &Globals, size: u32) -> Self {
let mut this = Self {
loaded_theme: None,
size,
scaled_size: size,
Self {
theme: CursorTheme::load(&connection, globals.shm.clone(), size).log_err(),
theme_name: None,
theme_size: size,
surface: globals.compositor.create_surface(&globals.qh, ()),
shm: globals.shm.clone(),
connection: connection.clone(),
};
this.set_theme_internal(None);
this
size,
}
}
fn set_theme_internal(&mut self, theme_name: Option<String>) {
if let Some(loaded_theme) = self.loaded_theme.as_ref() {
if loaded_theme.name == theme_name && loaded_theme.scaled_size == self.scaled_size {
return;
}
}
let result = if let Some(theme_name) = theme_name.as_ref() {
CursorTheme::load_from_name(
&self.connection,
self.shm.clone(),
theme_name,
self.scaled_size,
)
} else {
CursorTheme::load(&self.connection, self.shm.clone(), self.scaled_size)
};
if let Some(theme) = result
.context("Wayland: Failed to load cursor theme")
.log_err()
pub fn set_theme(&mut self, theme_name: &str) {
if let Some(theme) = CursorTheme::load_from_name(
&self.connection,
self.shm.clone(),
theme_name,
self.theme_size,
)
.log_err()
{
self.loaded_theme = Some(LoadedTheme {
theme,
name: theme_name.map(|name| name.to_string()),
scaled_size: self.scaled_size,
});
self.theme = Some(theme);
self.theme_name = Some(theme_name.to_string());
} else if let Some(theme) =
CursorTheme::load(&self.connection, self.shm.clone(), self.theme_size).log_err()
{
self.theme = Some(theme);
self.theme_name = None;
}
}
pub fn set_theme(&mut self, theme_name: String) {
self.set_theme_internal(Some(theme_name));
}
fn set_scaled_size(&mut self, scaled_size: u32) {
self.scaled_size = scaled_size;
let theme_name = self
.loaded_theme
fn set_theme_size(&mut self, theme_size: u32) {
self.theme = self
.theme_name
.as_ref()
.and_then(|loaded_theme| loaded_theme.name.clone());
self.set_theme_internal(theme_name);
.and_then(|name| {
CursorTheme::load_from_name(
&self.connection,
self.shm.clone(),
name.as_str(),
theme_size,
)
.log_err()
})
.or_else(|| {
CursorTheme::load(&self.connection, self.shm.clone(), theme_size).log_err()
});
}
pub fn set_size(&mut self, size: u32) {
self.size = size;
self.set_scaled_size(size);
self.set_theme_size(size);
}
pub fn set_icon(
&mut self,
wl_pointer: &WlPointer,
serial_id: u32,
mut cursor_icon_names: &[&str],
mut cursor_icon_name: &str,
scale: i32,
) {
self.set_scaled_size(self.size * scale as u32);
self.set_theme_size(self.size * scale as u32);
let Some(loaded_theme) = &mut self.loaded_theme else {
log::warn!("Wayland: Unable to load cursor themes");
return;
};
let mut theme = &mut loaded_theme.theme;
if let Some(theme) = &mut self.theme {
let mut buffer: Option<&CursorImageBuffer>;
let mut buffer: &CursorImageBuffer;
'outer: {
for cursor_icon_name in cursor_icon_names {
if let Some(cursor) = theme.get_cursor(cursor_icon_name) {
buffer = &cursor[0];
break 'outer;
}
}
if let Some(cursor) = theme.get_cursor(DEFAULT_CURSOR_ICON_NAME) {
buffer = &cursor[0];
log_cursor_icon_warning(anyhow!(
"wayland: Unable to get cursor icon {:?}. \
Using default cursor icon: '{}'",
cursor_icon_names,
DEFAULT_CURSOR_ICON_NAME
));
if let Some(cursor) = theme.get_cursor(&cursor_icon_name) {
buffer = Some(&cursor[0]);
} else if let Some(cursor) = theme.get_cursor("default") {
buffer = Some(&cursor[0]);
cursor_icon_name = "default";
log::warn!(
"Linux: Wayland: Unable to get cursor icon: {}. Using default cursor icon",
cursor_icon_name
);
} else {
log_cursor_icon_warning(anyhow!(
"wayland: Unable to fallback on default cursor icon '{}' for theme '{}'",
DEFAULT_CURSOR_ICON_NAME,
loaded_theme.name.as_deref().unwrap_or("default")
));
return;
buffer = None;
log::warn!("Linux: Wayland: Unable to get default cursor too!");
}
if let Some(buffer) = &mut buffer {
let (width, height) = buffer.dimensions();
let (hot_x, hot_y) = buffer.hotspot();
self.surface.set_buffer_scale(scale);
wl_pointer.set_cursor(
serial_id,
Some(&self.surface),
hot_x as i32 / scale,
hot_y as i32 / scale,
);
self.surface.attach(Some(&buffer), 0, 0);
self.surface.damage(0, 0, width as i32, height as i32);
self.surface.commit();
}
} else {
log::warn!("Linux: Wayland: Unable to load cursor themes");
}
let (width, height) = buffer.dimensions();
let (hot_x, hot_y) = buffer.hotspot();
self.surface.set_buffer_scale(scale);
wl_pointer.set_cursor(
serial_id,
Some(&self.surface),
hot_x as i32 / scale,
hot_y as i32 / scale,
);
self.surface.attach(Some(&buffer), 0, 0);
self.surface.damage(0, 0, width as i32, height as i32);
self.surface.commit();
}
}

View File

@@ -8,7 +8,7 @@ use std::{
time::{Duration, Instant},
};
use anyhow::{Context as _, anyhow};
use anyhow::Context as _;
use calloop::{
EventLoop, LoopHandle, RegistrationToken,
generic::{FdWrapper, Generic},
@@ -51,8 +51,7 @@ use crate::platform::{
LinuxCommon, PlatformWindow,
blade::BladeContext,
linux::{
DEFAULT_CURSOR_ICON_NAME, LinuxClient, get_xkb_compose_state, is_within_click_distance,
log_cursor_icon_warning, open_uri_internal,
LinuxClient, get_xkb_compose_state, is_within_click_distance, open_uri_internal,
platform::{DOUBLE_CLICK_INTERVAL, SCROLL_LINES},
reveal_path_internal,
xdg_desktop_portal::{Event as XDPEvent, XDPEventSource},
@@ -212,7 +211,7 @@ pub struct X11ClientState {
pub(crate) pre_key_char_down: Option<Keystroke>,
pub(crate) cursor_handle: cursor::Handle,
pub(crate) cursor_styles: HashMap<xproto::Window, CursorStyle>,
pub(crate) cursor_cache: HashMap<CursorStyle, Option<xproto::Cursor>>,
pub(crate) cursor_cache: HashMap<CursorStyle, xproto::Cursor>,
pointer_device_states: BTreeMap<xinput::DeviceId, PointerDeviceState>,
@@ -1502,8 +1501,22 @@ impl LinuxClient for X11Client {
return;
}
let Some(cursor) = state.get_cursor_icon(style) else {
return;
let cursor = match state.cursor_cache.get(&style) {
Some(cursor) => *cursor,
None => {
let Some(cursor) = (match style {
CursorStyle::None => create_invisible_cursor(&state.xcb_connection).log_err(),
_ => state
.cursor_handle
.load_cursor(&state.xcb_connection, style.to_icon_name())
.log_err(),
}) else {
return;
};
state.cursor_cache.insert(style, cursor);
cursor
}
};
state.cursor_styles.insert(focused_window, style);
@@ -1760,78 +1773,6 @@ impl X11ClientState {
})
.expect("Failed to initialize refresh timer")
}
fn get_cursor_icon(&mut self, style: CursorStyle) -> Option<xproto::Cursor> {
if let Some(cursor) = self.cursor_cache.get(&style) {
return *cursor;
}
let mut result;
match style {
CursorStyle::None => match create_invisible_cursor(&self.xcb_connection) {
Ok(loaded_cursor) => result = Ok(loaded_cursor),
Err(err) => result = Err(err.context("error while creating invisible cursor")),
},
_ => 'outer: {
let mut errors = String::new();
let cursor_icon_names = style.to_icon_names();
for cursor_icon_name in cursor_icon_names {
match self
.cursor_handle
.load_cursor(&self.xcb_connection, cursor_icon_name)
{
Ok(loaded_cursor) => {
if loaded_cursor != x11rb::NONE {
result = Ok(loaded_cursor);
break 'outer;
}
}
Err(err) => {
errors.push_str(&err.to_string());
errors.push('\n');
}
}
}
if errors.is_empty() {
result = Err(anyhow!(
"errors while loading cursor icons {:?}:\n{}",
cursor_icon_names,
errors
));
} else {
result = Err(anyhow!("did not find cursor icons {:?}", cursor_icon_names));
}
}
};
let cursor = match result {
Ok(cursor) => Some(cursor),
Err(err) => {
match self
.cursor_handle
.load_cursor(&self.xcb_connection, DEFAULT_CURSOR_ICON_NAME)
{
Ok(default) => {
log_cursor_icon_warning(err.context(format!(
"x11: error loading cursor icon, falling back on default icon '{}'",
DEFAULT_CURSOR_ICON_NAME
)));
Some(default)
}
Err(default_err) => {
log_cursor_icon_warning(err.context(default_err).context(format!(
"x11: error loading default cursor fallback '{}'",
DEFAULT_CURSOR_ICON_NAME
)));
None
}
}
}
};
self.cursor_cache.insert(style, cursor);
cursor
}
}
// Adapted from:

View File

@@ -288,8 +288,6 @@ pub struct CopilotSettings {
pub proxy: Option<String>,
/// Disable certificate verification for proxy (not recommended).
pub proxy_no_verify: Option<bool>,
/// Enterprise URI for Copilot.
pub enterprise_uri: Option<String>,
}
/// The settings for all languages.
@@ -609,11 +607,6 @@ pub struct CopilotSettingsContent {
/// Default: false
#[serde(default)]
pub proxy_no_verify: Option<bool>,
/// Enterprise URI for Copilot.
///
/// Default: none
#[serde(default)]
pub enterprise_uri: Option<String>,
}
/// The settings for enabling/disabling features.
@@ -1235,10 +1228,10 @@ impl settings::Settings for AllLanguageSettings {
let mut copilot_settings = default_value
.edit_predictions
.as_ref()
.map(|settings| CopilotSettings {
proxy: settings.copilot.proxy.clone(),
proxy_no_verify: settings.copilot.proxy_no_verify,
enterprise_uri: settings.copilot.enterprise_uri.clone(),
.map(|settings| settings.copilot.clone())
.map(|copilot| CopilotSettings {
proxy: copilot.proxy,
proxy_no_verify: copilot.proxy_no_verify,
})
.unwrap_or_default();
@@ -1294,14 +1287,6 @@ impl settings::Settings for AllLanguageSettings {
copilot_settings.proxy_no_verify = Some(proxy_no_verify);
}
if let Some(enterprise_uri) = user_settings
.edit_predictions
.as_ref()
.and_then(|settings| settings.copilot.enterprise_uri.clone())
{
copilot_settings.enterprise_uri = Some(enterprise_uri);
}
// A user's global settings override the default global settings and
// all default language-specific settings.
merge_settings(&mut defaults, &user_settings.defaults);

View File

@@ -26,7 +26,6 @@ gpui.workspace = true
http_client.workspace = true
icons.workspace = true
image.workspace = true
log.workspace = true
parking_lot.workspace = true
proto.workspace = true
schemars.workspace = true

View File

@@ -1,5 +1,5 @@
use anthropic::ANTHROPIC_API_URL;
use anyhow::{Context as _, anyhow};
use anthropic::{ANTHROPIC_API_URL, AnthropicError};
use anyhow::{Context as _, Result, anyhow};
use client::telemetry::Telemetry;
use gpui::BackgroundExecutor;
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
@@ -20,17 +20,13 @@ pub fn report_assistant_event(
if let Some(telemetry) = telemetry.as_ref() {
telemetry.report_assistant_event(event.clone());
if telemetry.metrics_enabled() && event.model_provider == ANTHROPIC_PROVIDER_ID {
if let Some(api_key) = model_api_key {
executor
.spawn(async move {
report_anthropic_event(event, client, api_key)
.await
.log_err();
})
.detach();
} else {
log::error!("Cannot send Anthropic telemetry because API key is missing");
}
executor
.spawn(async move {
report_anthropic_event(event, client, model_api_key)
.await
.log_err();
})
.detach();
}
}
}
@@ -38,8 +34,17 @@ pub fn report_assistant_event(
async fn report_anthropic_event(
event: AssistantEventData,
client: Arc<dyn HttpClient>,
api_key: String,
) -> anyhow::Result<()> {
model_api_key: Option<String>,
) -> Result<(), AnthropicError> {
let api_key = match model_api_key {
Some(key) => key,
None => {
return Err(AnthropicError::Other(anyhow!(
"Anthropic API key is not set"
)));
}
};
let uri = format!("{ANTHROPIC_API_URL}/v1/log/zed");
let request_builder = HttpRequest::builder()
.method(Method::POST)
@@ -67,19 +72,19 @@ async fn report_anthropic_event(
let request = request_builder
.body(AsyncBody::from(serialized_event.to_string()))
.context("Failed to construct Anthropic telemetry HTTP request body")?;
.context("failed to construct request body")?;
let response = client
.send(request)
.await
.context("Failed to send telemetry HTTP request to Anthropic")?;
.context("failed to send request to Anthropic")?;
if response.status().is_success() {
Ok(())
} else {
Err(anyhow!(
"Anthropic telemetry logging failed with HTTP status: {}",
response.status()
))
return Ok(());
}
return Err(AnthropicError::Other(anyhow!(
"Failed to log: {}",
response.status(),
)));
}

View File

@@ -58,7 +58,6 @@ ui.workspace = true
util.workspace = true
workspace-hack.workspace = true
zed_llm_client.workspace = true
language.workspace = true
[dev-dependencies]
editor = { workspace = true, features = ["test-support"] }

View File

@@ -10,14 +10,15 @@ use copilot::copilot_chat::{
ToolCall,
};
use copilot::{Copilot, Status};
use editor::{Editor, EditorElement, EditorStyle};
use fs::Fs;
use futures::future::BoxFuture;
use futures::stream::BoxStream;
use futures::{FutureExt, Stream, StreamExt};
use gpui::{
Action, Animation, AnimationExt, AnyView, App, AsyncApp, Entity, Render, Subscription, Task,
Transformation, percentage, svg,
Action, Animation, AnimationExt, AnyView, App, AsyncApp, Entity, FontStyle, Render,
Subscription, Task, TextStyle, Transformation, WhiteSpace, percentage, svg,
};
use language::language_settings::all_language_settings;
use language_model::{
AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
@@ -26,14 +27,18 @@ use language_model::{
LanguageModelToolSchemaFormat, LanguageModelToolUse, MessageContent, RateLimiter, Role,
StopReason,
};
use settings::SettingsStore;
use settings::{Settings, SettingsStore, update_settings_file};
use std::time::Duration;
use theme::ThemeSettings;
use ui::prelude::*;
use util::debug_panic;
use crate::{AllLanguageModelSettings, CopilotChatSettingsContent};
use super::anthropic::count_anthropic_tokens;
use super::google::count_google_tokens;
use super::open_ai::count_open_ai_tokens;
pub(crate) use copilot::copilot_chat::CopilotChatSettings;
const PROVIDER_ID: &str = "copilot_chat";
const PROVIDER_NAME: &str = "GitHub Copilot Chat";
@@ -64,16 +69,11 @@ impl CopilotChatLanguageModelProvider {
_copilot_chat_subscription: copilot_chat_subscription,
_settings_subscription: cx.observe_global::<SettingsStore>(|_, cx| {
if let Some(copilot_chat) = CopilotChat::global(cx) {
let language_settings = all_language_settings(None, cx);
let configuration = copilot::copilot_chat::CopilotChatConfiguration {
enterprise_uri: language_settings
.edit_predictions
.copilot
.enterprise_uri
.clone(),
};
let settings = AllLanguageModelSettings::get_global(cx)
.copilot_chat
.clone();
copilot_chat.update(cx, |chat, cx| {
chat.set_configuration(configuration, cx);
chat.set_settings(settings, cx);
});
}
cx.notify();
@@ -174,9 +174,10 @@ impl LanguageModelProvider for CopilotChatLanguageModelProvider {
Task::ready(Err(err.into()))
}
fn configuration_view(&self, _: &mut Window, cx: &mut App) -> AnyView {
fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView {
let state = self.state.clone();
cx.new(|cx| ConfigurationView::new(state, cx)).into()
cx.new(|cx| ConfigurationView::new(state, window, cx))
.into()
}
fn reset_credentials(&self, _cx: &mut App) -> Task<Result<()>> {
@@ -621,15 +622,38 @@ fn into_copilot_chat(
struct ConfigurationView {
copilot_status: Option<copilot::Status>,
api_url_editor: Entity<Editor>,
models_url_editor: Entity<Editor>,
auth_url_editor: Entity<Editor>,
state: Entity<State>,
_subscription: Option<Subscription>,
}
impl ConfigurationView {
pub fn new(state: Entity<State>, cx: &mut Context<Self>) -> Self {
pub fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
let copilot = Copilot::global(cx);
let settings = AllLanguageModelSettings::get_global(cx)
.copilot_chat
.clone();
let api_url_editor = cx.new(|cx| Editor::single_line(window, cx));
api_url_editor.update(cx, |this, cx| {
this.set_text(settings.api_url.clone(), window, cx);
this.set_placeholder_text("GitHub Copilot API URL", cx);
});
let models_url_editor = cx.new(|cx| Editor::single_line(window, cx));
models_url_editor.update(cx, |this, cx| {
this.set_text(settings.models_url.clone(), window, cx);
this.set_placeholder_text("GitHub Copilot Models URL", cx);
});
let auth_url_editor = cx.new(|cx| Editor::single_line(window, cx));
auth_url_editor.update(cx, |this, cx| {
this.set_text(settings.auth_url.clone(), window, cx);
this.set_placeholder_text("GitHub Copilot Auth URL", cx);
});
Self {
api_url_editor,
models_url_editor,
auth_url_editor,
copilot_status: copilot.as_ref().map(|copilot| copilot.read(cx).status()),
state,
_subscription: copilot.as_ref().map(|copilot| {
@@ -640,6 +664,104 @@ impl ConfigurationView {
}),
}
}
fn make_input_styles(&self, cx: &App) -> Div {
let bg_color = cx.theme().colors().editor_background;
let border_color = cx.theme().colors().border;
h_flex()
.w_full()
.px_2()
.py_1()
.bg(bg_color)
.border_1()
.border_color(border_color)
.rounded_sm()
}
fn make_text_style(&self, cx: &Context<Self>) -> TextStyle {
let settings = ThemeSettings::get_global(cx);
TextStyle {
color: cx.theme().colors().text,
font_family: settings.ui_font.family.clone(),
font_features: settings.ui_font.features.clone(),
font_fallbacks: settings.ui_font.fallbacks.clone(),
font_size: rems(0.875).into(),
font_weight: settings.ui_font.weight,
font_style: FontStyle::Normal,
line_height: relative(1.3),
background_color: None,
underline: None,
strikethrough: None,
white_space: WhiteSpace::Normal,
text_overflow: None,
text_align: Default::default(),
line_clamp: None,
}
}
fn render_api_url_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
let text_style = self.make_text_style(cx);
EditorElement::new(
&self.api_url_editor,
EditorStyle {
background: cx.theme().colors().editor_background,
local_player: cx.theme().players().local(),
text: text_style,
..Default::default()
},
)
}
fn render_auth_url_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
let text_style = self.make_text_style(cx);
EditorElement::new(
&self.auth_url_editor,
EditorStyle {
background: cx.theme().colors().editor_background,
local_player: cx.theme().players().local(),
text: text_style,
..Default::default()
},
)
}
fn render_models_editor(&self, cx: &mut Context<Self>) -> impl IntoElement {
let text_style = self.make_text_style(cx);
EditorElement::new(
&self.models_url_editor,
EditorStyle {
background: cx.theme().colors().editor_background,
local_player: cx.theme().players().local(),
text: text_style,
..Default::default()
},
)
}
fn update_copilot_settings(&self, cx: &mut Context<'_, Self>) {
let settings = CopilotChatSettings {
api_url: self.api_url_editor.read(cx).text(cx).into(),
models_url: self.models_url_editor.read(cx).text(cx).into(),
auth_url: self.auth_url_editor.read(cx).text(cx).into(),
};
update_settings_file::<AllLanguageModelSettings>(<dyn Fs>::global(cx), cx, {
let settings = settings.clone();
move |content, _| {
content.copilot_chat = Some(CopilotChatSettingsContent {
api_url: Some(settings.api_url.as_ref().into()),
models_url: Some(settings.models_url.as_ref().into()),
auth_url: Some(settings.auth_url.as_ref().into()),
});
}
});
if let Some(chat) = CopilotChat::global(cx) {
chat.update(cx, |this, cx| {
this.set_settings(settings, cx);
});
}
}
}
impl Render for ConfigurationView {
@@ -697,15 +819,59 @@ impl Render for ConfigurationView {
}
_ => {
const LABEL: &str = "To use Zed's assistant with GitHub Copilot, you need to be logged in to GitHub. Note that your GitHub account must have an active Copilot Chat subscription.";
v_flex().gap_2().child(Label::new(LABEL)).child(
Button::new("sign_in", "Sign in to use GitHub Copilot")
.icon_color(Color::Muted)
.icon(IconName::Github)
.icon_position(IconPosition::Start)
.icon_size(IconSize::Medium)
.full_width()
.on_click(|_, window, cx| copilot::initiate_sign_in(window, cx)),
)
v_flex()
.gap_2()
.child(Label::new(LABEL))
.on_action(cx.listener(|this, _: &menu::Confirm, window, cx| {
this.update_copilot_settings(cx);
copilot::initiate_sign_in(window, cx);
}))
.child(
v_flex()
.gap_0p5()
.child(Label::new("API URL").size(LabelSize::Small))
.child(
self.make_input_styles(cx)
.child(self.render_api_url_editor(cx)),
),
)
.child(
v_flex()
.gap_0p5()
.child(Label::new("Auth URL").size(LabelSize::Small))
.child(
self.make_input_styles(cx)
.child(self.render_auth_url_editor(cx)),
),
)
.child(
v_flex()
.gap_0p5()
.child(Label::new("Models list URL").size(LabelSize::Small))
.child(
self.make_input_styles(cx)
.child(self.render_models_editor(cx)),
),
)
.child(
Button::new("sign_in", "Sign in to use GitHub Copilot")
.icon_color(Color::Muted)
.icon(IconName::Github)
.icon_position(IconPosition::Start)
.icon_size(IconSize::Medium)
.full_width()
.on_click(cx.listener(|this, _, window, cx| {
this.update_copilot_settings(cx);
copilot::initiate_sign_in(window, cx)
})),
)
.child(
Label::new(
format!("You can also assign the {} environment variable and restart Zed.", copilot::copilot_chat::COPILOT_OAUTH_ENV_VAR),
)
.size(LabelSize::Small)
.color(Color::Muted),
)
}
},
None => v_flex().gap_6().child(Label::new(ERROR_LABEL)),

View File

@@ -437,29 +437,14 @@ pub fn into_google(
content
.into_iter()
.flat_map(|content| match content {
language_model::MessageContent::Text(text) => {
language_model::MessageContent::Text(text)
| language_model::MessageContent::Thinking { text, .. } => {
if !text.is_empty() {
vec![Part::TextPart(google_ai::TextPart { text })]
} else {
vec![]
}
}
language_model::MessageContent::Thinking {
text: _,
signature: Some(signature),
} => {
if !signature.is_empty() {
vec![Part::ThoughtPart(google_ai::ThoughtPart {
thought: true,
thought_signature: signature,
})]
} else {
vec![]
}
}
language_model::MessageContent::Thinking { .. } => {
vec![]
}
language_model::MessageContent::RedactedThinking(_) => vec![],
language_model::MessageContent::Image(image) => {
vec![Part::InlineDataPart(google_ai::InlineDataPart {
@@ -679,12 +664,7 @@ impl GoogleEventMapper {
)));
}
Part::FunctionResponsePart(_) => {}
Part::ThoughtPart(part) => {
events.push(Ok(LanguageModelCompletionEvent::Thinking {
text: "(Encrypted thought)".to_string(), // TODO: Can we populate this from thought summaries?
signature: Some(part.thought_signature),
}));
}
Part::ThoughtPart(_) => {}
});
}
}

View File

@@ -13,6 +13,7 @@ use crate::provider::{
anthropic::AnthropicSettings,
bedrock::AmazonBedrockSettings,
cloud::{self, ZedDotDevSettings},
copilot_chat::CopilotChatSettings,
deepseek::DeepSeekSettings,
google::GoogleSettings,
lmstudio::LmStudioSettings,
@@ -64,7 +65,7 @@ pub struct AllLanguageModelSettings {
pub open_router: OpenRouterSettings,
pub zed_dot_dev: ZedDotDevSettings,
pub google: GoogleSettings,
pub copilot_chat: CopilotChatSettings,
pub lmstudio: LmStudioSettings,
pub deepseek: DeepSeekSettings,
pub mistral: MistralSettings,
@@ -82,7 +83,7 @@ pub struct AllLanguageModelSettingsContent {
pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
pub google: Option<GoogleSettingsContent>,
pub deepseek: Option<DeepseekSettingsContent>,
pub copilot_chat: Option<CopilotChatSettingsContent>,
pub mistral: Option<MistralSettingsContent>,
}
@@ -270,6 +271,13 @@ pub struct ZedDotDevSettingsContent {
available_models: Option<Vec<cloud::AvailableModel>>,
}
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {
pub api_url: Option<String>,
pub auth_url: Option<String>,
pub models_url: Option<String>,
}
#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenRouterSettingsContent {
pub api_url: Option<String>,
@@ -427,6 +435,24 @@ impl settings::Settings for AllLanguageModelSettings {
.as_ref()
.and_then(|s| s.available_models.clone()),
);
// Copilot Chat
let copilot_chat = value.copilot_chat.clone().unwrap_or_default();
settings.copilot_chat.api_url = copilot_chat.api_url.map_or_else(
|| Arc::from("https://api.githubcopilot.com/chat/completions"),
Arc::from,
);
settings.copilot_chat.auth_url = copilot_chat.auth_url.map_or_else(
|| Arc::from("https://api.github.com/copilot_internal/v2/token"),
Arc::from,
);
settings.copilot_chat.models_url = copilot_chat.models_url.map_or_else(
|| Arc::from("https://api.githubcopilot.com/models"),
Arc::from,
);
}
Ok(settings)

Some files were not shown because too many files have changed in this diff Show More