Compare commits
59 Commits
v0.187.6 ... gpui-butto
Commit SHAs in this range:

d1711f91f6, 8fd2316deb, 7357796969, e26620d1cf, 9dabf491f0, f2dcc98216, 23bbfc4b94, 98aefcca83, 9be1e9aab1, 33b60bc16d,
0355b9dfab, 6bec76cd5d, d4f47aa653, 5112fcebeb, dcf7f714f7, 16f668b8e3, 0f4e52bde8, dfe37b0a07, 2da37988b5, 05955e4faa,
1d043b37fb, 18d39e3f81, cc3a28a8e8, 0f17e82154, a316428686, 355266988d, 72007c9a62, c2feffac9d, 4b7b5db58c, 58ba833792,
f021b401f4, 47f6d4e5a7, e60f029525, d7b5c61ec8, 23d42e3eaf, b2fc4064c0, bba3db9378, 5078f0b5ef, 607bfd3b1c, 87cb498a41,
6420df3975, 83498ebf2b, 1fb1fecb0a, bc99a86bb7, fcfe4e2c14, ef511976be, c80aaca0c5, 234d6ce5f5, 96a0568fb7, b6828e5ce8,
78d3ce4090, d01559f9bc, 645f662853, d42cb111f4, dce6e96c16, 4280bff10a, ea5b289459, 09503333af, 775370fd7d
.github/ISSUE_TEMPLATE/01_bug_agent.yml (vendored, 4 changed lines)

@@ -29,8 +29,8 @@ body:
   id: environment
   attributes:
     label: Zed Version and System Specs
-    description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
+    description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
     placeholder: |
-      Output of "zed: Copy System Specs Into Clipboard"
+      Output of "zed: copy system specs into clipboard"
   validations:
     required: true
.github/ISSUE_TEMPLATE/03_bug_git.yml (vendored, 4 changed lines)

@@ -28,8 +28,8 @@ body:
   id: environment
   attributes:
     label: Zed Version and System Specs
-    description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
+    description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
     placeholder: |
-      Output of "zed: Copy System Specs Into Clipboard"
+      Output of "zed: copy system specs into clipboard"
   validations:
     required: true
.github/ISSUE_TEMPLATE/04_bug_debugger.yml (vendored, 4 changed lines)

@@ -28,8 +28,8 @@ body:
   id: environment
   attributes:
     label: Zed Version and System Specs
-    description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
+    description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
     placeholder: |
-      Output of "zed: Copy System Specs Into Clipboard"
+      Output of "zed: copy system specs into clipboard"
   validations:
     required: true
.github/ISSUE_TEMPLATE/10_bug_report.yml (vendored, 4 changed lines)

@@ -49,8 +49,8 @@ body:
   attributes:
     label: Zed Version and System Specs
     description: |
-      Open Zed, from the command palette select "zed: Copy System Specs Into Clipboard"
+      Open Zed, from the command palette select "zed: copy system specs into clipboard"
     placeholder: |
-      Output of "zed: Copy System Specs Into Clipboard"
+      Output of "zed: copy system specs into clipboard"
   validations:
     required: true
.github/ISSUE_TEMPLATE/11_crash_report.yml (vendored, 4 changed lines)

@@ -26,9 +26,9 @@ body:
   id: environment
   attributes:
     label: Zed Version and System Specs
-    description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
+    description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
     placeholder: |
-      Output of "zed: Copy System Specs Into Clipboard"
+      Output of "zed: copy system specs into clipboard"
   validations:
     required: true
   - type: textarea
Cargo.lock (generated, 52 changed lines)

@@ -81,12 +81,12 @@ dependencies = [
 "http_client", "indexed_docs", "indoc", +"inventory", "itertools 0.14.0", "jsonschema", "language", "language_model", "language_model_selector", -"linkme", "log", "lsp", "markdown",
@@ -674,7 +674,6 @@ dependencies = [
 "language", "language_model", "language_models", -"linkme", "log", "markdown", "open",
@@ -2793,6 +2792,7 @@ dependencies = [
 "anyhow", "async-recursion 0.3.2", "async-tungstenite", "base64 0.22.1", "chrono", "clock", "cocoa 0.26.0",
@@ -2804,6 +2804,7 @@ dependencies = [
 "gpui_tokio", "http_client", "http_client_tls", "httparse", "log", "parking_lot", "paths",
@@ -2824,6 +2825,7 @@ dependencies = [
 "time", "tiny_http", "tokio", "tokio-native-tls", "tokio-socks", "url", "util",
@@ -3177,7 +3179,7 @@ version = "0.1.0"
 dependencies = [ "collections", "gpui", -"linkme", +"inventory", "parking_lot", "strum 0.27.1", "theme",
@@ -4327,7 +4329,6 @@ dependencies = [
 "gpui", "indoc", "language", -"linkme", "log", "lsp", "markdown",
@@ -4953,7 +4954,6 @@ dependencies = [
 "clap", "client", "collections", "debug_adapter_extension", "dirs 4.0.0", "dotenv", "env_logger 0.11.8",
@@ -5068,6 +5068,7 @@ dependencies = [
 "task", "toml 0.8.20", "util", +"wasi-preview1-component-adapter-provider", "wasm-encoder 0.221.3", "wasmparser 0.221.3", "wit-component 0.221.3",
@@ -5162,6 +5163,7 @@ dependencies = [
 "fuzzy", "gpui", "language", "log", "num-format", "picker", "project",
@@ -6026,7 +6028,6 @@ dependencies = [
 "language", "language_model", "linkify", -"linkme", "log", "markdown", "menu",
@@ -8163,26 +8164,6 @@ dependencies = [
 "memchr",
 ]

-[[package]]
-name = "linkme"
-version = "0.3.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22d227772b5999ddc0690e733f734f95ca05387e329c4084fe65678c51198ffe"
-dependencies = [
- "linkme-impl",
-]
-
-[[package]]
-name = "linkme-impl"
-version = "0.3.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "71a98813fa0073a317ed6a8055dcd4722a49d9b862af828ee68449adb799b6be"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.100",
-]
-
 [[package]]
 name = "linux-raw-sys"
 version = "0.4.15"
@@ -9105,7 +9086,6 @@ dependencies = [
 "component", "db", "gpui", -"linkme", "rpc", "settings", "sum_tree",
@@ -11862,7 +11842,6 @@ dependencies = [
 "clock", "dap", "dap_adapters", "debug_adapter_extension", "env_logger 0.11.8", "extension", "extension_host",
@@ -15687,7 +15666,6 @@ dependencies = [
 "gpui", "icons", "itertools 0.14.0", -"linkme", "menu", "serde", "settings",
@@ -15708,7 +15686,6 @@ dependencies = [
 "component", "editor", "gpui", -"linkme", "settings", "theme", "ui",
@@ -15720,7 +15697,6 @@ name = "ui_macros"
 version = "0.1.0", dependencies = [ "convert_case 0.8.0", -"linkme", "proc-macro2", "quote", "syn 1.0.109",
@@ -16239,6 +16215,12 @@ dependencies = [
 "wit-bindgen-rt 0.39.0",
 ]

+[[package]]
+name = "wasi-preview1-component-adapter-provider"
+version = "29.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcd9f21bbde82ba59e415a8725e6ad0d0d7e9e460b1a3ccbca5bdee952c1a324"
+
 [[package]]
 name = "wasite"
 version = "0.1.0"
@@ -16941,7 +16923,6 @@ dependencies = [
 "gpui", "install_cli", "language", -"linkme", "picker", "project", "schemars",
@@ -18542,7 +18523,7 @@ dependencies = [

 [[package]]
 name = "zed"
-version = "0.187.6"
+version = "0.188.0"
 dependencies = [
  "activity_indicator",
  "agent",
@@ -18574,7 +18555,6 @@ dependencies = [
 "dap", "dap_adapters", "db", "debug_adapter_extension", "debugger_tools", "debugger_ui", "diagnostics",
@@ -18736,9 +18716,9 @@ dependencies = [

 [[package]]
 name = "zed_llm_client"
-version = "0.8.2"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9be71e2f9b271e1eb8eb3e0d986075e770d1a0a299fb036abc3f1fc13a2fa7eb"
+checksum = "16d993fc42f9ec43ab76fa46c6eb579a66e116bb08cd2bc9a67f3afcaa05d39d"
 dependencies = [
  "anyhow",
  "serde",
Cargo.toml (workspace manifest)

@@ -465,7 +465,6 @@ jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed" ,r
 libc = "0.2"
 libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
 linkify = "0.10.0"
-linkme = "0.3.31"
 log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
 lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "c9c189f1c5dd53c624a419ce35bc77ad6a908d18" }
 markup5ever_rcdom = "0.3.0"
@@ -595,6 +594,7 @@ url = "2.2"
 urlencoding = "2.1.2"
 uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] }
 walkdir = "2.3"
+wasi-preview1-component-adapter-provider = "29"
 wasm-encoder = "0.221"
 wasmparser = "0.221"
 wasmtime = { version = "29", default-features = false, features = [
@@ -608,7 +608,7 @@ wasmtime-wasi = "29"
 which = "6.0.0"
 wit-component = "0.221"
 workspace-hack = "0.1.0"
-zed_llm_client = "0.8.2"
+zed_llm_client = "0.8.1"
 zstd = "0.11"

 [workspace.dependencies.async-stripe]
@@ -788,6 +788,9 @@ let_underscore_future = "allow"
 # running afoul of the borrow checker.
 too_many_arguments = "allow"

+# We often have large enum variants yet we rarely actually bother with splitting them up.
+large_enum_variant = "allow"
+
 [workspace.metadata.cargo-machete]
 ignored = [
     "bindgen",
@@ -795,7 +798,6 @@ ignored = [
     "prost_build",
     "serde",
     "component",
-    "linkme",
     "documented",
     "workspace-hack",
 ]
Dockerfile

@@ -1,6 +1,6 @@
 # syntax = docker/dockerfile:1.2

-FROM rust:1.86-bookworm as builder
+FROM rust:1.87-bookworm as builder
 WORKDIR app
 COPY . .
@@ -766,7 +766,7 @@
 "alt-ctrl-r": "project_panel::RevealInFileManager",
 "ctrl-shift-enter": "project_panel::OpenWithSystem",
 "shift-find": "project_panel::NewSearchInDirectory",
 "ctrl-shift-f": "project_panel::NewSearchInDirectory",
 "ctrl-alt-shift-f": "project_panel::NewSearchInDirectory",
 "shift-down": "menu::SelectNext",
 "shift-up": "menu::SelectPrevious",
 "escape": "menu::Cancel"
@@ -928,7 +928,6 @@
 "alt-b": ["terminal::SendText", "\u001bb"],
 "alt-f": ["terminal::SendText", "\u001bf"],
 "alt-.": ["terminal::SendText", "\u001b."],
 "ctrl-delete": ["terminal::SendText", "\u001bd"],
 // Overrides for conflicting keybindings
 "ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
 "ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
@@ -825,7 +825,7 @@
 "alt-cmd-r": "project_panel::RevealInFileManager",
 "ctrl-shift-enter": "project_panel::OpenWithSystem",
 "cmd-alt-backspace": ["project_panel::Delete", { "skip_prompt": false }],
-"cmd-shift-f": "project_panel::NewSearchInDirectory",
+"cmd-alt-shift-f": "project_panel::NewSearchInDirectory",
 "shift-down": "menu::SelectNext",
 "shift-up": "menu::SelectPrevious",
 "escape": "menu::Cancel"
@@ -1011,7 +1011,7 @@
 "alt-right": ["terminal::SendText", "\u001bf"],
 "alt-b": ["terminal::SendText", "\u001bb"],
 "alt-f": ["terminal::SendText", "\u001bf"],
 "ctrl-delete": ["terminal::SendText", "\u001bd"],
 "alt-.": ["terminal::SendText", "\u001b."],
 // There are conflicting bindings for these keys in the global context.
 // these bindings override them, remove at your own risk:
 "up": ["terminal::SendKeystroke", "up"],
@@ -51,7 +51,9 @@
 "ctrl-k ctrl-l": "editor::ConvertToLowerCase",
 "shift-alt-m": "markdown::OpenPreviewToTheSide",
 "ctrl-backspace": "editor::DeleteToPreviousWordStart",
-"ctrl-delete": "editor::DeleteToNextWordEnd"
+"ctrl-delete": "editor::DeleteToNextWordEnd",
+"f3": "editor::FindNextMatch",
+"shift-f3": "editor::FindPreviousMatch"
 }
 },
 {
@@ -53,7 +53,9 @@
 "cmd-shift-j": "editor::JoinLines",
 "shift-alt-m": "markdown::OpenPreviewToTheSide",
 "ctrl-backspace": "editor::DeleteToPreviousWordStart",
-"ctrl-delete": "editor::DeleteToNextWordEnd"
+"ctrl-delete": "editor::DeleteToNextWordEnd",
+"cmd-g": "editor::FindNextMatch",
+"cmd-shift-g": "editor::FindPreviousMatch"
 }
 },
 {
@@ -335,7 +335,9 @@
 // Whether to show onboarding banners in the titlebar.
 "show_onboarding_banner": true,
 // Whether to show user picture in the titlebar.
-"show_user_picture": true
+"show_user_picture": true,
+// Whether to show the sign in button in the titlebar.
+"show_sign_in": true
 },
 // Scrollbar related settings
 "scrollbar": {
@@ -756,6 +758,8 @@
 "stream_edits": false,
 // When enabled, agent edits will be displayed in single-file editors for review
 "single_file_review": true,
+// When enabled, show voting thumbs for feedback on agent edits.
+"enable_feedback": true,
 "default_profile": "write",
 "profiles": {
 "write": {
@@ -47,12 +47,12 @@ heed.workspace = true
 html_to_markdown.workspace = true
 http_client.workspace = true
 indexed_docs.workspace = true
+inventory.workspace = true
 itertools.workspace = true
 jsonschema.workspace = true
 language.workspace = true
 language_model.workspace = true
 language_model_selector.workspace = true
-linkme.workspace = true
 log.workspace = true
 lsp.workspace = true
 markdown.workspace = true
@@ -185,12 +185,14 @@ pub(crate) fn default_markdown_style(window: &Window, cx: &App) -> MarkdownStyle
 let ui_font_size = TextSize::Default.rems(cx);
 let buffer_font_size = TextSize::Small.rems(cx);
 let mut text_style = window.text_style();
+let line_height = buffer_font_size * 1.75;

 text_style.refine(&TextStyleRefinement {
 font_family: Some(theme_settings.ui_font.family.clone()),
 font_fallbacks: theme_settings.ui_font.fallbacks.clone(),
 font_features: Some(theme_settings.ui_font.features.clone()),
 font_size: Some(ui_font_size.into()),
+line_height: Some(line_height.into()),
 color: Some(cx.theme().colors().text),
 ..Default::default()
 });
@@ -1720,10 +1722,11 @@ impl ActiveThread {
 .on_action(cx.listener(Self::confirm_editing_message))
 .capture_action(cx.listener(Self::paste))
 .min_h_6()
-.flex_grow()
 .w_full()
+.flex_grow()
 .gap_2()
-.child(EditorElement::new(
+.child(state.context_strip.clone())
+.child(div().pt(px(-3.)).px_neg_0p5().child(EditorElement::new(
 &state.editor,
 EditorStyle {
 background: colors.editor_background,
@@ -1732,8 +1735,7 @@ impl ActiveThread {
 syntax: cx.theme().syntax().clone(),
 ..Default::default()
 },
-))
-.child(state.context_strip.clone())
+)))
 }

 fn render_message(&self, ix: usize, window: &mut Window, cx: &mut Context<Self>) -> AnyElement {
@@ -1861,7 +1863,8 @@ impl ActiveThread {
 .child(open_as_markdown),
 )
 .into_any_element(),
-None => feedback_container
+None if AssistantSettings::get_global(cx).enable_feedback =>
+feedback_container
 .child(
 div().visible_on_hover("feedback_container").child(
 Label::new(
@@ -1904,6 +1907,9 @@ impl ActiveThread {
 .child(open_as_markdown),
 )
 .into_any_element(),
+None => feedback_container
+.child(h_flex().child(open_as_markdown))
+.into_any_element(),
 };

 let message_is_empty = message.should_display_content();
@@ -1917,16 +1923,6 @@ impl ActiveThread {
 v_flex()
 .w_full()
 .gap_1()
-.when(!message_is_empty, |parent| {
-parent.child(div().min_h_6().child(self.render_message_content(
-message_id,
-rendered_message,
-has_tool_uses,
-workspace.clone(),
-window,
-cx,
-)))
-})
 .when(!added_context.is_empty(), |parent| {
 parent.child(h_flex().flex_wrap().gap_1().children(
 added_context.into_iter().map(|added_context| {
@@ -1945,6 +1941,16 @@ impl ActiveThread {
 }),
 ))
 })
+.when(!message_is_empty, |parent| {
+parent.child(div().pt_0p5().min_h_6().child(self.render_message_content(
+message_id,
+rendered_message,
+has_tool_uses,
+workspace.clone(),
+window,
+cx,
+)))
+})
 .into_any_element()
 }
 });
@@ -1970,6 +1976,7 @@ impl ActiveThread {
 h_flex()
 .p_2p5()
 .gap_1()
+.items_end()
 .children(message_content)
 .when_some(editing_message_state, |this, state| {
 let focus_handle = state.editor.focus_handle(cx).clone();
@@ -1983,6 +1990,7 @@ impl ActiveThread {
 )
 .shape(ui::IconButtonShape::Square)
 .icon_color(Color::Error)
 .icon_size(IconSize::Small)
 .tooltip({
 let focus_handle = focus_handle.clone();
 move |window, cx| {
@@ -2000,11 +2008,12 @@ impl ActiveThread {
 .child(
 IconButton::new(
 "confirm-edit-message",
-IconName::Check,
+IconName::Return,
 )
+.disabled(state.editor.read(cx).is_empty(cx))
 .shape(ui::IconButtonShape::Square)
-.icon_color(Color::Success)
+.icon_color(Color::Muted)
 .icon_size(IconSize::Small)
 .tooltip({
 let focus_handle = focus_handle.clone();
 move |window, cx| {
@@ -2024,9 +2033,6 @@ impl ActiveThread {
 )
 }),
 )
-.when(editing_message_state.is_none(), |this| {
-this.tooltip(Tooltip::text("Click To Edit"))
-})
 .on_click(cx.listener({
 let message_segments = message.segments.clone();
 move |this, _, window, cx| {
@@ -2067,6 +2073,16 @@ impl ActiveThread {

 let panel_background = cx.theme().colors().panel_background;

+let backdrop = div()
+.id("backdrop")
+.stop_mouse_events_except_scroll()
+.absolute()
+.inset_0()
+.size_full()
+.bg(panel_background)
+.opacity(0.8)
+.on_click(cx.listener(Self::handle_cancel_click));
+
 v_flex()
 .w_full()
 .map(|parent| {
@@ -2236,15 +2252,7 @@ impl ActiveThread {
 })
 .when(after_editing_message, |parent| {
 // Backdrop to dim out the whole thread below the editing user message
-parent.relative().child(
-div()
-.stop_mouse_events_except_scroll()
-.absolute()
-.inset_0()
-.size_full()
-.bg(panel_background)
-.opacity(0.8),
-)
+parent.relative().child(backdrop)
 })
 .into_any()
 }
@@ -2359,6 +2367,7 @@ impl ActiveThread {
 move |el, range, metadata, _, cx| {
 let can_expand = metadata.line_count
 >= MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK;

 if !can_expand {
 return el;
 }
@@ -2366,6 +2375,7 @@ impl ActiveThread {
 let is_expanded = active_thread
 .read(cx)
 .is_codeblock_expanded(message_id, range.start);

 if is_expanded {
 return el;
 }
@@ -3388,14 +3398,14 @@ impl ActiveThread {
 self.expanded_code_blocks
 .get(&(message_id, ix))
 .copied()
-.unwrap_or(false)
+.unwrap_or(true)
 }

 pub fn toggle_codeblock_expanded(&mut self, message_id: MessageId, ix: usize) {
 let is_expanded = self
 .expanded_code_blocks
 .entry((message_id, ix))
-.or_insert(false);
+.or_insert(true);
 *is_expanded = !*is_expanded;
 }
 }
@@ -3411,6 +3421,7 @@ impl Render for ActiveThread {
 v_flex()
 .size_full()
 .relative()
+.bg(cx.theme().colors().panel_background)
 .on_mouse_move(cx.listener(|this, _, _, cx| {
 this.show_scrollbar = true;
 this.hide_scrollbar_later(cx);
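The last two hunks above flip the default for code blocks in agent messages from collapsed to expanded: `is_codeblock_expanded` now falls back to `true` and `toggle_codeblock_expanded` seeds new entries with `true`. A minimal sketch of that bookkeeping pattern, using plain `u64` ids instead of Zed's `MessageId` type (an assumption made purely for illustration):

```rust
use std::collections::HashMap;

// Tracks per-code-block expansion state, keyed by (message id, block index).
struct CodeBlockState {
    expanded: HashMap<(u64, usize), bool>,
}

impl CodeBlockState {
    // Blocks with no recorded state now default to expanded.
    fn is_expanded(&self, message_id: u64, ix: usize) -> bool {
        self.expanded.get(&(message_id, ix)).copied().unwrap_or(true)
    }

    // Toggling seeds the entry with the same default before flipping it.
    fn toggle(&mut self, message_id: u64, ix: usize) {
        let entry = self.expanded.entry((message_id, ix)).or_insert(true);
        *entry = !*entry;
    }
}

fn main() {
    let mut state = CodeBlockState { expanded: HashMap::new() };
    assert!(state.is_expanded(1, 0)); // default is now expanded
    state.toggle(1, 0);
    assert!(!state.is_expanded(1, 0)); // explicitly collapsed after one toggle
}
```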
@@ -85,7 +85,6 @@ actions!(
 KeepAll,
 Follow,
 ResetTrialUpsell,
-ResetTrialEndUpsell,
 ]
 );
@@ -18,8 +18,8 @@ use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageMod
 use project::context_server_store::{ContextServerStatus, ContextServerStore};
 use settings::{Settings, update_settings_file};
 use ui::{
-Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Scrollbar, ScrollbarState,
-Switch, SwitchColor, Tooltip, prelude::*,
+Disclosure, ElevationIndex, Indicator, Scrollbar, ScrollbarState, Switch, SwitchColor, Tooltip,
+prelude::*,
 };
 use util::ResultExt as _;
 use zed_actions::ExtensionCategoryFilter;
@@ -142,7 +142,7 @@ impl AgentConfiguration {
 .expanded_provider_configurations
 .get(&provider.id())
 .copied()
-.unwrap_or(true);
+.unwrap_or(false);

 v_flex()
 .pt_3()
@@ -201,12 +201,12 @@ impl AgentConfiguration {
 .on_click(cx.listener({
 let provider_id = provider.id().clone();
 move |this, _event, _window, _cx| {
-let is_open = this
+let is_expanded = this
 .expanded_provider_configurations
 .entry(provider_id.clone())
-.or_insert(true);
+.or_insert(false);

-*is_open = !*is_open;
+*is_expanded = !*is_expanded;
 }
 })),
 ),
@@ -214,9 +214,9 @@ impl AgentConfiguration {
 )
 .when(is_expanded, |parent| match configuration_view {
 Some(configuration_view) => parent.child(configuration_view),
-None => parent.child(div().child(Label::new(format!(
+None => parent.child(Label::new(format!(
 "No configuration view for {provider_name}",
-)))),
+))),
 })
 }
@@ -230,7 +230,8 @@ impl AgentConfiguration {
 .p(DynamicSpacing::Base16.rems(cx))
 .pr(DynamicSpacing::Base20.rems(cx))
 .gap_4()
 .flex_1()
 .border_b_1()
 .border_color(cx.theme().colors().border)
 .child(
 v_flex()
 .gap_0p5()
@@ -331,7 +332,8 @@ impl AgentConfiguration {
 .p(DynamicSpacing::Base16.rems(cx))
 .pr(DynamicSpacing::Base20.rems(cx))
 .gap_2p5()
 .flex_1()
 .border_b_1()
 .border_color(cx.theme().colors().border)
 .child(Headline::new("General Settings"))
 .child(self.render_command_permission(cx))
 .child(self.render_single_file_review(cx))
@@ -344,18 +346,17 @@ impl AgentConfiguration {
 ) -> impl IntoElement {
 let context_server_ids = self.context_server_store.read(cx).all_server_ids().clone();

-const SUBHEADING: &str = "Connect to context servers via the Model Context Protocol either via Zed extensions or directly.";
-
 v_flex()
 .p(DynamicSpacing::Base16.rems(cx))
 .pr(DynamicSpacing::Base20.rems(cx))
 .gap_2()
 .flex_1()
 .border_b_1()
 .border_color(cx.theme().colors().border)
 .child(
 v_flex()
 .gap_0p5()
 .child(Headline::new("Model Context Protocol (MCP) Servers"))
-.child(Label::new(SUBHEADING).color(Color::Muted)),
+.child(Label::new("Connect to context servers via the Model Context Protocol either via Zed extensions or directly.").color(Color::Muted)),
 )
 .children(
 context_server_ids.into_iter().map(|context_server_id| {
@@ -630,9 +631,7 @@ impl Render for AgentConfiguration {
 .size_full()
 .overflow_y_scroll()
 .child(self.render_general_settings_section(cx))
-.child(Divider::horizontal().color(DividerColor::Border))
 .child(self.render_context_servers_section(window, cx))
-.child(Divider::horizontal().color(DividerColor::Border))
 .child(self.render_provider_configuration_section(cx)),
 )
 .child(
@@ -30,7 +30,6 @@ pub(crate) struct ConfigureContextServerModal {
 context_server_store: Entity<ContextServerStore>,
 }

-#[allow(clippy::large_enum_variant)]
 enum Configuration {
 NotAvailable,
 Required(ConfigurationRequiredState),
@@ -3,7 +3,7 @@ use std::path::Path;
 use std::sync::Arc;
 use std::time::Duration;

-use db::kvp::{Dismissable, KEY_VALUE_STORE};
+use db::kvp::KEY_VALUE_STORE;
 use markdown::Markdown;
 use serde::{Deserialize, Serialize};
@@ -66,8 +66,8 @@ use crate::ui::AgentOnboardingModal;
 use crate::{
 AddContextServer, AgentDiffPane, ContextStore, DeleteRecentlyOpenThread, ExpandMessageEditor,
 Follow, InlineAssistant, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff,
-OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, TextThreadStore, ThreadEvent,
-ToggleContextPicker, ToggleNavigationMenu, ToggleOptionsMenu,
+OpenHistory, ResetTrialUpsell, TextThreadStore, ThreadEvent, ToggleContextPicker,
+ToggleNavigationMenu, ToggleOptionsMenu,
 };

 const AGENT_PANEL_KEY: &str = "agent_panel";
@@ -157,10 +157,7 @@ pub fn init(cx: &mut App) {
 window.refresh();
 })
 .register_action(|_workspace, _: &ResetTrialUpsell, _window, cx| {
-TrialUpsell::set_dismissed(false, cx);
-})
-.register_action(|_workspace, _: &ResetTrialEndUpsell, _window, cx| {
-TrialEndUpsell::set_dismissed(false, cx);
+set_trial_upsell_dismissed(false, cx);
 });
 },
 )
@@ -1914,23 +1911,12 @@ impl AgentPanel {
 }
 }

-fn should_render_trial_end_upsell(&self, cx: &mut Context<Self>) -> bool {
-if TrialEndUpsell::dismissed() {
-return false;
-}
-
-let plan = self.user_store.read(cx).current_plan();
-let has_previous_trial = self.user_store.read(cx).trial_started_at().is_some();
-
-matches!(plan, Some(Plan::Free)) && has_previous_trial
-}
-
 fn should_render_upsell(&self, cx: &mut Context<Self>) -> bool {
 if !matches!(self.active_view, ActiveView::Thread { .. }) {
 return false;
 }

-if self.hide_trial_upsell || TrialUpsell::dismissed() {
+if self.hide_trial_upsell || dismissed_trial_upsell() {
 return false;
 }
@@ -1976,115 +1962,125 @@ impl AgentPanel {
 move |toggle_state, _window, cx| {
 let toggle_state_bool = toggle_state.selected();

 TrialUpsell::set_dismissed(toggle_state_bool, cx);
 set_trial_upsell_dismissed(toggle_state_bool, cx);
 },
 );

 let contents = div()
 .size_full()
 .gap_2()
 .flex()
 .flex_col()
 .child(Headline::new("Build better with Zed Pro").size(HeadlineSize::Small))
 .child(
 Label::new("Try Zed Pro for free for 14 days - no credit card required.")
 .size(LabelSize::Small),
 )
 .child(
 Label::new(
 "Use your own API keys or enable usage-based billing once you hit the cap.",
 )
 .color(Color::Muted),
 )
 .child(
 h_flex()
 Some(
 div().p_2().child(
 v_flex()
 .w_full()
 .px_neg_1()
 .justify_between()
 .items_center()
 .child(h_flex().items_center().gap_1().child(checkbox))
 .elevation_2(cx)
 .rounded(px(8.))
 .bg(cx.theme().colors().background.alpha(0.5))
 .p(px(3.))

 .child(
 h_flex()
 div()
 .gap_2()
 .flex()
 .flex_col()
 .size_full()
 .border_1()
 .rounded(px(5.))
 .border_color(cx.theme().colors().text.alpha(0.1))
 .overflow_hidden()
 .relative()
 .bg(cx.theme().colors().panel_background)
 .px_4()
 .py_3()
 .child(
 Button::new("dismiss-button", "Not Now")
 .style(ButtonStyle::Transparent)
 .color(Color::Muted)
 .on_click({
 let agent_panel = cx.entity();
 move |_, _, cx| {
 agent_panel.update(cx, |this, cx| {
 this.hide_trial_upsell = true;
 cx.notify();
 });
 }
 }),
 div()
 .absolute()
 .top_0()
 .right(px(-1.0))
 .w(px(441.))
 .h(px(167.))
 .child(
 Vector::new(VectorName::Grid, rems_from_px(441.), rems_from_px(167.)).color(ui::Color::Custom(cx.theme().colors().text.alpha(0.1)))
 )
 )
 .child(
 Button::new("cta-button", "Start Trial")
 .style(ButtonStyle::Transparent)
 .on_click(|_, _, cx| cx.open_url(&zed_urls::account_url(cx))),
 ),
 ),
 );

 Some(self.render_upsell_container(cx, contents))
 }

 fn render_trial_end_upsell(
 &self,
 _window: &mut Window,
 cx: &mut Context<Self>,
 ) -> Option<impl IntoElement> {
 if !self.should_render_trial_end_upsell(cx) {
 return None;
 }

 Some(
 self.render_upsell_container(
 cx,
 div()
 .size_full()
 .gap_2()
 .flex()
 .flex_col()
 .child(
 Headline::new("Your Zed Pro trial has expired.").size(HeadlineSize::Small),
 )
 .child(
 Label::new("You've been automatically reset to the free plan.")
 .size(LabelSize::Small),
 )
 .child(
 h_flex()
 .w_full()
 .px_neg_1()
 .justify_between()
 .items_center()
 .child(div())
 div()
 .absolute()
 .top(px(-8.0))
 .right_0()
 .w(px(400.))
 .h(px(92.))
 .child(
 Vector::new(VectorName::AiGrid, rems_from_px(400.), rems_from_px(92.)).color(ui::Color::Custom(cx.theme().colors().text.alpha(0.32)))
 )
 )
 // .child(
 // div()
 // .absolute()
 // .top_0()
 // .right(px(360.))
 // .size(px(401.))
 // .overflow_hidden()
 // .bg(cx.theme().colors().panel_background)
 // )
 .child(
 div()
 .absolute()
 .top_0()
 .right_0()
 .w(px(660.))
 .h(px(401.))
 .overflow_hidden()
 .bg(linear_gradient(
 75.,
 linear_color_stop(cx.theme().colors().panel_background.alpha(0.01), 1.0),
 linear_color_stop(cx.theme().colors().panel_background, 0.45),
 ))
 )
 .child(Headline::new("Build better with Zed Pro").size(HeadlineSize::Small))
 .child(Label::new("Try Zed Pro for free for 14 days - no credit card required.").size(LabelSize::Small))
 .child(Label::new("Use your own API keys or enable usage-based billing once you hit the cap.").color(Color::Muted))
 .child(
 h_flex()
 .gap_2()
 .w_full()
 .px_neg_1()
 .justify_between()
 .items_center()
 .child(h_flex().items_center().gap_1().child(checkbox))
 .child(
 Button::new("dismiss-button", "Stay on Free")
 .style(ButtonStyle::Transparent)
 .color(Color::Muted)
 .on_click({
 let agent_panel = cx.entity();
 move |_, _, cx| {
 agent_panel.update(cx, |_this, cx| {
 TrialEndUpsell::set_dismissed(true, cx);
 cx.notify();
 });
 }
 }),
 )
 .child(
 Button::new("cta-button", "Upgrade to Zed Pro")
 .style(ButtonStyle::Transparent)
 .on_click(|_, _, cx| {
 cx.open_url(&zed_urls::account_url(cx))
 }),
 h_flex()
 .gap_2()
 .child(
 Button::new("dismiss-button", "Not Now")
 .style(ButtonStyle::Transparent)
 .color(Color::Muted)
 .on_click({
 let agent_panel = cx.entity();
 move |_, _, cx| {
 agent_panel.update(
 cx,
 |this, cx| {
 let hidden =
 this.hide_trial_upsell;
 println!("hidden: {}", hidden);
 this.hide_trial_upsell = true;
 let new_hidden =
 this.hide_trial_upsell;
 println!(
 "new_hidden: {}",
 new_hidden
 );

 cx.notify();
 },
 );
 }
 }),
 )
 .child(
 Button::new("cta-button", "Start Trial")
 .style(ButtonStyle::Transparent)
 .on_click(|_, _, cx| {
 cx.open_url(&zed_urls::account_url(cx))
 }),
 ),
 ),
 ),
 ),
@@ -2092,91 +2088,6 @@ impl AgentPanel {
 )
 }

-fn render_upsell_container(&self, cx: &mut Context<Self>, content: Div) -> Div {
-div().p_2().child(
-v_flex()
-.w_full()
-.elevation_2(cx)
-.rounded(px(8.))
-.bg(cx.theme().colors().background.alpha(0.5))
-.p(px(3.))
-.child(
-div()
-.gap_2()
-.flex()
-.flex_col()
-.size_full()
-.border_1()
-.rounded(px(5.))
-.border_color(cx.theme().colors().text.alpha(0.1))
-.overflow_hidden()
-.relative()
-.bg(cx.theme().colors().panel_background)
-.px_4()
-.py_3()
-.child(
-div()
-.absolute()
-.top_0()
-.right(px(-1.0))
-.w(px(441.))
-.h(px(167.))
-.child(
-Vector::new(
-VectorName::Grid,
-rems_from_px(441.),
-rems_from_px(167.),
-)
-.color(ui::Color::Custom(cx.theme().colors().text.alpha(0.1))),
-),
-)
-.child(
-div()
-.absolute()
-.top(px(-8.0))
-.right_0()
-.w(px(400.))
-.h(px(92.))
-.child(
-Vector::new(
-VectorName::AiGrid,
-rems_from_px(400.),
-rems_from_px(92.),
-)
-.color(ui::Color::Custom(cx.theme().colors().text.alpha(0.32))),
-),
-)
-// .child(
-// div()
-// .absolute()
-// .top_0()
-// .right(px(360.))
-// .size(px(401.))
-// .overflow_hidden()
-// .bg(cx.theme().colors().panel_background)
-// )
-.child(
-div()
-.absolute()
-.top_0()
-.right_0()
-.w(px(660.))
-.h(px(401.))
-.overflow_hidden()
-.bg(linear_gradient(
-75.,
-linear_color_stop(
-cx.theme().colors().panel_background.alpha(0.01),
-1.0,
-),
-linear_color_stop(cx.theme().colors().panel_background, 0.45),
-)),
-)
-.child(content),
-),
-)
-}
-
 fn render_active_thread_or_empty_state(
 &self,
 window: &mut Window,
@@ -2224,6 +2135,7 @@ impl AgentPanel {

 v_flex()
 .size_full()
+.bg(cx.theme().colors().panel_background)
 .when(recent_history.is_empty(), |this| {
 let configuration_error_ref = &configuration_error;
 this.child(
@@ -2894,7 +2806,6 @@ impl Render for AgentPanel {
 .on_action(cx.listener(Self::toggle_zoom))
 .child(self.render_toolbar(window, cx))
 .children(self.render_trial_upsell(window, cx))
-.children(self.render_trial_end_upsell(window, cx))
 .map(|parent| match &self.active_view {
 ActiveView::Thread { .. } => parent
 .relative()
@@ -3082,14 +2993,25 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate {
 }
 }

-struct TrialUpsell;
+const DISMISSED_TRIAL_UPSELL_KEY: &str = "dismissed-trial-upsell";

-impl Dismissable for TrialUpsell {
-const KEY: &'static str = "dismissed-trial-upsell";
+fn dismissed_trial_upsell() -> bool {
+db::kvp::KEY_VALUE_STORE
+.read_kvp(DISMISSED_TRIAL_UPSELL_KEY)
+.log_err()
+.map_or(false, |s| s.is_some())
 }

-struct TrialEndUpsell;
-
-impl Dismissable for TrialEndUpsell {
-const KEY: &'static str = "dismissed-trial-end-upsell";
+fn set_trial_upsell_dismissed(is_dismissed: bool, cx: &mut App) {
+db::write_and_log(cx, move || async move {
+if is_dismissed {
+db::kvp::KEY_VALUE_STORE
+.write_kvp(DISMISSED_TRIAL_UPSELL_KEY.into(), "1".into())
+.await
+} else {
+db::kvp::KEY_VALUE_STORE
+.delete_kvp(DISMISSED_TRIAL_UPSELL_KEY.into())
+.await
+}
+})
 }
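The hunk above swaps the `Dismissable` trait implementations for hand-rolled `dismissed_trial_upsell` / `set_trial_upsell_dismissed` helpers over the key-value store; both shapes encode "dismissed" as the presence of a key. A sketch of the two shapes side by side, using an in-memory map as a stand-in for Zed's `db::kvp::KEY_VALUE_STORE` (the store and its synchronous API here are assumptions for illustration only):

```rust
use std::collections::HashMap;
use std::sync::Mutex;

// Stand-in for the persistent key-value store.
static KEY_VALUE_STORE: Mutex<Option<HashMap<String, String>>> = Mutex::new(None);

fn read_kvp(key: &str) -> Option<String> {
    KEY_VALUE_STORE.lock().unwrap().get_or_insert_with(HashMap::new).get(key).cloned()
}

fn write_kvp(key: &str, value: &str) {
    KEY_VALUE_STORE.lock().unwrap().get_or_insert_with(HashMap::new).insert(key.into(), value.into());
}

fn delete_kvp(key: &str) {
    KEY_VALUE_STORE.lock().unwrap().get_or_insert_with(HashMap::new).remove(key);
}

// The trait-based shape: a KEY constant plus default methods over the store.
trait Dismissable {
    const KEY: &'static str;

    // "Dismissed" means the key exists, regardless of its value.
    fn dismissed() -> bool {
        read_kvp(Self::KEY).is_some()
    }

    // Dismissing writes a marker value; resetting deletes the key.
    fn set_dismissed(dismissed: bool) {
        if dismissed {
            write_kvp(Self::KEY, "1");
        } else {
            delete_kvp(Self::KEY);
        }
    }
}

struct TrialUpsell;
impl Dismissable for TrialUpsell {
    const KEY: &'static str = "dismissed-trial-upsell";
}

fn main() {
    assert!(!TrialUpsell::dismissed());
    TrialUpsell::set_dismissed(true);
    assert!(TrialUpsell::dismissed());
}
```

The free-function form in the diff does exactly the same reads and writes, just without the trait indirection and with the async store calls wrapped in `db::write_and_log`.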
@@ -942,8 +942,8 @@ impl MentionLink {
 format!("[@{}]({}:{})", title, Self::THREAD, id)
 }
 ThreadContextEntry::Context { path, title } => {
-let filename = path.file_name().unwrap_or_default();
-let escaped_filename = urlencoding::encode(&filename.to_string_lossy()).to_string();
+let filename = path.file_name().unwrap_or_default().to_string_lossy();
+let escaped_filename = urlencoding::encode(&filename);
 format!(
 "[@{}]({}:{}{})",
 title,
@@ -84,6 +84,12 @@ impl ContextStrip {
 }
 }

+/// Whether or not the context strip has items to display
+pub fn has_context_items(&self, cx: &App) -> bool {
+self.context_store.read(cx).context().next().is_some()
+|| self.suggested_context(cx).is_some()
+}
+
 fn added_contexts(&self, cx: &App) -> Vec<AddedContext> {
 if let Some(workspace) = self.workspace.upgrade() {
 let project = workspace.read(cx).project().read(cx);
@@ -104,14 +110,14 @@ impl ContextStrip {
 }
 }

-fn suggested_context(&self, cx: &Context<Self>) -> Option<SuggestedContext> {
+fn suggested_context(&self, cx: &App) -> Option<SuggestedContext> {
 match self.suggest_context_kind {
 SuggestContextKind::File => self.suggested_file(cx),
 SuggestContextKind::Thread => self.suggested_thread(cx),
 }
 }

-fn suggested_file(&self, cx: &Context<Self>) -> Option<SuggestedContext> {
+fn suggested_file(&self, cx: &App) -> Option<SuggestedContext> {
 let workspace = self.workspace.upgrade()?;
 let active_item = workspace.read(cx).active_item(cx)?;
@@ -138,7 +144,7 @@ impl ContextStrip {
 })
 }

-fn suggested_thread(&self, cx: &Context<Self>) -> Option<SuggestedContext> {
+fn suggested_thread(&self, cx: &App) -> Option<SuggestedContext> {
 if !self.context_picker.read(cx).allow_threads() {
 return None;
 }
@@ -11,7 +11,6 @@ use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist};
 use crate::{RemoveAllContext, ToggleContextPicker};
 use client::ErrorExt;
 use collections::VecDeque;
-use db::kvp::Dismissable;
 use editor::display_map::EditorMargins;
 use editor::{
 ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer,
@@ -34,6 +33,7 @@ use ui::utils::WithRemSize;
 use ui::{
 CheckboxWithLabel, IconButtonShape, KeyBinding, Popover, PopoverMenuHandle, Tooltip, prelude::*,
 };
+use util::ResultExt;
 use workspace::Workspace;

 pub struct PromptEditor<T> {
@@ -451,7 +451,7 @@ impl<T: 'static> PromptEditor<T> {
 editor.move_to_end(&Default::default(), window, cx)
 });
 }
-} else {
+} else if self.context_strip.read(cx).has_context_items(cx) {
 self.context_strip.focus_handle(cx).focus(window);
 }
 }
@@ -722,7 +722,7 @@ impl<T: 'static> PromptEditor<T> {
 .child(CheckboxWithLabel::new(
 "dont-show-again",
 Label::new("Don't show again"),
-if RateLimitNotice::dismissed() {
+if dismissed_rate_limit_notice() {
 ui::ToggleState::Selected
 } else {
 ui::ToggleState::Unselected
@@ -734,7 +734,7 @@ impl<T: 'static> PromptEditor<T> {
 ui::ToggleState::Selected => true,
 };

-RateLimitNotice::set_dismissed(is_dismissed, cx);
+set_rate_limit_notice_dismissed(is_dismissed, cx)
 },
 ))
 .child(
@@ -974,7 +974,7 @@ impl PromptEditor<BufferCodegen> {
 CodegenStatus::Error(error) => {
 if cx.has_flag::<ZedProFeatureFlag>()
 && error.error_code() == proto::ErrorCode::RateLimitExceeded
-&& !RateLimitNotice::dismissed()
+&& !dismissed_rate_limit_notice()
 {
 self.show_rate_limit_notice = true;
 cx.notify();
@@ -1180,10 +1180,27 @@ impl PromptEditor<TerminalCodegen> {
 }
 }

-struct RateLimitNotice;
+const DISMISSED_RATE_LIMIT_NOTICE_KEY: &str = "dismissed-rate-limit-notice";

-impl Dismissable for RateLimitNotice {
-const KEY: &'static str = "dismissed-rate-limit-notice";
+fn dismissed_rate_limit_notice() -> bool {
+db::kvp::KEY_VALUE_STORE
+.read_kvp(DISMISSED_RATE_LIMIT_NOTICE_KEY)
+.log_err()
+.map_or(false, |s| s.is_some())
 }

+fn set_rate_limit_notice_dismissed(is_dismissed: bool, cx: &mut App) {
+db::write_and_log(cx, move || async move {
+if is_dismissed {
+db::kvp::KEY_VALUE_STORE
+.write_kvp(DISMISSED_RATE_LIMIT_NOTICE_KEY.into(), "1".into())
+.await
+} else {
+db::kvp::KEY_VALUE_STORE
+.delete_kvp(DISMISSED_RATE_LIMIT_NOTICE_KEY.into())
+.await
+}
+})
+}

 pub enum CodegenStatus {
@@ -401,7 +401,7 @@ impl MessageEditor {
 fn move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context<Self>) {
 if self.context_picker_menu_handle.is_deployed() {
 cx.propagate();
-} else {
+} else if self.context_strip.read(cx).has_context_items(cx) {
 self.context_strip.focus_handle(cx).focus(window);
 }
 }
@@ -191,7 +191,7 @@ impl TerminalInlineAssistant {
 };

 self.prompt_history.retain(|prompt| *prompt != user_prompt);
-self.prompt_history.push_back(user_prompt.clone());
+self.prompt_history.push_back(user_prompt);
 if self.prompt_history.len() > PROMPT_HISTORY_MAX_LEN {
 self.prompt_history.pop_front();
 }
@@ -2583,7 +2583,7 @@ impl Thread {
 .read(cx)
 .current_user()
 .map(|user| user.github_login.clone());
-let client = self.project.read(cx).client().clone();
+let client = self.project.read(cx).client();
 let serialize_task = self.serialize(cx);

 cx.background_executor()
@@ -260,10 +260,7 @@ impl ThreadHistory {
 }
 });

-self.search_state = SearchState::Searching {
-query: query.clone(),
-_task: task,
-};
+self.search_state = SearchState::Searching { query, _task: task };
 cx.notify();
 }
@@ -1,8 +1,8 @@
-use std::sync::OnceLock;

 use collections::HashMap;
 use component::ComponentId;
 use gpui::{App, Entity, WeakEntity};
-use linkme::distributed_slice;
+use std::sync::OnceLock;
 use ui::{AnyElement, Component, ComponentScope, Window};
 use workspace::Workspace;
@@ -12,9 +12,15 @@ use crate::ActiveThread;
 pub type PreviewFn =
 fn(WeakEntity<Workspace>, Entity<ActiveThread>, &mut Window, &mut App) -> Option<AnyElement>;

-/// Distributed slice for preview registration functions
-#[distributed_slice]
-pub static __ALL_AGENT_PREVIEWS: [fn() -> (ComponentId, PreviewFn)] = [..];
+pub struct AgentPreviewFn(fn() -> (ComponentId, PreviewFn));
+
+impl AgentPreviewFn {
+pub const fn new(f: fn() -> (ComponentId, PreviewFn)) -> Self {
+Self(f)
+}
+}
+
+inventory::collect!(AgentPreviewFn);

 /// Trait that must be implemented by components that provide agent previews.
 pub trait AgentPreview: Component + Sized {
@@ -36,16 +42,14 @@ pub trait AgentPreview: Component + Sized {
 #[macro_export]
 macro_rules! register_agent_preview {
 ($type:ty) => {
-#[linkme::distributed_slice($crate::ui::preview::__ALL_AGENT_PREVIEWS)]
-static __REGISTER_AGENT_PREVIEW: fn() -> (
-component::ComponentId,
-$crate::ui::preview::PreviewFn,
-) = || {
-(
-<$type as component::Component>::id(),
-<$type as $crate::ui::preview::AgentPreview>::agent_preview,
-)
-};
+inventory::submit! {
+$crate::ui::preview::AgentPreviewFn::new(|| {
+(
+<$type as component::Component>::id(),
+<$type as $crate::ui::preview::AgentPreview>::agent_preview,
+)
+})
+}
 };
 }
@@ -56,8 +60,8 @@ static AGENT_PREVIEW_REGISTRY: OnceLock<HashMap<ComponentId, PreviewFn>> = OnceL
 fn get_or_init_registry() -> &'static HashMap<ComponentId, PreviewFn> {
 AGENT_PREVIEW_REGISTRY.get_or_init(|| {
 let mut map = HashMap::default();
-for register_fn in __ALL_AGENT_PREVIEWS.iter() {
-let (id, preview_fn) = register_fn();
+for register_fn in inventory::iter::<AgentPreviewFn>() {
+let (id, preview_fn) = (register_fn.0)();
 map.insert(id, preview_fn);
 }
 map
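This file (and the `linkme` removals throughout Cargo.lock and the crate manifests) swaps compile-time registration from a `linkme` distributed slice to the `inventory` crate: each `register_agent_preview!` expansion now calls `inventory::submit!`, and the registry is built lazily by walking `inventory::iter`. A self-contained sketch of the same pattern, with hypothetical names (`PreviewRegistration`, `greeting_preview`) rather than Zed's actual types, and assuming the `inventory` crate is a dependency:

```rust
use std::collections::HashMap;
use std::sync::OnceLock;

// One registration record per preview; `inventory::submit!` adds it to a
// global collection that `inventory::iter` can walk at runtime.
pub struct PreviewRegistration {
    pub id: &'static str,
    pub preview: fn() -> String,
}

inventory::collect!(PreviewRegistration);

// What a `register_agent_preview!`-style macro expansion boils down to.
fn greeting_preview() -> String {
    "hello from the greeting preview".to_string()
}

inventory::submit! {
    PreviewRegistration { id: "greeting", preview: greeting_preview }
}

static REGISTRY: OnceLock<HashMap<&'static str, fn() -> String>> = OnceLock::new();

// Built once, on first use, by iterating every submitted registration.
fn registry() -> &'static HashMap<&'static str, fn() -> String> {
    REGISTRY.get_or_init(|| {
        let mut map = HashMap::new();
        for registration in inventory::iter::<PreviewRegistration> {
            map.insert(registration.id, registration.preview);
        }
        map
    })
}

fn main() {
    let preview_fn = registry()["greeting"];
    println!("{}", preview_fn());
}
```

Unlike a `linkme` distributed slice, which places const-constructible statics in a dedicated link section, `inventory` collects submissions itself, so the registering crate only needs the `inventory::submit!` call.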
@@ -34,6 +34,7 @@ pub enum AnthropicModelMode {
 pub enum Model {
 #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
 Claude3_5Sonnet,
+#[default]
 #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
 Claude3_7Sonnet,
 #[serde(
@@ -41,21 +42,6 @@ pub enum Model {
 alias = "claude-3-7-sonnet-thinking-latest"
 )]
 Claude3_7SonnetThinking,
-#[serde(rename = "claude-opus-4", alias = "claude-opus-4-latest")]
-ClaudeOpus4,
-#[serde(
-rename = "claude-opus-4-thinking",
-alias = "claude-opus-4-thinking-latest"
-)]
-ClaudeOpus4Thinking,
-#[default]
-#[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
-ClaudeSonnet4,
-#[serde(
-rename = "claude-sonnet-4-thinking",
-alias = "claude-sonnet-4-thinking-latest"
-)]
-ClaudeSonnet4Thinking,
 #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
 Claude3_5Haiku,
 #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
@@ -103,14 +89,6 @@ impl Model {
 Ok(Self::Claude3Sonnet)
 } else if id.starts_with("claude-3-haiku") {
 Ok(Self::Claude3Haiku)
-} else if id.starts_with("claude-opus-4-thinking") {
-Ok(Self::ClaudeOpus4Thinking)
-} else if id.starts_with("claude-opus-4") {
-Ok(Self::ClaudeOpus4)
-} else if id.starts_with("claude-sonnet-4-thinking") {
-Ok(Self::ClaudeSonnet4Thinking)
-} else if id.starts_with("claude-sonnet-4") {
-Ok(Self::ClaudeSonnet4)
 } else {
 Err(anyhow!("invalid model id"))
 }
@@ -118,10 +96,6 @@ impl Model {

 pub fn id(&self) -> &str {
 match self {
-Model::ClaudeOpus4 => "claude-opus-4-latest",
-Model::ClaudeOpus4Thinking => "claude-opus-4-thinking-latest",
-Model::ClaudeSonnet4 => "claude-sonnet-4-latest",
-Model::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
 Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
 Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
 Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
@@ -136,8 +110,6 @@ impl Model {
 /// The id of the model that should be used for making API requests
 pub fn request_id(&self) -> &str {
 match self {
-Model::ClaudeOpus4 | Model::ClaudeOpus4Thinking => "claude-opus-4-20250514",
-Model::ClaudeSonnet4 | Model::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
 Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
 Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
 Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
@@ -150,10 +122,6 @@ impl Model {

 pub fn display_name(&self) -> &str {
 match self {
-Model::ClaudeOpus4 => "Claude 4 Opus",
-Model::ClaudeOpus4Thinking => "Claude 4 Opus Thinking",
-Model::ClaudeSonnet4 => "Claude 4 Sonnet",
-Model::ClaudeSonnet4Thinking => "Claude 4 Sonnet Thinking",
 Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
 Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
 Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
@@ -169,11 +137,7 @@ impl Model {

 pub fn cache_configuration(&self) -> Option<AnthropicModelCacheConfiguration> {
 match self {
-Self::ClaudeOpus4
-| Self::ClaudeOpus4Thinking
-| Self::ClaudeSonnet4
-| Self::ClaudeSonnet4Thinking
-| Self::Claude3_5Sonnet
+Self::Claude3_5Sonnet
 | Self::Claude3_5Haiku
 | Self::Claude3_7Sonnet
 | Self::Claude3_7SonnetThinking
@@ -192,11 +156,7 @@ impl Model {

 pub fn max_token_count(&self) -> usize {
 match self {
-Self::ClaudeOpus4
-| Self::ClaudeOpus4Thinking
-| Self::ClaudeSonnet4
-| Self::ClaudeSonnet4Thinking
-| Self::Claude3_5Sonnet
+Self::Claude3_5Sonnet
 | Self::Claude3_5Haiku
 | Self::Claude3_7Sonnet
 | Self::Claude3_7SonnetThinking
@@ -213,11 +173,7 @@ impl Model {
 Self::Claude3_5Sonnet
 | Self::Claude3_7Sonnet
 | Self::Claude3_7SonnetThinking
-| Self::Claude3_5Haiku
-| Self::ClaudeOpus4
-| Self::ClaudeOpus4Thinking
-| Self::ClaudeSonnet4
-| Self::ClaudeSonnet4Thinking => 8_192,
+| Self::Claude3_5Haiku => 8_192,
 Self::Custom {
 max_output_tokens, ..
 } => max_output_tokens.unwrap_or(4_096),
@@ -226,11 +182,7 @@ impl Model {

 pub fn default_temperature(&self) -> f32 {
 match self {
-Self::ClaudeOpus4
-| Self::ClaudeOpus4Thinking
-| Self::ClaudeSonnet4
-| Self::ClaudeSonnet4Thinking
-| Self::Claude3_5Sonnet
+Self::Claude3_5Sonnet
 | Self::Claude3_7Sonnet
 | Self::Claude3_7SonnetThinking
 | Self::Claude3_5Haiku
@@ -249,14 +201,10 @@ impl Model {
 Self::Claude3_5Sonnet
 | Self::Claude3_7Sonnet
 | Self::Claude3_5Haiku
-| Self::ClaudeOpus4
-| Self::ClaudeSonnet4
 | Self::Claude3Opus
 | Self::Claude3Sonnet
 | Self::Claude3Haiku => AnthropicModelMode::Default,
-Self::Claude3_7SonnetThinking
-| Self::ClaudeOpus4Thinking
-| Self::ClaudeSonnet4Thinking => AnthropicModelMode::Thinking {
+Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
 budget_tokens: Some(4_096),
 },
 Self::Custom { mode, .. } => mode.clone(),
@@ -163,8 +163,10 @@ impl AskPassSession {
 #[cfg(unix)]
 fn get_shell_safe_zed_path() -> anyhow::Result<String> {
 let zed_path = std::env::current_exe()
-.context("Failed to figure out current executable path for use in askpass")?
+.context("Failed to determine current executable path for use in askpass")?
 .to_string_lossy()
 // see https://github.com/rust-lang/rust/issues/69343
 .trim_end_matches(" (deleted)")
 .to_string();

 // NOTE: this was previously enabled, however, it caused errors when it shouldn't have
@@ -3044,7 +3044,7 @@ fn invoked_slash_command_fold_placeholder(
 .gap_2()
 .bg(cx.theme().colors().surface_background)
 .rounded_sm()
-.child(Label::new(format!("/{}", command.name.clone())))
+.child(Label::new(format!("/{}", command.name)))
 .map(|parent| match &command.status {
 InvokedSlashCommandStatus::Running(_) => {
 parent.child(Icon::new(IconName::ArrowCircle).with_animation(
@@ -94,6 +94,7 @@ pub struct AssistantSettings {
 pub single_file_review: bool,
 pub model_parameters: Vec<LanguageModelParameters>,
 pub preferred_completion_mode: CompletionMode,
+pub enable_feedback: bool,
 }

 impl AssistantSettings {
@@ -261,6 +262,7 @@ impl AssistantSettingsContent {
 single_file_review: None,
 model_parameters: Vec::new(),
 preferred_completion_mode: None,
+enable_feedback: None,
 },
 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
 },
@@ -291,6 +293,7 @@ impl AssistantSettingsContent {
 single_file_review: None,
 model_parameters: Vec::new(),
 preferred_completion_mode: None,
+enable_feedback: None,
 },
 None => AssistantSettingsContentV2::default(),
 }
@@ -573,6 +576,7 @@ impl Default for VersionedAssistantSettingsContent {
 single_file_review: None,
 model_parameters: Vec::new(),
 preferred_completion_mode: None,
+enable_feedback: None,
 })
 }
 }
@@ -647,6 +651,10 @@ pub struct AssistantSettingsContentV2 {
 ///
 /// Default: normal
 preferred_completion_mode: Option<CompletionMode>,
+/// Whether to show thumb buttons for feedback in the agent panel.
+///
+/// Default: true
+enable_feedback: Option<bool>,
 }

 #[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
@@ -684,7 +692,7 @@ impl JsonSchema for LanguageModelProviderSetting {
 schemars::schema::SchemaObject {
 enum_values: Some(vec![
 "anthropic".into(),
-"bedrock".into(),
+"amazon-bedrock".into(),
 "google".into(),
 "lmstudio".into(),
 "ollama".into(),
@@ -853,6 +861,7 @@ impl Settings for AssistantSettings {
 &mut settings.preferred_completion_mode,
 value.preferred_completion_mode,
 );
+merge(&mut settings.enable_feedback, value.enable_feedback);

 settings
 .model_parameters
@@ -989,6 +998,7 @@ mod tests {
 notify_when_agent_waiting: None,
 stream_edits: None,
 single_file_review: None,
+enable_feedback: None,
 model_parameters: Vec::new(),
 preferred_completion_mode: None,
 },
@@ -35,7 +35,6 @@ indoc.workspace = true
 itertools.workspace = true
 language.workspace = true
 language_model.workspace = true
-linkme.workspace = true
 log.workspace = true
 markdown.workspace = true
 open.workspace = true
@@ -15,7 +15,7 @@ use gpui::{AppContext, TestAppContext};
|
||||
use indoc::{formatdoc, indoc};
|
||||
use language_model::{
|
||||
LanguageModelRegistry, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId,
|
||||
LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId, SelectedModel,
|
||||
};
|
||||
use project::Project;
|
||||
use rand::prelude::*;
|
||||
@@ -25,6 +25,7 @@ use std::{
|
||||
cmp::Reverse,
|
||||
fmt::{self, Display},
|
||||
io::Write as _,
|
||||
str::FromStr,
|
||||
sync::mpsc,
|
||||
};
|
||||
use util::path;
|
||||
@@ -1216,7 +1217,7 @@ fn report_progress(evaluated_count: usize, failed_count: usize, iterations: usiz
|
||||
passed_count as f64 / evaluated_count as f64
|
||||
};
|
||||
print!(
|
||||
"\r\x1b[KEvaluated {}/{} ({:.2}%)",
|
||||
"\r\x1b[KEvaluated {}/{} ({:.2}% passed)",
|
||||
evaluated_count,
|
||||
iterations,
|
||||
passed_ratio * 100.0
|
||||
@@ -1255,13 +1256,21 @@ impl EditAgentTest {
|
||||
|
||||
fs.insert_tree("/root", json!({})).await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let agent_model = SelectedModel::from_str(
|
||||
&std::env::var("ZED_AGENT_MODEL")
|
||||
.unwrap_or("anthropic/claude-3-7-sonnet-latest".into()),
|
||||
)
|
||||
.unwrap();
|
||||
let judge_model = SelectedModel::from_str(
|
||||
&std::env::var("ZED_JUDGE_MODEL")
|
||||
.unwrap_or("anthropic/claude-3-7-sonnet-latest".into()),
|
||||
)
|
||||
.unwrap();
|
||||
let (agent_model, judge_model) = cx
|
||||
.update(|cx| {
|
||||
cx.spawn(async move |cx| {
|
||||
let agent_model =
|
||||
Self::load_model("anthropic", "claude-3-7-sonnet-latest", cx).await;
|
||||
let judge_model =
|
||||
Self::load_model("anthropic", "claude-3-7-sonnet-latest", cx).await;
|
||||
let agent_model = Self::load_model(&agent_model, cx).await;
|
||||
let judge_model = Self::load_model(&judge_model, cx).await;
|
||||
(agent_model.unwrap(), judge_model.unwrap())
|
||||
})
|
||||
})
|
||||
@@ -1276,15 +1285,17 @@ impl EditAgentTest {
|
||||
}
|
||||
|
||||
async fn load_model(
|
||||
provider: &str,
|
||||
id: &str,
|
||||
selected_model: &SelectedModel,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<Arc<dyn LanguageModel>> {
|
||||
let (provider, model) = cx.update(|cx| {
|
||||
let models = LanguageModelRegistry::read_global(cx);
|
||||
let model = models
|
||||
.available_models(cx)
|
||||
.find(|model| model.provider_id().0 == provider && model.id().0 == id)
|
||||
.find(|model| {
|
||||
model.provider_id() == selected_model.provider
|
||||
&& model.id() == selected_model.model
|
||||
})
|
||||
.unwrap();
|
||||
let provider = models.provider(&model.provider_id()).unwrap();
|
||||
(provider, model)
|
||||
|
||||
@@ -1249,7 +1249,7 @@ pub struct ActiveDiagnosticGroup {
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
|
||||
pub(crate) enum ActiveDiagnostic {
|
||||
None,
|
||||
All,
|
||||
|
||||
@@ -22,7 +22,7 @@ use language::{
|
||||
};
|
||||
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
|
||||
use markdown::{Markdown, MarkdownElement, MarkdownStyle};
|
||||
use project::{Project, ProjectPath};
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
@@ -86,7 +86,7 @@ pub struct EditFileToolInput {
|
||||
pub mode: EditFileMode,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum EditFileMode {
|
||||
Edit,
|
||||
@@ -171,9 +171,12 @@ impl Tool for EditFileTool {
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
};
|
||||
|
||||
let project_path = match resolve_path(&input, project.clone(), cx) {
|
||||
Ok(path) => path,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
|
||||
return Task::ready(Err(anyhow!(
|
||||
"Path {} not found in project",
|
||||
input.path.display()
|
||||
)))
|
||||
.into();
|
||||
};
|
||||
|
||||
let card = window.and_then(|window| {
|
||||
@@ -196,6 +199,20 @@ impl Tool for EditFileTool {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let exists = buffer.read_with(cx, |buffer, _| {
|
||||
buffer
|
||||
.file()
|
||||
.as_ref()
|
||||
.map_or(false, |file| file.disk_state().exists())
|
||||
})?;
|
||||
let create_or_overwrite = match input.mode {
|
||||
EditFileMode::Create | EditFileMode::Overwrite => true,
|
||||
_ => false,
|
||||
};
|
||||
if !create_or_overwrite && !exists {
|
||||
return Err(anyhow!("{} not found", input.path.display()));
|
||||
}
|
||||
|
||||
let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
let old_text = cx
|
||||
.background_spawn({
|
||||
@@ -204,15 +221,15 @@ impl Tool for EditFileTool {
|
||||
})
|
||||
.await;
|
||||
|
||||
let (output, mut events) = if matches!(input.mode, EditFileMode::Edit) {
|
||||
edit_agent.edit(
|
||||
let (output, mut events) = if create_or_overwrite {
|
||||
edit_agent.overwrite(
|
||||
buffer.clone(),
|
||||
input.display_description.clone(),
|
||||
&request,
|
||||
cx,
|
||||
)
|
||||
} else {
|
||||
edit_agent.overwrite(
|
||||
edit_agent.edit(
|
||||
buffer.clone(),
|
||||
input.display_description.clone(),
|
||||
&request,
|
||||
@@ -332,72 +349,6 @@ impl Tool for EditFileTool {
|
||||
}
|
||||
}
|
||||
|
||||
/// Validate that the file path is valid, meaning:
|
||||
///
|
||||
/// - For `edit` and `overwrite`, the path must point to an existing file.
|
||||
/// - For `create`, the file must not already exist, but it's parent dir must exist.
|
||||
fn resolve_path(
|
||||
input: &EditFileToolInput,
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Result<ProjectPath> {
|
||||
let project = project.read(cx);
|
||||
|
||||
match input.mode {
|
||||
EditFileMode::Edit | EditFileMode::Overwrite => {
|
||||
let path = project
|
||||
.find_project_path(&input.path, cx)
|
||||
.ok_or_else(|| anyhow!("Can't edit file: path not found"))?;
|
||||
|
||||
let entry = project
|
||||
.entry_for_path(&path, cx)
|
||||
.ok_or_else(|| anyhow!("Can't edit file: path not found"))?;
|
||||
|
||||
if !entry.is_file() {
|
||||
return Err(anyhow!("Can't edit file: path is a directory"));
|
||||
}
|
||||
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
EditFileMode::Create => {
|
||||
if let Some(path) = project.find_project_path(&input.path, cx) {
|
||||
if project.entry_for_path(&path, cx).is_some() {
|
||||
return Err(anyhow!("Can't create file: file already exists"));
|
||||
}
|
||||
}
|
||||
|
||||
let parent_path = input
|
||||
.path
|
||||
.parent()
|
||||
.ok_or_else(|| anyhow!("Can't create file: incorrect path"))?;
|
||||
|
||||
let parent_project_path = project.find_project_path(&parent_path, cx);
|
||||
|
||||
let parent_entry = parent_project_path
|
||||
.as_ref()
|
||||
.and_then(|path| project.entry_for_path(&path, cx))
|
||||
.ok_or_else(|| anyhow!("Can't create file: parent directory doesn't exist"))?;
|
||||
|
||||
if !parent_entry.is_dir() {
|
||||
return Err(anyhow!("Can't create file: parent is not a directory"));
|
||||
}
|
||||
|
||||
let file_name = input
|
||||
.path
|
||||
.file_name()
|
||||
.ok_or_else(|| anyhow!("Can't create file: invalid filename"))?;
|
||||
|
||||
let new_file_path = parent_project_path.map(|parent| ProjectPath {
|
||||
path: Arc::from(parent.path.join(file_name)),
|
||||
..parent
|
||||
});
|
||||
|
||||
new_file_path.ok_or_else(|| anyhow!("Can't create file"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EditFileToolCard {
|
||||
path: PathBuf,
|
||||
editor: Entity<Editor>,
|
||||
@@ -917,10 +868,7 @@ async fn build_buffer_diff(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::result::Result;
|
||||
|
||||
use super::*;
|
||||
use client::TelemetrySettings;
|
||||
use fs::FakeFs;
|
||||
use gpui::TestAppContext;
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
@@ -960,102 +908,10 @@ mod tests {
|
||||
.await;
|
||||
assert_eq!(
|
||||
result.unwrap_err().to_string(),
|
||||
"Can't edit file: path not found"
|
||||
"root/nonexistent_file.txt not found"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_resolve_path_for_creating_file(cx: &mut TestAppContext) {
|
||||
let mode = &EditFileMode::Create;
|
||||
|
||||
let result = test_resolve_path(mode, "root/new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "dir/new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "dir/new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir/subdir/existing.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't create file: file already exists"
|
||||
);
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir/nonexistent_dir/new.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't create file: parent directory doesn't exist"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_resolve_path_for_editing_file(cx: &mut TestAppContext) {
|
||||
let mode = &EditFileMode::Edit;
|
||||
|
||||
let path_with_root = "root/dir/subdir/existing.txt";
|
||||
let path_without_root = "dir/subdir/existing.txt";
|
||||
let result = test_resolve_path(mode, path_with_root, cx);
|
||||
assert_resolved_path_eq(result.await, path_without_root);
|
||||
|
||||
let result = test_resolve_path(mode, path_without_root, cx);
|
||||
assert_resolved_path_eq(result.await, path_without_root);
|
||||
|
||||
let result = test_resolve_path(mode, "root/nonexistent.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't edit file: path not found"
|
||||
);
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't edit file: path is a directory"
|
||||
);
|
||||
}
|
||||
|
||||
async fn test_resolve_path(
|
||||
mode: &EditFileMode,
|
||||
path: &str,
|
||||
cx: &mut TestAppContext,
|
||||
) -> Result<ProjectPath, anyhow::Error> {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"dir": {
|
||||
"subdir": {
|
||||
"existing.txt": "hello"
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
let input = EditFileToolInput {
|
||||
display_description: "Some edit".into(),
|
||||
path: path.into(),
|
||||
mode: mode.clone(),
|
||||
};
|
||||
|
||||
let result = cx.update(|cx| resolve_path(&input, project, cx));
|
||||
result
|
||||
}
|
||||
|
||||
fn assert_resolved_path_eq(path: Result<ProjectPath, anyhow::Error>, expected: &str) {
|
||||
let actual = path
|
||||
.expect("Should return valid path")
|
||||
.path
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.replace("\\", "/"); // Naive Windows paths normalization
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn still_streaming_ui_text_with_path() {
|
||||
let input = json!({
|
||||
@@ -1128,7 +984,6 @@ mod tests {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
TelemetrySettings::register(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup
|
||||
anyhow.workspace = true
|
||||
async-recursion = "0.3"
|
||||
async-tungstenite = { workspace = true, features = ["tokio", "tokio-rustls-manual-roots"] }
|
||||
base64.workspace = true
|
||||
chrono = { workspace = true, features = ["serde"] }
|
||||
clock.workspace = true
|
||||
collections.workspace = true
|
||||
@@ -29,6 +30,7 @@ gpui.workspace = true
|
||||
gpui_tokio.workspace = true
|
||||
http_client.workspace = true
|
||||
http_client_tls.workspace = true
|
||||
httparse = "1.10"
|
||||
log.workspace = true
|
||||
paths.workspace = true
|
||||
parking_lot.workspace = true
|
||||
@@ -47,6 +49,7 @@ text.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tiny_http = "0.8"
|
||||
tokio-native-tls = "0.3"
|
||||
tokio-socks = { version = "0.5.2", default-features = false, features = ["futures-io"] }
|
||||
url.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod test;
|
||||
|
||||
mod socks;
|
||||
mod proxy;
|
||||
pub mod telemetry;
|
||||
pub mod user;
|
||||
pub mod zed_urls;
|
||||
@@ -24,13 +24,13 @@ use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions};
|
||||
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use parking_lot::RwLock;
|
||||
use postage::watch;
|
||||
use proxy::connect_proxy_stream;
|
||||
use rand::prelude::*;
|
||||
use release_channel::{AppVersion, ReleaseChannel};
|
||||
use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources};
|
||||
use socks::connect_socks_proxy_stream;
|
||||
use std::pin::Pin;
|
||||
use std::{
|
||||
any::TypeId,
|
||||
@@ -1156,7 +1156,7 @@ impl Client {
|
||||
let handle = cx.update(|cx| gpui_tokio::Tokio::handle(cx)).ok().unwrap();
|
||||
let _guard = handle.enter();
|
||||
match proxy {
|
||||
Some(proxy) => connect_socks_proxy_stream(&proxy, rpc_host).await?,
|
||||
Some(proxy) => connect_proxy_stream(&proxy, rpc_host).await?,
|
||||
None => Box::new(TcpStream::connect(rpc_host).await?),
|
||||
}
|
||||
};
|
||||
|
||||
66
crates/client/src/proxy.rs
Normal file
66
crates/client/src/proxy.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
//! client proxy
|
||||
|
||||
mod http_proxy;
|
||||
mod socks_proxy;
|
||||
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use http_client::Url;
|
||||
use http_proxy::{HttpProxyType, connect_http_proxy_stream, parse_http_proxy};
|
||||
use socks_proxy::{SocksVersion, connect_socks_proxy_stream, parse_socks_proxy};
|
||||
|
||||
pub(crate) async fn connect_proxy_stream(
|
||||
proxy: &Url,
|
||||
rpc_host: (&str, u16),
|
||||
) -> Result<Box<dyn AsyncReadWrite>> {
|
||||
let Some(((proxy_domain, proxy_port), proxy_type)) = parse_proxy_type(proxy) else {
|
||||
// If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
|
||||
// SOCKS proxies are often used in contexts where security and privacy are critical,
|
||||
// so any fallback could expose users to significant risks.
|
||||
return Err(anyhow!("Parsing proxy url failed"));
|
||||
};
|
||||
|
||||
// Connect to proxy and wrap protocol later
|
||||
let stream = tokio::net::TcpStream::connect((proxy_domain.as_str(), proxy_port))
|
||||
.await
|
||||
.context("Failed to connect to proxy")?;
|
||||
|
||||
let proxy_stream = match proxy_type {
|
||||
ProxyType::SocksProxy(proxy) => connect_socks_proxy_stream(stream, proxy, rpc_host).await?,
|
||||
ProxyType::HttpProxy(proxy) => {
|
||||
connect_http_proxy_stream(stream, proxy, rpc_host, &proxy_domain).await?
|
||||
}
|
||||
};
|
||||
|
||||
Ok(proxy_stream)
|
||||
}
|
||||
|
||||
enum ProxyType<'t> {
|
||||
SocksProxy(SocksVersion<'t>),
|
||||
HttpProxy(HttpProxyType<'t>),
|
||||
}
|
||||
|
||||
fn parse_proxy_type<'t>(proxy: &'t Url) -> Option<((String, u16), ProxyType<'t>)> {
|
||||
let scheme = proxy.scheme();
|
||||
let host = proxy.host()?.to_string();
|
||||
let port = proxy.port_or_known_default()?;
|
||||
let proxy_type = match scheme {
|
||||
scheme if scheme.starts_with("socks") => {
|
||||
Some(ProxyType::SocksProxy(parse_socks_proxy(scheme, proxy)))
|
||||
}
|
||||
scheme if scheme.starts_with("http") => {
|
||||
Some(ProxyType::HttpProxy(parse_http_proxy(scheme, proxy)))
|
||||
}
|
||||
_ => None,
|
||||
}?;
|
||||
|
||||
Some(((host, port), proxy_type))
|
||||
}
|
||||
|
||||
pub(crate) trait AsyncReadWrite:
|
||||
tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static
|
||||
{
|
||||
}
|
||||
impl<T: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static> AsyncReadWrite
|
||||
for T
|
||||
{
|
||||
}
|
||||
171
crates/client/src/proxy/http_proxy.rs
Normal file
171
crates/client/src/proxy/http_proxy.rs
Normal file
@@ -0,0 +1,171 @@
|
||||
use anyhow::{Context, Result};
|
||||
use base64::Engine;
|
||||
use httparse::{EMPTY_HEADER, Response};
|
||||
use tokio::{
|
||||
io::{AsyncBufReadExt, AsyncWriteExt, BufStream},
|
||||
net::TcpStream,
|
||||
};
|
||||
use tokio_native_tls::{TlsConnector, native_tls};
|
||||
use url::Url;
|
||||
|
||||
use super::AsyncReadWrite;
|
||||
|
||||
pub(super) enum HttpProxyType<'t> {
|
||||
HTTP(Option<HttpProxyAuthorization<'t>>),
|
||||
HTTPS(Option<HttpProxyAuthorization<'t>>),
|
||||
}
|
||||
|
||||
pub(super) struct HttpProxyAuthorization<'t> {
|
||||
username: &'t str,
|
||||
password: &'t str,
|
||||
}
|
||||
|
||||
pub(super) fn parse_http_proxy<'t>(scheme: &str, proxy: &'t Url) -> HttpProxyType<'t> {
|
||||
let auth = proxy.password().map(|password| HttpProxyAuthorization {
|
||||
username: proxy.username(),
|
||||
password,
|
||||
});
|
||||
if scheme.starts_with("https") {
|
||||
HttpProxyType::HTTPS(auth)
|
||||
} else {
|
||||
HttpProxyType::HTTP(auth)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn connect_http_proxy_stream(
|
||||
stream: TcpStream,
|
||||
http_proxy: HttpProxyType<'_>,
|
||||
rpc_host: (&str, u16),
|
||||
proxy_domain: &str,
|
||||
) -> Result<Box<dyn AsyncReadWrite>> {
|
||||
match http_proxy {
|
||||
HttpProxyType::HTTP(auth) => http_connect(stream, rpc_host, auth).await,
|
||||
HttpProxyType::HTTPS(auth) => https_connect(stream, rpc_host, auth, proxy_domain).await,
|
||||
}
|
||||
.context("error connecting to http/https proxy")
|
||||
}
|
||||
|
||||
async fn http_connect<T>(
|
||||
stream: T,
|
||||
target: (&str, u16),
|
||||
auth: Option<HttpProxyAuthorization<'_>>,
|
||||
) -> Result<Box<dyn AsyncReadWrite>>
|
||||
where
|
||||
T: AsyncReadWrite,
|
||||
{
|
||||
let mut stream = BufStream::new(stream);
|
||||
let request = make_request(target, auth);
|
||||
stream.write_all(request.as_bytes()).await?;
|
||||
stream.flush().await?;
|
||||
check_response(&mut stream).await?;
|
||||
Ok(Box::new(stream))
|
||||
}
|
||||
|
||||
async fn https_connect<T>(
|
||||
stream: T,
|
||||
target: (&str, u16),
|
||||
auth: Option<HttpProxyAuthorization<'_>>,
|
||||
proxy_domain: &str,
|
||||
) -> Result<Box<dyn AsyncReadWrite>>
|
||||
where
|
||||
T: AsyncReadWrite,
|
||||
{
|
||||
let tls_connector = TlsConnector::from(native_tls::TlsConnector::new()?);
|
||||
let stream = tls_connector.connect(proxy_domain, stream).await?;
|
||||
http_connect(stream, target, auth).await
|
||||
}
|
||||
|
||||
fn make_request(target: (&str, u16), auth: Option<HttpProxyAuthorization<'_>>) -> String {
|
||||
let (host, port) = target;
|
||||
let mut request = format!(
|
||||
"CONNECT {host}:{port} HTTP/1.1\r\nHost: {host}:{port}\r\nProxy-Connection: Keep-Alive\r\n"
|
||||
);
|
||||
if let Some(HttpProxyAuthorization { username, password }) = auth {
|
||||
let auth =
|
||||
base64::prelude::BASE64_STANDARD.encode(format!("{username}:{password}").as_bytes());
|
||||
let auth = format!("Proxy-Authorization: Basic {auth}\r\n");
|
||||
request.push_str(&auth);
|
||||
}
|
||||
request.push_str("\r\n");
|
||||
request
|
||||
}
|
||||
|
||||
async fn check_response<T>(stream: &mut BufStream<T>) -> Result<()>
|
||||
where
|
||||
T: AsyncReadWrite,
|
||||
{
|
||||
let response = recv_response(stream).await?;
|
||||
let mut dummy_headers = [EMPTY_HEADER; MAX_RESPONSE_HEADERS];
|
||||
let mut parser = Response::new(&mut dummy_headers);
|
||||
parser.parse(response.as_bytes())?;
|
||||
|
||||
match parser.code {
|
||||
Some(code) => {
|
||||
if code == 200 {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow::anyhow!(
|
||||
"Proxy connection failed with HTTP code: {code}"
|
||||
))
|
||||
}
|
||||
}
|
||||
None => Err(anyhow::anyhow!(
|
||||
"Proxy connection failed with no HTTP code: {}",
|
||||
parser.reason.unwrap_or("Unknown reason")
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
const MAX_RESPONSE_HEADER_LENGTH: usize = 4096;
|
||||
const MAX_RESPONSE_HEADERS: usize = 16;
|
||||
|
||||
async fn recv_response<T>(stream: &mut BufStream<T>) -> Result<String>
|
||||
where
|
||||
T: AsyncReadWrite,
|
||||
{
|
||||
let mut response = String::new();
|
||||
loop {
|
||||
if stream.read_line(&mut response).await? == 0 {
|
||||
return Err(anyhow::anyhow!("End of stream"));
|
||||
}
|
||||
|
||||
if MAX_RESPONSE_HEADER_LENGTH < response.len() {
|
||||
return Err(anyhow::anyhow!("Maximum response header length exceeded"));
|
||||
}
|
||||
|
||||
if response.ends_with("\r\n\r\n") {
|
||||
return Ok(response);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use url::Url;
|
||||
|
||||
use super::{HttpProxyAuthorization, HttpProxyType, parse_http_proxy};
|
||||
|
||||
#[test]
|
||||
fn test_parse_http_proxy() {
|
||||
let proxy = Url::parse("http://proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_http_proxy(scheme, &proxy);
|
||||
assert!(matches!(version, HttpProxyType::HTTP(None)))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_http_proxy_with_auth() {
|
||||
let proxy = Url::parse("http://username:password@proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_http_proxy(scheme, &proxy);
|
||||
assert!(matches!(
|
||||
version,
|
||||
HttpProxyType::HTTP(Some(HttpProxyAuthorization {
|
||||
username: "username",
|
||||
password: "password"
|
||||
}))
|
||||
))
|
||||
}
|
||||
}
|
||||
@@ -1,15 +1,19 @@
|
||||
//! socks proxy
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use http_client::Url;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use tokio::net::TcpStream;
|
||||
use tokio_socks::tcp::{Socks4Stream, Socks5Stream};
|
||||
use url::Url;
|
||||
|
||||
use super::AsyncReadWrite;
|
||||
|
||||
/// Identification to a Socks V4 Proxy
|
||||
struct Socks4Identification<'a> {
|
||||
pub(super) struct Socks4Identification<'a> {
|
||||
user_id: &'a str,
|
||||
}
|
||||
|
||||
/// Authorization to a Socks V5 Proxy
|
||||
struct Socks5Authorization<'a> {
|
||||
pub(super) struct Socks5Authorization<'a> {
|
||||
username: &'a str,
|
||||
password: &'a str,
|
||||
}
|
||||
@@ -18,45 +22,50 @@ struct Socks5Authorization<'a> {
|
||||
///
|
||||
/// V4 allows idenfication using a user_id
|
||||
/// V5 allows authorization using a username and password
|
||||
enum SocksVersion<'a> {
|
||||
pub(super) enum SocksVersion<'a> {
|
||||
V4(Option<Socks4Identification<'a>>),
|
||||
V5(Option<Socks5Authorization<'a>>),
|
||||
}
|
||||
|
||||
pub(crate) async fn connect_socks_proxy_stream(
|
||||
proxy: &Url,
|
||||
pub(super) fn parse_socks_proxy<'t>(scheme: &str, proxy: &'t Url) -> SocksVersion<'t> {
|
||||
if scheme.starts_with("socks4") {
|
||||
let identification = match proxy.username() {
|
||||
"" => None,
|
||||
username => Some(Socks4Identification { user_id: username }),
|
||||
};
|
||||
SocksVersion::V4(identification)
|
||||
} else {
|
||||
let authorization = proxy.password().map(|password| Socks5Authorization {
|
||||
username: proxy.username(),
|
||||
password,
|
||||
});
|
||||
SocksVersion::V5(authorization)
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) async fn connect_socks_proxy_stream(
|
||||
stream: TcpStream,
|
||||
socks_version: SocksVersion<'_>,
|
||||
rpc_host: (&str, u16),
|
||||
) -> Result<Box<dyn AsyncReadWrite>> {
|
||||
let Some((socks_proxy, version)) = parse_socks_proxy(proxy) else {
|
||||
// If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
|
||||
// SOCKS proxies are often used in contexts where security and privacy are critical,
|
||||
// so any fallback could expose users to significant risks.
|
||||
return Err(anyhow!("Parsing proxy url failed"));
|
||||
};
|
||||
|
||||
// Connect to proxy and wrap protocol later
|
||||
let stream = tokio::net::TcpStream::connect(socks_proxy)
|
||||
.await
|
||||
.context("Failed to connect to socks proxy")?;
|
||||
|
||||
let socks: Box<dyn AsyncReadWrite> = match version {
|
||||
match socks_version {
|
||||
SocksVersion::V4(None) => {
|
||||
let socks = Socks4Stream::connect_with_socket(stream, rpc_host)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Box::new(socks)
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
SocksVersion::V4(Some(Socks4Identification { user_id })) => {
|
||||
let socks = Socks4Stream::connect_with_userid_and_socket(stream, rpc_host, user_id)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Box::new(socks)
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
SocksVersion::V5(None) => {
|
||||
let socks = Socks5Stream::connect_with_socket(stream, rpc_host)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Box::new(socks)
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
SocksVersion::V5(Some(Socks5Authorization { username, password })) => {
|
||||
let socks = Socks5Stream::connect_with_password_and_socket(
|
||||
@@ -64,44 +73,9 @@ pub(crate) async fn connect_socks_proxy_stream(
|
||||
)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Box::new(socks)
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
};
|
||||
|
||||
Ok(socks)
|
||||
}
|
||||
|
||||
fn parse_socks_proxy(proxy: &Url) -> Option<((String, u16), SocksVersion<'_>)> {
|
||||
let scheme = proxy.scheme();
|
||||
let socks_version = if scheme.starts_with("socks4") {
|
||||
let identification = match proxy.username() {
|
||||
"" => None,
|
||||
username => Some(Socks4Identification { user_id: username }),
|
||||
};
|
||||
SocksVersion::V4(identification)
|
||||
} else if scheme.starts_with("socks") {
|
||||
let authorization = proxy.password().map(|password| Socks5Authorization {
|
||||
username: proxy.username(),
|
||||
password,
|
||||
});
|
||||
SocksVersion::V5(authorization)
|
||||
} else {
|
||||
return None;
|
||||
};
|
||||
|
||||
let host = proxy.host()?.to_string();
|
||||
let port = proxy.port_or_known_default()?;
|
||||
|
||||
Some(((host, port), socks_version))
|
||||
}
|
||||
|
||||
pub(crate) trait AsyncReadWrite:
|
||||
tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static
|
||||
{
|
||||
}
|
||||
impl<T: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static> AsyncReadWrite
|
||||
for T
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -113,20 +87,18 @@ mod tests {
|
||||
#[test]
|
||||
fn parse_socks4() {
|
||||
let proxy = Url::parse("socks4://proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let ((host, port), version) = parse_socks_proxy(&proxy).unwrap();
|
||||
assert_eq!(host, "proxy.example.com");
|
||||
assert_eq!(port, 1080);
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(version, SocksVersion::V4(None)))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_socks4_with_identification() {
|
||||
let proxy = Url::parse("socks4://userid@proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let ((host, port), version) = parse_socks_proxy(&proxy).unwrap();
|
||||
assert_eq!(host, "proxy.example.com");
|
||||
assert_eq!(port, 1080);
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(
|
||||
version,
|
||||
SocksVersion::V4(Some(Socks4Identification { user_id: "userid" }))
|
||||
@@ -136,20 +108,18 @@ mod tests {
|
||||
#[test]
|
||||
fn parse_socks5() {
|
||||
let proxy = Url::parse("socks5://proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let ((host, port), version) = parse_socks_proxy(&proxy).unwrap();
|
||||
assert_eq!(host, "proxy.example.com");
|
||||
assert_eq!(port, 1080);
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(version, SocksVersion::V5(None)))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_socks5_with_authorization() {
|
||||
let proxy = Url::parse("socks5://username:password@proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let ((host, port), version) = parse_socks_proxy(&proxy).unwrap();
|
||||
assert_eq!(host, "proxy.example.com");
|
||||
assert_eq!(port, 1080);
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(
|
||||
version,
|
||||
SocksVersion::V5(Some(Socks5Authorization {
|
||||
@@ -158,19 +128,4 @@ mod tests {
|
||||
}))
|
||||
))
|
||||
}
|
||||
|
||||
/// If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
|
||||
/// SOCKS proxies are often used in contexts where security and privacy are critical,
|
||||
/// so any fallback could expose users to significant risks.
|
||||
#[tokio::test]
|
||||
async fn fails_on_bad_proxy() {
|
||||
// Should fail connecting because http is not a valid Socks proxy scheme
|
||||
let proxy = Url::parse("http://localhost:2313").unwrap();
|
||||
|
||||
let result = connect_socks_proxy_stream(&proxy, ("test", 1080)).await;
|
||||
match result {
|
||||
Err(e) => assert_eq!(e.to_string(), "Parsing proxy url failed"),
|
||||
Ok(_) => panic!("Connecting on bad proxy should fail"),
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -137,18 +137,14 @@ pub fn os_version() -> String {
|
||||
log::error!("Failed to load /etc/os-release, /usr/lib/os-release");
|
||||
"".to_string()
|
||||
};
|
||||
let mut name = "unknown".to_string();
|
||||
let mut version = "unknown".to_string();
|
||||
let mut name = "unknown";
|
||||
let mut version = "unknown";
|
||||
|
||||
for line in content.lines() {
|
||||
if line.starts_with("ID=") {
|
||||
name = line.trim_start_matches("ID=").trim_matches('"').to_string();
|
||||
}
|
||||
if line.starts_with("VERSION_ID=") {
|
||||
version = line
|
||||
.trim_start_matches("VERSION_ID=")
|
||||
.trim_matches('"')
|
||||
.to_string();
|
||||
match line.split_once('=') {
|
||||
Some(("ID", val)) => name = val.trim_matches('"'),
|
||||
Some(("VERSION_ID", val)) => version = val.trim_matches('"'),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -222,7 +218,7 @@ impl Telemetry {
|
||||
cx.background_spawn({
|
||||
let state = state.clone();
|
||||
let os_version = os_version();
|
||||
state.lock().os_version = Some(os_version.clone());
|
||||
state.lock().os_version = Some(os_version);
|
||||
async move {
|
||||
if let Some(tempfile) = File::create(Self::log_file_path()).log_err() {
|
||||
state.lock().log_file = Some(tempfile);
|
||||
@@ -369,7 +365,7 @@ impl Telemetry {
|
||||
telemetry::event!(
|
||||
"Editor Edited",
|
||||
duration = duration,
|
||||
environment = environment.to_string(),
|
||||
environment = environment,
|
||||
is_via_ssh = is_via_ssh
|
||||
);
|
||||
}
|
||||
@@ -431,9 +427,8 @@ impl Telemetry {
|
||||
|
||||
if state.flush_events_task.is_none() {
|
||||
let this = self.clone();
|
||||
let executor = self.executor.clone();
|
||||
state.flush_events_task = Some(self.executor.spawn(async move {
|
||||
executor.timer(FLUSH_INTERVAL).await;
|
||||
this.executor.timer(FLUSH_INTERVAL).await;
|
||||
this.flush_events().detach();
|
||||
}));
|
||||
}
|
||||
@@ -484,12 +479,12 @@ impl Telemetry {
|
||||
self: &Arc<Self>,
|
||||
// We take in the JSON bytes buffer so we can reuse the existing allocation.
|
||||
mut json_bytes: Vec<u8>,
|
||||
event_request: EventRequestBody,
|
||||
event_request: &EventRequestBody,
|
||||
) -> Result<Request<AsyncBody>> {
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(&mut json_bytes, &event_request)?;
|
||||
serde_json::to_writer(&mut json_bytes, event_request)?;
|
||||
|
||||
let checksum = calculate_json_checksum(&json_bytes).unwrap_or("".to_string());
|
||||
let checksum = calculate_json_checksum(&json_bytes).unwrap_or_default();
|
||||
|
||||
Ok(Request::builder()
|
||||
.method(Method::POST)
|
||||
@@ -506,7 +501,7 @@ impl Telemetry {
|
||||
pub fn flush_events(self: &Arc<Self>) -> Task<()> {
|
||||
let mut state = self.state.lock();
|
||||
state.first_event_date_time = None;
|
||||
let mut events = mem::take(&mut state.events_queue);
|
||||
let events = mem::take(&mut state.events_queue);
|
||||
state.flush_events_task.take();
|
||||
drop(state);
|
||||
if events.is_empty() {
|
||||
@@ -519,7 +514,7 @@ impl Telemetry {
|
||||
let mut json_bytes = Vec::new();
|
||||
|
||||
if let Some(file) = &mut this.state.lock().log_file {
|
||||
for event in &mut events {
|
||||
for event in &events {
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(&mut json_bytes, event)?;
|
||||
file.write_all(&json_bytes)?;
|
||||
@@ -546,7 +541,7 @@ impl Telemetry {
|
||||
}
|
||||
};
|
||||
|
||||
let request = this.build_request(json_bytes, request_body)?;
|
||||
let request = this.build_request(json_bytes, &request_body)?;
|
||||
let response = this.http_client.send(request).await?;
|
||||
if response.status() != 200 {
|
||||
log::error!("Failed to send events: HTTP {:?}", response.status());
|
||||
|
||||
@@ -543,7 +543,7 @@ pub struct MembershipUpdated {
|
||||
|
||||
/// The result of setting a member's role.
|
||||
#[derive(Debug)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
|
||||
pub enum SetMemberRoleResult {
|
||||
InviteUpdated(Channel),
|
||||
MembershipUpdated(MembershipUpdated),
|
||||
|
||||
@@ -36,6 +36,7 @@ use util::{ResultExt as _, maybe};
|
||||
const VERSION: &str = env!("CARGO_PKG_VERSION");
|
||||
const REVISION: Option<&'static str> = option_env!("GITHUB_SHA");
|
||||
|
||||
#[expect(clippy::result_large_err)]
|
||||
#[tokio::main]
|
||||
async fn main() -> Result<()> {
|
||||
if let Err(error) = env::load_dotenv() {
|
||||
|
||||
@@ -36,8 +36,8 @@ fn room_participants(room: &Entity<Room>, cx: &mut TestAppContext) -> RoomPartic
|
||||
room.read_with(cx, |room, _| {
|
||||
let mut remote = room
|
||||
.remote_participants()
|
||||
.iter()
|
||||
.map(|(_, participant)| participant.user.github_login.clone())
|
||||
.values()
|
||||
.map(|participant| participant.user.github_login.clone())
|
||||
.collect::<Vec<_>>();
|
||||
let mut pending = room
|
||||
.pending_participants()
|
||||
|
||||
@@ -2517,7 +2517,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
|
||||
.clone()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
let breakpoints_b = editor_b.update(cx_b, |editor, cx| {
|
||||
@@ -2526,7 +2526,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
|
||||
.clone()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -2550,7 +2550,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
|
||||
.clone()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
let breakpoints_b = editor_b.update(cx_b, |editor, cx| {
|
||||
@@ -2559,7 +2559,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
|
||||
.clone()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -2583,7 +2583,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
|
||||
.clone()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
let breakpoints_b = editor_b.update(cx_b, |editor, cx| {
|
||||
@@ -2592,7 +2592,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
|
||||
.clone()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -2616,7 +2616,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
|
||||
.clone()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
let breakpoints_b = editor_b.update(cx_b, |editor, cx| {
|
||||
@@ -2625,7 +2625,7 @@ async fn test_add_breakpoints(cx_a: &mut TestAppContext, cx_b: &mut TestAppConte
|
||||
.clone()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
|
||||
@@ -1059,7 +1059,7 @@ impl Render for ChatPanel {
|
||||
.child(
|
||||
Label::new(format!(
|
||||
"@{}",
|
||||
user_being_replied_to.github_login.clone()
|
||||
user_being_replied_to.github_login
|
||||
))
|
||||
.size(LabelSize::Small)
|
||||
.weight(FontWeight::BOLD),
|
||||
|
||||
@@ -7,11 +7,11 @@ use crate::notifications::collab_notification::CollabNotification;
|
||||
pub struct CollabNotificationStory;
|
||||
|
||||
impl Render for CollabNotificationStory {
|
||||
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let window_container = |width, height| div().w(px(width)).h(px(height));
|
||||
|
||||
Story::container()
|
||||
.child(Story::title_for::<CollabNotification>())
|
||||
Story::container(cx)
|
||||
.child(Story::title_for::<CollabNotification>(cx))
|
||||
.child(
|
||||
StorySection::new().child(StoryItem::new(
|
||||
"Incoming Call Notification",
|
||||
|
||||
@@ -14,7 +14,7 @@ path = "src/component.rs"
|
||||
[dependencies]
|
||||
collections.workspace = true
|
||||
gpui.workspace = true
|
||||
linkme.workspace = true
|
||||
inventory.workspace = true
|
||||
parking_lot.workspace = true
|
||||
strum.workspace = true
|
||||
theme.workspace = true
|
||||
|
||||
@@ -9,13 +9,12 @@
|
||||
|
||||
mod component_layout;
|
||||
|
||||
pub use component_layout::*;
|
||||
|
||||
use std::sync::LazyLock;
|
||||
|
||||
pub use component_layout::*;
|
||||
|
||||
use collections::HashMap;
|
||||
use gpui::{AnyElement, App, SharedString, Window};
|
||||
use linkme::distributed_slice;
|
||||
use parking_lot::RwLock;
|
||||
use strum::{Display, EnumString};
|
||||
|
||||
@@ -24,12 +23,27 @@ pub fn components() -> ComponentRegistry {
|
||||
}
|
||||
|
||||
pub fn init() {
|
||||
let component_fns: Vec<_> = __ALL_COMPONENTS.iter().cloned().collect();
|
||||
for f in component_fns {
|
||||
f();
|
||||
for f in inventory::iter::<ComponentFn>() {
|
||||
(f.0)();
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ComponentFn(fn());
|
||||
|
||||
impl ComponentFn {
|
||||
pub const fn new(f: fn()) -> Self {
|
||||
Self(f)
|
||||
}
|
||||
}
|
||||
|
||||
inventory::collect!(ComponentFn);
|
||||
|
||||
/// Private internals for macros.
|
||||
#[doc(hidden)]
|
||||
pub mod __private {
|
||||
pub use inventory;
|
||||
}
|
||||
|
||||
pub fn register_component<T: Component>() {
|
||||
let id = T::id();
|
||||
let metadata = ComponentMetadata {
|
||||
@@ -46,9 +60,6 @@ pub fn register_component<T: Component>() {
|
||||
data.components.insert(id, metadata);
|
||||
}
|
||||
|
||||
#[distributed_slice]
|
||||
pub static __ALL_COMPONENTS: [fn()] = [..];
|
||||
|
||||
pub static COMPONENT_DATA: LazyLock<RwLock<ComponentRegistry>> =
|
||||
LazyLock::new(|| RwLock::new(ComponentRegistry::default()));
|
||||
|
||||
|
||||
@@ -578,15 +578,6 @@ async fn stream_completion(
|
||||
api_key: String,
|
||||
request: Request,
|
||||
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
|
||||
let is_vision_request = request.messages.last().map_or(false, |message| match message {
|
||||
ChatMessage::User { content }
|
||||
| ChatMessage::Assistant { content, .. }
|
||||
| ChatMessage::Tool { content, .. } => {
|
||||
matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
|
||||
}
|
||||
_ => false,
|
||||
});
|
||||
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::POST)
|
||||
.uri(COPILOT_CHAT_COMPLETION_URL)
|
||||
@@ -600,7 +591,7 @@ async fn stream_completion(
|
||||
.header("Authorization", format!("Bearer {}", api_key))
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Copilot-Integration-Id", "vscode-chat")
|
||||
.header("Copilot-Vision-Request", is_vision_request.to_string());
|
||||
.header("Copilot-Vision-Request", "true");
|
||||
|
||||
let is_streaming = request.stream;
|
||||
|
||||
|
||||
@@ -32,17 +32,15 @@ pub enum DapStatus {
|
||||
Failed { error: String },
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait DapDelegate: Send + Sync + 'static {
|
||||
#[async_trait(?Send)]
|
||||
pub trait DapDelegate {
|
||||
fn worktree_id(&self) -> WorktreeId;
|
||||
fn worktree_root_path(&self) -> &Path;
|
||||
fn http_client(&self) -> Arc<dyn HttpClient>;
|
||||
fn node_runtime(&self) -> NodeRuntime;
|
||||
fn toolchain_store(&self) -> Arc<dyn LanguageToolchainStore>;
|
||||
fn fs(&self) -> Arc<dyn Fs>;
|
||||
fn output_to_console(&self, msg: String);
|
||||
async fn which(&self, command: &OsStr) -> Option<PathBuf>;
|
||||
async fn read_text_file(&self, path: PathBuf) -> Result<String>;
|
||||
fn which(&self, command: &OsStr) -> Option<PathBuf>;
|
||||
async fn shell_env(&self) -> collections::HashMap<String, String>;
|
||||
}
|
||||
|
||||
@@ -415,7 +413,7 @@ pub trait DebugAdapter: 'static + Send + Sync {
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
@@ -474,7 +472,7 @@ impl DebugAdapter for FakeAdapter {
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
_: &Arc<dyn DapDelegate>,
|
||||
_: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
_: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
|
||||
@@ -6,8 +6,6 @@ pub mod proto_conversions;
|
||||
mod registry;
|
||||
pub mod transport;
|
||||
|
||||
use std::net::Ipv4Addr;
|
||||
|
||||
pub use dap_types::*;
|
||||
pub use registry::{DapLocator, DapRegistry};
|
||||
pub use task::DebugRequest;
|
||||
@@ -18,19 +16,3 @@ pub type StackFrameId = u64;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub use adapters::FakeAdapter;
|
||||
use task::TcpArgumentsTemplate;
|
||||
|
||||
pub async fn configure_tcp_connection(
|
||||
tcp_connection: TcpArgumentsTemplate,
|
||||
) -> anyhow::Result<(Ipv4Addr, u16, Option<u64>)> {
|
||||
let host = tcp_connection.host();
|
||||
let timeout = tcp_connection.timeout;
|
||||
|
||||
let port = if let Some(port) = tcp_connection.port {
|
||||
port
|
||||
} else {
|
||||
transport::TcpTransport::port(&tcp_connection).await?
|
||||
};
|
||||
|
||||
Ok((host, port, timeout))
|
||||
}
|
||||
|
||||
@@ -54,6 +54,10 @@ impl DapRegistry {
|
||||
pub fn add_adapter(&self, adapter: Arc<dyn DebugAdapter>) {
|
||||
let name = adapter.name();
|
||||
let _previous_value = self.0.write().adapters.insert(name, adapter);
|
||||
debug_assert!(
|
||||
_previous_value.is_none(),
|
||||
"Attempted to insert a new debug adapter when one is already registered"
|
||||
);
|
||||
}
|
||||
|
||||
pub fn adapter_language(&self, adapter_name: &str) -> Option<LanguageName> {
|
||||
|
||||
@@ -61,7 +61,7 @@ impl CodeLldbDebugAdapter {
|
||||
|
||||
async fn fetch_latest_adapter_version(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<AdapterVersion> {
|
||||
let release =
|
||||
latest_github_release("vadimcn/codelldb", true, false, delegate.http_client()).await?;
|
||||
@@ -111,7 +111,7 @@ impl DebugAdapter for CodeLldbDebugAdapter {
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
@@ -129,7 +129,7 @@ impl DebugAdapter for CodeLldbDebugAdapter {
|
||||
self.name(),
|
||||
version.clone(),
|
||||
adapters::DownloadedFileType::Vsix,
|
||||
delegate.as_ref(),
|
||||
delegate,
|
||||
)
|
||||
.await?;
|
||||
let version_path =
|
||||
|
||||
@@ -6,7 +6,7 @@ mod php;
|
||||
mod python;
|
||||
mod ruby;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{net::Ipv4Addr, sync::Arc};
|
||||
|
||||
use anyhow::{Result, anyhow};
|
||||
use async_trait::async_trait;
|
||||
@@ -17,7 +17,6 @@ use dap::{
|
||||
self, AdapterVersion, DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName,
|
||||
GithubRepo,
|
||||
},
|
||||
configure_tcp_connection,
|
||||
inline_value::{PythonInlineValueProvider, RustInlineValueProvider},
|
||||
};
|
||||
use gdb::GdbDebugAdapter;
|
||||
@@ -28,6 +27,7 @@ use php::PhpDebugAdapter;
|
||||
use python::PythonDebugAdapter;
|
||||
use ruby::RubyDebugAdapter;
|
||||
use serde_json::{Value, json};
|
||||
use task::TcpArgumentsTemplate;
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
cx.update_default_global(|registry: &mut DapRegistry, _cx| {
|
||||
@@ -45,6 +45,21 @@ pub fn init(cx: &mut App) {
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) async fn configure_tcp_connection(
|
||||
tcp_connection: TcpArgumentsTemplate,
|
||||
) -> Result<(Ipv4Addr, u16, Option<u64>)> {
|
||||
let host = tcp_connection.host();
|
||||
let timeout = tcp_connection.timeout;
|
||||
|
||||
let port = if let Some(port) = tcp_connection.port {
|
||||
port
|
||||
} else {
|
||||
dap::transport::TcpTransport::port(&tcp_connection).await?
|
||||
};
|
||||
|
||||
Ok((host, port, timeout))
|
||||
}
|
||||
|
||||
trait ToDap {
|
||||
fn to_dap(&self) -> dap::StartDebuggingRequestArgumentsRequest;
|
||||
}
|
||||
|
||||
@@ -65,7 +65,7 @@ impl DebugAdapter for GdbDebugAdapter {
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<std::path::PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
@@ -76,7 +76,6 @@ impl DebugAdapter for GdbDebugAdapter {
|
||||
|
||||
let gdb_path = delegate
|
||||
.which(OsStr::new("gdb"))
|
||||
.await
|
||||
.and_then(|p| p.to_str().map(|s| s.to_string()))
|
||||
.ok_or(anyhow!("Could not find gdb in path"));
|
||||
|
||||
|
||||
@@ -50,14 +50,13 @@ impl DebugAdapter for GoDebugAdapter {
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
_user_installed_path: Option<PathBuf>,
|
||||
_cx: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
let delve_path = delegate
|
||||
.which(OsStr::new("dlv"))
|
||||
.await
|
||||
.and_then(|p| p.to_str().map(|p| p.to_string()))
|
||||
.ok_or(anyhow!("Dlv not found in path"))?;
|
||||
|
||||
|
||||
@@ -56,7 +56,7 @@ impl JsDebugAdapter {
|
||||
|
||||
async fn fetch_latest_adapter_version(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<AdapterVersion> {
|
||||
let release = latest_github_release(
|
||||
&format!("{}/{}", "microsoft", Self::ADAPTER_NPM_NAME),
|
||||
@@ -82,7 +82,7 @@ impl JsDebugAdapter {
|
||||
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
@@ -139,7 +139,7 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
@@ -151,7 +151,7 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
self.name(),
|
||||
version,
|
||||
adapters::DownloadedFileType::GzipTar,
|
||||
delegate.as_ref(),
|
||||
delegate,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
@@ -40,7 +40,7 @@ impl PhpDebugAdapter {
|
||||
|
||||
async fn fetch_latest_adapter_version(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<AdapterVersion> {
|
||||
let release = latest_github_release(
|
||||
&format!("{}/{}", "xdebug", Self::ADAPTER_PACKAGE_NAME),
|
||||
@@ -66,7 +66,7 @@ impl PhpDebugAdapter {
|
||||
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
@@ -126,7 +126,7 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
@@ -138,7 +138,7 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
self.name(),
|
||||
version,
|
||||
adapters::DownloadedFileType::Vsix,
|
||||
delegate.as_ref(),
|
||||
delegate,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
@@ -52,26 +52,26 @@ impl PythonDebugAdapter {
|
||||
}
|
||||
async fn fetch_latest_adapter_version(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<AdapterVersion> {
|
||||
let github_repo = GithubRepo {
|
||||
repo_name: Self::ADAPTER_PACKAGE_NAME.into(),
|
||||
repo_owner: "microsoft".into(),
|
||||
};
|
||||
|
||||
adapters::fetch_latest_adapter_version_from_github(github_repo, delegate.as_ref()).await
|
||||
adapters::fetch_latest_adapter_version_from_github(github_repo, delegate).await
|
||||
}
|
||||
|
||||
async fn install_binary(
|
||||
&self,
|
||||
version: AdapterVersion,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<()> {
|
||||
let version_path = adapters::download_adapter_from_github(
|
||||
self.name(),
|
||||
version,
|
||||
adapters::DownloadedFileType::Zip,
|
||||
delegate.as_ref(),
|
||||
delegate,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -93,7 +93,7 @@ impl PythonDebugAdapter {
|
||||
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
@@ -128,18 +128,14 @@ impl PythonDebugAdapter {
|
||||
let python_path = if let Some(toolchain) = toolchain {
|
||||
Some(toolchain.path.to_string())
|
||||
} else {
|
||||
let mut name = None;
|
||||
|
||||
for cmd in BINARY_NAMES {
|
||||
name = delegate
|
||||
.which(OsStr::new(cmd))
|
||||
.await
|
||||
.map(|path| path.to_string_lossy().to_string());
|
||||
if name.is_some() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
name
|
||||
BINARY_NAMES
|
||||
.iter()
|
||||
.filter_map(|cmd| {
|
||||
delegate
|
||||
.which(OsStr::new(cmd))
|
||||
.map(|path| path.to_string_lossy().to_string())
|
||||
})
|
||||
.find(|_| true)
|
||||
};
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
@@ -176,7 +172,7 @@ impl DebugAdapter for PythonDebugAdapter {
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
cx: &mut AsyncApp,
|
||||
|
||||
@@ -8,7 +8,7 @@ use dap::{
|
||||
};
|
||||
use gpui::{AsyncApp, SharedString};
|
||||
use language::LanguageName;
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use std::path::PathBuf;
|
||||
use util::command::new_smol_command;
|
||||
|
||||
use crate::ToDap;
|
||||
@@ -32,7 +32,7 @@ impl DebugAdapter for RubyDebugAdapter {
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
delegate: &dyn DapDelegate,
|
||||
definition: &DebugTaskDefinition,
|
||||
_user_installed_path: Option<PathBuf>,
|
||||
_cx: &mut AsyncApp,
|
||||
@@ -40,7 +40,7 @@ impl DebugAdapter for RubyDebugAdapter {
|
||||
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
|
||||
let mut rdbg_path = adapter_path.join("rdbg");
|
||||
if !delegate.fs().is_file(&rdbg_path).await {
|
||||
match delegate.which("rdbg".as_ref()).await {
|
||||
match delegate.which("rdbg".as_ref()) {
|
||||
Some(path) => rdbg_path = path,
|
||||
None => {
|
||||
delegate.output_to_console(
|
||||
@@ -76,7 +76,7 @@ impl DebugAdapter for RubyDebugAdapter {
|
||||
format!("--port={}", port),
|
||||
format!("--host={}", host),
|
||||
];
|
||||
if delegate.which(launch.program.as_ref()).await.is_some() {
|
||||
if delegate.which(launch.program.as_ref()).is_some() {
|
||||
arguments.push("--command".to_string())
|
||||
}
|
||||
arguments.push(launch.program);
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
use gpui::App;
|
||||
use sqlez_macros::sql;
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::{define_connection, query, write_and_log};
|
||||
use crate::{define_connection, query};
|
||||
|
||||
define_connection!(pub static ref KEY_VALUE_STORE: KeyValueStore<()> =
|
||||
&[sql!(
|
||||
@@ -13,29 +11,6 @@ define_connection!(pub static ref KEY_VALUE_STORE: KeyValueStore<()> =
|
||||
)];
|
||||
);
|
||||
|
||||
pub trait Dismissable {
|
||||
const KEY: &'static str;
|
||||
|
||||
fn dismissed() -> bool {
|
||||
KEY_VALUE_STORE
|
||||
.read_kvp(Self::KEY)
|
||||
.log_err()
|
||||
.map_or(false, |s| s.is_some())
|
||||
}
|
||||
|
||||
fn set_dismissed(is_dismissed: bool, cx: &mut App) {
|
||||
write_and_log(cx, move || async move {
|
||||
if is_dismissed {
|
||||
KEY_VALUE_STORE
|
||||
.write_kvp(Self::KEY.into(), "1".into())
|
||||
.await
|
||||
} else {
|
||||
KEY_VALUE_STORE.delete_kvp(Self::KEY.into()).await
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl KeyValueStore {
|
||||
query! {
|
||||
pub fn read_kvp(key: &str) -> Result<Option<String>> {
|
||||
|
||||
@@ -5,7 +5,7 @@ use async_trait::async_trait;
|
||||
use dap::adapters::{
|
||||
DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName, DebugTaskDefinition,
|
||||
};
|
||||
use extension::{Extension, WorktreeDelegate};
|
||||
use extension::Extension;
|
||||
use gpui::AsyncApp;
|
||||
|
||||
pub(crate) struct ExtensionDapAdapter {
|
||||
@@ -25,35 +25,6 @@ impl ExtensionDapAdapter {
|
||||
}
|
||||
}
|
||||
|
||||
/// An adapter that allows an [`dap::adapters::DapDelegate`] to be used as a [`WorktreeDelegate`].
|
||||
struct WorktreeDelegateAdapter(pub Arc<dyn DapDelegate>);
|
||||
|
||||
#[async_trait]
|
||||
impl WorktreeDelegate for WorktreeDelegateAdapter {
|
||||
fn id(&self) -> u64 {
|
||||
self.0.worktree_id().to_proto()
|
||||
}
|
||||
|
||||
fn root_path(&self) -> String {
|
||||
self.0.worktree_root_path().to_string_lossy().to_string()
|
||||
}
|
||||
|
||||
async fn read_text_file(&self, path: PathBuf) -> Result<String> {
|
||||
self.0.read_text_file(path).await
|
||||
}
|
||||
|
||||
async fn which(&self, binary_name: String) -> Option<String> {
|
||||
self.0
|
||||
.which(binary_name.as_ref())
|
||||
.await
|
||||
.map(|path| path.to_string_lossy().to_string())
|
||||
}
|
||||
|
||||
async fn shell_env(&self) -> Vec<(String, String)> {
|
||||
self.0.shell_env().await.into_iter().collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl DebugAdapter for ExtensionDapAdapter {
|
||||
fn name(&self) -> DebugAdapterName {
|
||||
@@ -62,7 +33,7 @@ impl DebugAdapter for ExtensionDapAdapter {
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
_: &dyn DapDelegate,
|
||||
config: &DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
_cx: &mut AsyncApp,
|
||||
@@ -72,7 +43,6 @@ impl DebugAdapter for ExtensionDapAdapter {
|
||||
self.debug_adapter_name.clone(),
|
||||
config.clone(),
|
||||
user_installed_path,
|
||||
Arc::new(WorktreeDelegateAdapter(delegate.clone())),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
@@ -69,20 +69,15 @@ pub struct DebugPanel {
}

impl DebugPanel {
pub fn new(
workspace: &Workspace,
_window: &mut Window,
cx: &mut Context<Workspace>,
) -> Entity<Self> {
pub fn new(workspace: &Workspace, cx: &mut Context<Workspace>) -> Entity<Self> {
cx.new(|cx| {
let project = workspace.project().clone();
let focus_handle = cx.focus_handle();

let debug_panel = Self {
size: px(300.),
sessions: vec![],
active_session: None,
focus_handle,
focus_handle: cx.focus_handle(),
project,
workspace: workspace.weak_handle(),
context_menu: None,
@@ -93,24 +88,6 @@ impl DebugPanel {
})
}

pub(crate) fn focus_active_item(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let Some(session) = self.active_session.clone() else {
return;
};
let Some(active_pane) = session
.read(cx)
.running_state()
.read(cx)
.active_pane()
.cloned()
else {
return;
};
active_pane.update(cx, |pane, cx| {
pane.focus_active_item(window, cx);
});
}

pub(crate) fn sessions(&self) -> Vec<Entity<DebugSession>> {
self.sessions.clone()
}
@@ -205,8 +182,8 @@ impl DebugPanel {
cx: &mut AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
cx.spawn(async move |cx| {
workspace.update_in(cx, |workspace, window, cx| {
let debug_panel = DebugPanel::new(workspace, window, cx);
workspace.update(cx, |workspace, cx| {
let debug_panel = DebugPanel::new(workspace, cx);

workspace.register_action(|workspace, _: &ClearAllBreakpoints, _, cx| {
workspace.project().read(cx).breakpoint_store().update(

@@ -60,16 +60,7 @@ pub fn init(cx: &mut App) {
cx.when_flag_enabled::<DebuggerFeatureFlag>(window, |workspace, _, _| {
workspace
.register_action(|workspace, _: &ToggleFocus, window, cx| {
let did_focus_panel = workspace.toggle_panel_focus::<DebugPanel>(window, cx);
if !did_focus_panel {
return;
};
let Some(panel) = workspace.panel::<DebugPanel>(cx) else {
return;
};
panel.update(cx, |panel, cx| {
panel.focus_active_item(window, cx);
})
workspace.toggle_panel_focus::<DebugPanel>(window, cx);
})
.register_action(|workspace, _: &Pause, _, cx| {
if let Some(debug_panel) = workspace.panel::<DebugPanel>(cx) {

@@ -81,10 +81,6 @@ impl RunningState {
pub(crate) fn thread_id(&self) -> Option<ThreadId> {
self.thread_id
}

pub(crate) fn active_pane(&self) -> Option<&Entity<Pane>> {
self.active_pane.as_ref()
}
}

impl Render for RunningState {
@@ -506,15 +502,20 @@ impl DebugTerminal {

impl gpui::Render for DebugTerminal {
fn render(&mut self, _window: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
div()
.size_full()
.track_focus(&self.focus_handle)
.children(self.terminal.clone())
if let Some(terminal) = self.terminal.clone() {
terminal.into_any_element()
} else {
div().track_focus(&self.focus_handle).into_any_element()
}
}
}
impl Focusable for DebugTerminal {
fn focus_handle(&self, _cx: &App) -> FocusHandle {
self.focus_handle.clone()
fn focus_handle(&self, cx: &App) -> FocusHandle {
if let Some(terminal) = self.terminal.as_ref() {
return terminal.focus_handle(cx);
} else {
self.focus_handle.clone()
}
}
}

@@ -148,7 +148,7 @@ impl Render for BreakpointList {
cx: &mut ui::Context<Self>,
) -> impl ui::IntoElement {
let old_len = self.breakpoints.len();
let breakpoints = self.breakpoint_store.read(cx).all_source_breakpoints(cx);
let breakpoints = self.breakpoint_store.read(cx).all_breakpoints(cx);
self.breakpoints.clear();
let weak = cx.weak_entity();
let breakpoints = breakpoints.into_iter().flat_map(|(path, mut breakpoints)| {

@@ -365,7 +365,7 @@ impl ConsoleQueryBarCompletionProvider {
new_text: string_match.string.clone(),
label: CodeLabel {
filter_range: 0..string_match.string.len(),
text: format!("{} {}", string_match.string.clone(), variable_value),
text: format!("{} {}", string_match.string, variable_value),
runs: Vec::new(),
},
icon_path: None,

@@ -39,7 +39,6 @@ pub struct StackFrameList {
_refresh_task: Task<()>,
}

#[allow(clippy::large_enum_variant)]
#[derive(Debug, PartialEq, Eq)]
pub enum StackFrameEntry {
Normal(dap::StackFrame),

@@ -23,7 +23,6 @@ futures.workspace = true
gpui.workspace = true
indoc.workspace = true
language.workspace = true
linkme.workspace = true
log.workspace = true
lsp.workspace = true
markdown.workspace = true

File diff suppressed because it is too large
@@ -40,7 +40,6 @@ pub const MENU_ASIDE_X_PADDING: Pixels = px(16.);
pub const MENU_ASIDE_MIN_WIDTH: Pixels = px(260.);
pub const MENU_ASIDE_MAX_WIDTH: Pixels = px(500.);

#[allow(clippy::large_enum_variant)]
pub enum CodeContextMenu {
Completions(CompletionsMenu),
CodeActions(CodeActionsMenu),
@@ -664,12 +663,13 @@ impl CompletionsMenu {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
enum MatchTier<'a> {
WordStartMatch {
sort_prefix: Reverse<usize>,
sort_fuzzy_bracket: Reverse<usize>,
sort_mixed_case_prefix_length: Reverse<usize>,
sort_snippet: Reverse<i32>,
sort_kind: usize,
sort_fuzzy_bracket: Reverse<usize>,
sort_text: Option<&'a str>,
sort_score: Reverse<OrderedFloat<f64>>,
sort_key: (usize, &'a str),
sort_label: &'a str,
},
OtherMatch {
sort_score: Reverse<OrderedFloat<f64>>,
@@ -680,12 +680,12 @@ impl CompletionsMenu {
// balance the raw fuzzy match score with hints from the language server

// In a fuzzy bracket, matches with a score of 1.0 are prioritized.
// The remaining matches are partitioned into two groups at 2/3 of the max_score.
// The remaining matches are partitioned into two groups at 3/5 of the max_score.
let max_score = matches
.iter()
.map(|mat| mat.string_match.score)
.fold(0.0, f64::max);
let second_bracket_threshold = max_score * (2.0 / 3.0);
let fuzzy_bracket_threshold = max_score * (3.0 / 5.0);

let query_start_lower = query
.and_then(|q| q.chars().next())
@@ -709,9 +709,7 @@ impl CompletionsMenu {
|
||||
if query_start_doesnt_match_split_words {
|
||||
MatchTier::OtherMatch { sort_score }
|
||||
} else {
|
||||
let sort_fuzzy_bracket = Reverse(if score == 1.0 {
|
||||
2
|
||||
} else if score >= second_bracket_threshold {
|
||||
let sort_fuzzy_bracket = Reverse(if score >= fuzzy_bracket_threshold {
|
||||
1
|
||||
} else {
|
||||
0
|
||||
@@ -721,7 +719,7 @@ impl CompletionsMenu {
|
||||
SnippetSortOrder::Bottom => Reverse(if mat.is_snippet { 0 } else { 1 }),
|
||||
SnippetSortOrder::Inline => Reverse(0),
|
||||
};
|
||||
let mixed_case_prefix_length = Reverse(
|
||||
let sort_mixed_case_prefix_length = Reverse(
|
||||
query
|
||||
.map(|q| {
|
||||
q.chars()
|
||||
@@ -741,12 +739,13 @@ impl CompletionsMenu {
|
||||
.unwrap_or(0),
|
||||
);
|
||||
MatchTier::WordStartMatch {
|
||||
sort_prefix: mixed_case_prefix_length,
|
||||
sort_fuzzy_bracket,
|
||||
sort_mixed_case_prefix_length,
|
||||
sort_snippet,
|
||||
sort_kind: mat.sort_kind,
|
||||
sort_fuzzy_bracket,
|
||||
sort_text: mat.sort_text,
|
||||
sort_score,
|
||||
sort_key: mat.sort_key,
|
||||
sort_label: mat.sort_label,
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -797,13 +796,14 @@ impl CompletionsMenu {
|
||||
None
|
||||
};
|
||||
|
||||
let sort_key = completion.sort_key();
|
||||
let (sort_kind, sort_label) = completion.sort_key();
|
||||
|
||||
SortableMatch {
|
||||
string_match,
|
||||
is_snippet,
|
||||
sort_text,
|
||||
sort_key,
|
||||
sort_kind,
|
||||
sort_label,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
@@ -828,7 +828,8 @@ pub struct SortableMatch<'a> {
|
||||
pub string_match: StringMatch,
|
||||
pub is_snippet: bool,
|
||||
pub sort_text: Option<&'a str>,
|
||||
pub sort_key: (usize, &'a str),
|
||||
pub sort_kind: usize,
|
||||
pub sort_label: &'a str,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -926,7 +927,6 @@ impl CodeActionContents {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[derive(Clone)]
|
||||
pub enum CodeActionsItem {
|
||||
Task(TaskSourceKind, ResolvedTask),
|
||||
|
||||
@@ -282,7 +282,6 @@ struct Transform {
|
||||
block: Option<Block>,
|
||||
}
|
||||
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[derive(Clone)]
|
||||
pub enum Block {
|
||||
Custom(Arc<CustomBlock>),
|
||||
|
||||
@@ -107,9 +107,9 @@ pub use items::MAX_TAB_TITLE_LEN;
|
||||
use itertools::Itertools;
|
||||
use language::{
|
||||
AutoindentMode, BracketMatch, BracketPair, Buffer, Capability, CharKind, CodeLabel,
|
||||
CursorShape, DiagnosticEntry, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText,
|
||||
IndentKind, IndentSize, Language, OffsetRangeExt, Point, Selection, SelectionGoal, TextObject,
|
||||
TransactionId, TreeSitterOptions, WordsQuery,
|
||||
CursorShape, DiagnosticEntry, DiffOptions, DocumentationConfig, EditPredictionsMode,
|
||||
EditPreview, HighlightedText, IndentKind, IndentSize, Language, OffsetRangeExt, Point,
|
||||
Selection, SelectionGoal, TextObject, TransactionId, TreeSitterOptions, WordsQuery,
|
||||
language_settings::{
|
||||
self, InlayHintSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode,
|
||||
all_language_settings, language_settings,
|
||||
@@ -122,11 +122,10 @@ use markdown::Markdown;
|
||||
use mouse_context_menu::MouseContextMenu;
|
||||
use persistence::DB;
|
||||
use project::{
|
||||
BreakpointWithPosition, ProjectPath,
|
||||
ProjectPath,
|
||||
debugger::{
|
||||
breakpoint_store::{
|
||||
BreakpointEditAction, BreakpointSessionState, BreakpointState, BreakpointStore,
|
||||
BreakpointStoreEvent,
|
||||
BreakpointEditAction, BreakpointState, BreakpointStore, BreakpointStoreEvent,
|
||||
},
|
||||
session::{Session, SessionEvent},
|
||||
},
|
||||
@@ -199,7 +198,7 @@ use theme::{
|
||||
};
|
||||
use ui::{
|
||||
ButtonSize, ButtonStyle, ContextMenu, Disclosure, IconButton, IconButtonShape, IconName,
|
||||
IconSize, Indicator, Key, Tooltip, h_flex, prelude::*,
|
||||
IconSize, Key, Tooltip, h_flex, prelude::*,
|
||||
};
|
||||
use util::{RangeExt, ResultExt, TryFutureExt, maybe, post_inc};
|
||||
use workspace::{
|
||||
@@ -1312,7 +1311,7 @@ pub struct ActiveDiagnosticGroup {
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
|
||||
pub(crate) enum ActiveDiagnostic {
|
||||
None,
|
||||
All,
|
||||
@@ -3913,7 +3912,7 @@ impl Editor {
|
||||
pub fn newline(&mut self, _: &Newline, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
|
||||
self.transact(window, cx, |this, window, cx| {
|
||||
let (edits, selection_fixup_info): (Vec<_>, Vec<_>) = {
|
||||
let (edits_with_flags, selection_info): (Vec<_>, Vec<_>) = {
|
||||
let selections = this.selections.all::<usize>(cx);
|
||||
let multi_buffer = this.buffer.read(cx);
|
||||
let buffer = multi_buffer.snapshot(cx);
|
||||
@@ -3921,17 +3920,21 @@ impl Editor {
|
||||
.iter()
|
||||
.map(|selection| {
|
||||
let start_point = selection.start.to_point(&buffer);
|
||||
let mut indent =
|
||||
let mut existing_indent =
|
||||
buffer.indent_size_for_line(MultiBufferRow(start_point.row));
|
||||
indent.len = cmp::min(indent.len, start_point.column);
|
||||
existing_indent.len = cmp::min(existing_indent.len, start_point.column);
|
||||
let start = selection.start;
|
||||
let end = selection.end;
|
||||
let selection_is_empty = start == end;
|
||||
let language_scope = buffer.language_scope_at(start);
|
||||
let (comment_delimiter, insert_extra_newline) = if let Some(language) =
|
||||
&language_scope
|
||||
{
|
||||
let insert_extra_newline =
|
||||
let (
|
||||
comment_delimiter,
|
||||
doc_delimiter,
|
||||
insert_extra_newline,
|
||||
indent_on_newline,
|
||||
indent_on_extra_newline,
|
||||
) = if let Some(language) = &language_scope {
|
||||
let mut insert_extra_newline =
|
||||
insert_extra_newline_brackets(&buffer, start..end, language)
|
||||
|| insert_extra_newline_tree_sitter(&buffer, start..end);
|
||||
|
||||
@@ -3951,63 +3954,208 @@ impl Editor {
|
||||
let (snapshot, range) =
|
||||
buffer.buffer_line_for_row(MultiBufferRow(start_point.row))?;
|
||||
|
||||
let mut index_of_first_non_whitespace = 0;
|
||||
let num_of_whitespaces = snapshot
|
||||
.chars_for_range(range.clone())
|
||||
.take_while(|c| c.is_whitespace())
|
||||
.count();
|
||||
let comment_candidate = snapshot
|
||||
.chars_for_range(range)
|
||||
.skip_while(|c| {
|
||||
let should_skip = c.is_whitespace();
|
||||
if should_skip {
|
||||
index_of_first_non_whitespace += 1;
|
||||
}
|
||||
should_skip
|
||||
})
|
||||
.skip(num_of_whitespaces)
|
||||
.take(max_len_of_delimiter)
|
||||
.collect::<String>();
|
||||
let comment_prefix = delimiters.iter().find(|comment_prefix| {
|
||||
comment_candidate.starts_with(comment_prefix.as_ref())
|
||||
})?;
|
||||
let (delimiter, trimmed_len) =
|
||||
delimiters.iter().find_map(|delimiter| {
|
||||
let trimmed = delimiter.trim_end();
|
||||
if comment_candidate.starts_with(trimmed) {
|
||||
Some((delimiter, trimmed.len()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})?;
|
||||
let cursor_is_placed_after_comment_marker =
|
||||
index_of_first_non_whitespace + comment_prefix.len()
|
||||
<= start_point.column as usize;
|
||||
num_of_whitespaces + trimmed_len <= start_point.column as usize;
|
||||
if cursor_is_placed_after_comment_marker {
|
||||
Some(comment_prefix.clone())
|
||||
Some(delimiter.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
(comment_delimiter, insert_extra_newline)
|
||||
|
||||
let mut indent_on_newline = IndentSize::spaces(0);
|
||||
let mut indent_on_extra_newline = IndentSize::spaces(0);
|
||||
|
||||
let doc_delimiter = maybe!({
|
||||
if !selection_is_empty {
|
||||
return None;
|
||||
}
|
||||
|
||||
if !multi_buffer.language_settings(cx).extend_comment_on_newline {
|
||||
return None;
|
||||
}
|
||||
|
||||
let DocumentationConfig {
|
||||
start: start_tag,
|
||||
end: end_tag,
|
||||
prefix: delimiter,
|
||||
tab_size: len,
|
||||
} = language.documentation()?;
|
||||
|
||||
let (snapshot, range) =
|
||||
buffer.buffer_line_for_row(MultiBufferRow(start_point.row))?;
|
||||
|
||||
let num_of_whitespaces = snapshot
|
||||
.chars_for_range(range.clone())
|
||||
.take_while(|c| c.is_whitespace())
|
||||
.count();
|
||||
|
||||
let cursor_is_after_start_tag = {
|
||||
let start_tag_len = start_tag.len();
|
||||
let start_tag_line = snapshot
|
||||
.chars_for_range(range.clone())
|
||||
.skip(num_of_whitespaces)
|
||||
.take(start_tag_len)
|
||||
.collect::<String>();
|
||||
if start_tag_line.starts_with(start_tag.as_ref()) {
|
||||
num_of_whitespaces + start_tag_len
|
||||
<= start_point.column as usize
|
||||
} else {
|
||||
false
|
||||
}
|
||||
};
|
||||
|
||||
let cursor_is_after_delimiter = {
|
||||
let delimiter_trim = delimiter.trim_end();
|
||||
let delimiter_line = snapshot
|
||||
.chars_for_range(range.clone())
|
||||
.skip(num_of_whitespaces)
|
||||
.take(delimiter_trim.len())
|
||||
.collect::<String>();
|
||||
if delimiter_line.starts_with(delimiter_trim) {
|
||||
num_of_whitespaces + delimiter_trim.len()
|
||||
<= start_point.column as usize
|
||||
} else {
|
||||
false
|
||||
}
|
||||
};
|
||||
|
||||
let cursor_is_before_end_tag_if_exists = {
|
||||
let num_of_whitespaces_rev = snapshot
|
||||
.reversed_chars_for_range(range.clone())
|
||||
.take_while(|c| c.is_whitespace())
|
||||
.count();
|
||||
let mut line_iter = snapshot
|
||||
.reversed_chars_for_range(range)
|
||||
.skip(num_of_whitespaces_rev);
|
||||
let end_tag_exists = end_tag
|
||||
.chars()
|
||||
.rev()
|
||||
.all(|char| line_iter.next() == Some(char));
|
||||
if end_tag_exists {
|
||||
let max_point = snapshot.line_len(start_point.row) as usize;
|
||||
let ordering = (num_of_whitespaces_rev
|
||||
+ end_tag.len()
|
||||
+ start_point.column as usize)
|
||||
.cmp(&max_point);
|
||||
let cursor_is_before_end_tag =
|
||||
ordering != Ordering::Greater;
|
||||
if cursor_is_after_start_tag {
|
||||
if cursor_is_before_end_tag {
|
||||
insert_extra_newline = true;
|
||||
}
|
||||
let cursor_is_at_start_of_end_tag =
|
||||
ordering == Ordering::Equal;
|
||||
if cursor_is_at_start_of_end_tag {
|
||||
indent_on_extra_newline.len = (*len).into();
|
||||
}
|
||||
}
|
||||
cursor_is_before_end_tag
|
||||
} else {
|
||||
true
|
||||
}
|
||||
};
|
||||
|
||||
if (cursor_is_after_start_tag || cursor_is_after_delimiter)
|
||||
&& cursor_is_before_end_tag_if_exists
|
||||
{
|
||||
if cursor_is_after_start_tag {
|
||||
indent_on_newline.len = (*len).into();
|
||||
}
|
||||
Some(delimiter.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
(
|
||||
comment_delimiter,
|
||||
doc_delimiter,
|
||||
insert_extra_newline,
|
||||
indent_on_newline,
|
||||
indent_on_extra_newline,
|
||||
)
|
||||
} else {
|
||||
(None, false)
|
||||
(
|
||||
None,
|
||||
None,
|
||||
false,
|
||||
IndentSize::default(),
|
||||
IndentSize::default(),
|
||||
)
|
||||
};
|
||||
|
||||
let capacity_for_delimiter = comment_delimiter
|
||||
.as_deref()
|
||||
.map(str::len)
|
||||
.unwrap_or_default();
|
||||
let mut new_text =
|
||||
String::with_capacity(1 + capacity_for_delimiter + indent.len as usize);
|
||||
let prevent_auto_indent = doc_delimiter.is_some();
|
||||
let delimiter = comment_delimiter.or(doc_delimiter);
|
||||
|
||||
let capacity_for_delimiter =
|
||||
delimiter.as_deref().map(str::len).unwrap_or_default();
|
||||
let mut new_text = String::with_capacity(
|
||||
1 + capacity_for_delimiter
|
||||
+ existing_indent.len as usize
|
||||
+ indent_on_newline.len as usize
|
||||
+ indent_on_extra_newline.len as usize,
|
||||
);
|
||||
new_text.push('\n');
|
||||
new_text.extend(indent.chars());
|
||||
if let Some(delimiter) = &comment_delimiter {
|
||||
new_text.extend(existing_indent.chars());
|
||||
new_text.extend(indent_on_newline.chars());
|
||||
|
||||
if let Some(delimiter) = &delimiter {
|
||||
new_text.push_str(delimiter);
|
||||
}
|
||||
|
||||
if insert_extra_newline {
|
||||
new_text = new_text.repeat(2);
|
||||
new_text.push('\n');
|
||||
new_text.extend(existing_indent.chars());
|
||||
new_text.extend(indent_on_extra_newline.chars());
|
||||
}
|
||||
|
||||
let anchor = buffer.anchor_after(end);
|
||||
let new_selection = selection.map(|_| anchor);
|
||||
(
|
||||
(start..end, new_text),
|
||||
((start..end, new_text), prevent_auto_indent),
|
||||
(insert_extra_newline, new_selection),
|
||||
)
|
||||
})
|
||||
.unzip()
|
||||
};
|
||||
|
||||
this.edit_with_autoindent(edits, cx);
|
||||
let mut auto_indent_edits = Vec::new();
|
||||
let mut edits = Vec::new();
|
||||
for (edit, prevent_auto_indent) in edits_with_flags {
|
||||
if prevent_auto_indent {
|
||||
edits.push(edit);
|
||||
} else {
|
||||
auto_indent_edits.push(edit);
|
||||
}
|
||||
}
|
||||
if !edits.is_empty() {
|
||||
this.edit(edits, cx);
|
||||
}
|
||||
if !auto_indent_edits.is_empty() {
|
||||
this.edit_with_autoindent(auto_indent_edits, cx);
|
||||
}
|
||||
|
||||
let buffer = this.buffer.read(cx).snapshot(cx);
|
||||
let new_selections = selection_fixup_info
|
||||
let new_selections = selection_info
|
||||
.into_iter()
|
||||
.map(|(extra_newline_inserted, new_selection)| {
|
||||
let mut cursor = new_selection.end.to_point(&buffer);
|
||||
@@ -6849,7 +6997,7 @@ impl Editor {
|
||||
range: Range<DisplayRow>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> HashMap<DisplayRow, (Anchor, Breakpoint, Option<BreakpointSessionState>)> {
|
||||
) -> HashMap<DisplayRow, (Anchor, Breakpoint)> {
|
||||
let mut breakpoint_display_points = HashMap::default();
|
||||
|
||||
let Some(breakpoint_store) = self.breakpoint_store.clone() else {
|
||||
@@ -6883,17 +7031,15 @@ impl Editor {
|
||||
buffer_snapshot,
|
||||
cx,
|
||||
);
|
||||
for (breakpoint, state) in breakpoints {
|
||||
for (anchor, breakpoint) in breakpoints {
|
||||
let multi_buffer_anchor =
|
||||
Anchor::in_buffer(excerpt_id, buffer_snapshot.remote_id(), breakpoint.position);
|
||||
Anchor::in_buffer(excerpt_id, buffer_snapshot.remote_id(), *anchor);
|
||||
let position = multi_buffer_anchor
|
||||
.to_point(&multi_buffer_snapshot)
|
||||
.to_display_point(&snapshot);
|
||||
|
||||
breakpoint_display_points.insert(
|
||||
position.row(),
|
||||
(multi_buffer_anchor, breakpoint.bp.clone(), state),
|
||||
);
|
||||
breakpoint_display_points
|
||||
.insert(position.row(), (multi_buffer_anchor, breakpoint.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7068,10 +7214,8 @@ impl Editor {
|
||||
position: Anchor,
|
||||
row: DisplayRow,
|
||||
breakpoint: &Breakpoint,
|
||||
state: Option<BreakpointSessionState>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> IconButton {
|
||||
let is_rejected = state.is_some_and(|s| !s.verified);
|
||||
// Is it a breakpoint that shows up when hovering over gutter?
|
||||
let (is_phantom, collides_with_existing) = self.gutter_breakpoint_indicator.0.map_or(
|
||||
(false, false),
|
||||
@@ -7097,8 +7241,6 @@ impl Editor {
|
||||
|
||||
let color = if is_phantom {
|
||||
Color::Hint
|
||||
} else if is_rejected {
|
||||
Color::Disabled
|
||||
} else {
|
||||
Color::Debugger
|
||||
};
|
||||
@@ -7126,18 +7268,9 @@ impl Editor {
|
||||
}
|
||||
let primary_text = SharedString::from(primary_text);
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
|
||||
let meta = if is_rejected {
|
||||
"No executable code is associated with this line."
|
||||
} else {
|
||||
"Right-click for more options."
|
||||
};
|
||||
IconButton::new(("breakpoint_indicator", row.0 as usize), icon)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.size(ui::ButtonSize::None)
|
||||
.when(is_rejected, |this| {
|
||||
this.indicator(Indicator::icon(Icon::new(IconName::Warning)).color(Color::Warning))
|
||||
})
|
||||
.icon_color(color)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click(cx.listener({
|
||||
@@ -7169,7 +7302,14 @@ impl Editor {
|
||||
);
|
||||
}))
|
||||
.tooltip(move |window, cx| {
|
||||
Tooltip::with_meta_in(primary_text.clone(), None, meta, &focus_handle, window, cx)
|
||||
Tooltip::with_meta_in(
|
||||
primary_text.clone(),
|
||||
None,
|
||||
"Right-click for more options",
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -7309,11 +7449,11 @@ impl Editor {
|
||||
_style: &EditorStyle,
|
||||
is_active: bool,
|
||||
row: DisplayRow,
|
||||
breakpoint: Option<(Anchor, Breakpoint, Option<BreakpointSessionState>)>,
|
||||
breakpoint: Option<(Anchor, Breakpoint)>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> IconButton {
|
||||
let color = Color::Muted;
|
||||
let position = breakpoint.as_ref().map(|(anchor, _, _)| *anchor);
|
||||
let position = breakpoint.as_ref().map(|(anchor, _)| *anchor);
|
||||
|
||||
IconButton::new(("run_indicator", row.0 as usize), ui::IconName::Play)
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
@@ -9493,16 +9633,16 @@ impl Editor {
|
||||
cx,
|
||||
)
|
||||
.next()
|
||||
.and_then(|(bp, _)| {
|
||||
.and_then(|(anchor, bp)| {
|
||||
let breakpoint_row = buffer_snapshot
|
||||
.summary_for_anchor::<text::PointUtf16>(&bp.position)
|
||||
.summary_for_anchor::<text::PointUtf16>(anchor)
|
||||
.row;
|
||||
|
||||
if breakpoint_row == row {
|
||||
snapshot
|
||||
.buffer_snapshot
|
||||
.anchor_in_excerpt(enclosing_excerpt, bp.position)
|
||||
.map(|position| (position, bp.bp.clone()))
|
||||
.anchor_in_excerpt(enclosing_excerpt, *anchor)
|
||||
.map(|anchor| (anchor, bp.clone()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -9665,10 +9805,7 @@ impl Editor {
|
||||
breakpoint_store.update(cx, |breakpoint_store, cx| {
|
||||
breakpoint_store.toggle_breakpoint(
|
||||
buffer,
|
||||
BreakpointWithPosition {
|
||||
position: breakpoint_position.text_anchor,
|
||||
bp: breakpoint,
|
||||
},
|
||||
(breakpoint_position.text_anchor, breakpoint),
|
||||
edit_action,
|
||||
cx,
|
||||
);
|
||||
@@ -20214,8 +20351,8 @@ impl EditorSnapshot {
|
||||
let participant_indices = collaboration_hub.user_participant_indices(cx);
|
||||
let collaborators_by_peer_id = collaboration_hub.collaborators(cx);
|
||||
let collaborators_by_replica_id = collaborators_by_peer_id
|
||||
.iter()
|
||||
.map(|(_, collaborator)| (collaborator.replica_id, collaborator))
|
||||
.values()
|
||||
.map(|collaborator| (collaborator.replica_id, collaborator))
|
||||
.collect::<HashMap<_, _>>();
|
||||
self.buffer_snapshot
|
||||
.selections_in_range(range, false)
|
||||
|
||||
@@ -6,8 +6,6 @@ use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources, VsCodeSettings};
|
||||
use util::serde::default_true;
|
||||
|
||||
/// Imports from the VSCode settings at
|
||||
/// https://code.visualstudio.com/docs/reference/default-settings
|
||||
#[derive(Deserialize, Clone)]
|
||||
pub struct EditorSettings {
|
||||
pub cursor_blink: bool,
|
||||
@@ -541,7 +539,7 @@ pub struct ScrollbarContent {
|
||||
}
|
||||
|
||||
/// Minimap related settings
|
||||
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
||||
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
||||
pub struct MinimapContent {
|
||||
/// When to show the minimap in the editor.
|
||||
///
|
||||
@@ -772,32 +770,5 @@ impl Settings for EditorSettings {
|
||||
let search = current.search.get_or_insert_default();
|
||||
search.include_ignored = use_ignored;
|
||||
}
|
||||
|
||||
let mut minimap = MinimapContent::default();
|
||||
let minimap_enabled = vscode.read_bool("editor.minimap.enabled").unwrap_or(true);
|
||||
let autohide = vscode.read_bool("editor.minimap.autohide");
|
||||
if minimap_enabled {
|
||||
if let Some(false) = autohide {
|
||||
minimap.show = Some(ShowMinimap::Always);
|
||||
} else {
|
||||
minimap.show = Some(ShowMinimap::Auto);
|
||||
}
|
||||
} else {
|
||||
minimap.show = Some(ShowMinimap::Never);
|
||||
}
|
||||
|
||||
vscode.enum_setting(
|
||||
"editor.minimap.showSlider",
|
||||
&mut minimap.thumb,
|
||||
|s| match s {
|
||||
"always" => Some(MinimapThumb::Always),
|
||||
"mouseover" => Some(MinimapThumb::Hover),
|
||||
_ => None,
|
||||
},
|
||||
);
|
||||
|
||||
if minimap != MinimapContent::default() {
|
||||
current.minimap = Some(minimap)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -52,7 +52,7 @@ use util::{
|
||||
uri,
|
||||
};
|
||||
use workspace::{
|
||||
CloseAllItems, CloseInactiveItems, NavigationEntry, ViewId,
|
||||
CloseActiveItem, CloseAllItems, CloseInactiveItems, NavigationEntry, OpenOptions, ViewId,
|
||||
item::{FollowEvent, FollowableItem, Item, ItemHandle},
|
||||
};
|
||||
|
||||
@@ -2755,7 +2755,7 @@ async fn test_newline_comments(cx: &mut TestAppContext) {
|
||||
|
||||
let language = Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
line_comments: vec!["//".into()],
|
||||
line_comments: vec!["// ".into()],
|
||||
..LanguageConfig::default()
|
||||
},
|
||||
None,
|
||||
@@ -2770,7 +2770,29 @@ async fn test_newline_comments(cx: &mut TestAppContext) {
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
// Foo
|
||||
// ˇ
|
||||
"});
|
||||
// Ensure that we add comment prefix when existing line contains space
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(
|
||||
indoc! {"
|
||||
// Foo
|
||||
//s
|
||||
// ˇ
|
||||
"}
|
||||
.replace("s", " ") // s is used as space placeholder to prevent format on save
|
||||
.as_str(),
|
||||
);
|
||||
// Ensure that we add comment prefix when existing line does not contain space
|
||||
cx.set_state(indoc! {"
|
||||
// Foo
|
||||
//ˇ
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
// Foo
|
||||
//
|
||||
// ˇ
|
||||
"});
|
||||
// Ensure that if cursor is before the comment start, we do not actually insert a comment prefix.
|
||||
cx.set_state(indoc! {"
|
||||
@@ -2797,6 +2819,177 @@ async fn test_newline_comments(cx: &mut TestAppContext) {
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_newline_documentation_comments(cx: &mut TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.tab_size = NonZeroU32::new(4)
|
||||
});
|
||||
|
||||
let language = Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
documentation: Some(language::DocumentationConfig {
|
||||
start: "/**".into(),
|
||||
end: "*/".into(),
|
||||
prefix: "* ".into(),
|
||||
tab_size: NonZeroU32::new(1).unwrap(),
|
||||
}),
|
||||
..LanguageConfig::default()
|
||||
},
|
||||
None,
|
||||
));
|
||||
{
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
|
||||
cx.set_state(indoc! {"
|
||||
/**ˇ
|
||||
"});
|
||||
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/**
|
||||
* ˇ
|
||||
"});
|
||||
// Ensure that if cursor is before the comment start,
|
||||
// we do not actually insert a comment prefix.
|
||||
cx.set_state(indoc! {"
|
||||
ˇ/**
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
|
||||
ˇ/**
|
||||
"});
|
||||
// Ensure that if cursor is between it doesn't add comment prefix.
|
||||
cx.set_state(indoc! {"
|
||||
/*ˇ*
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/*
|
||||
ˇ*
|
||||
"});
|
||||
// Ensure that if suffix exists on same line after cursor it adds new line.
|
||||
cx.set_state(indoc! {"
|
||||
/**ˇ*/
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/**
|
||||
* ˇ
|
||||
*/
|
||||
"});
|
||||
// Ensure that if suffix exists on same line after cursor with space it adds new line.
|
||||
cx.set_state(indoc! {"
|
||||
/**ˇ */
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/**
|
||||
* ˇ
|
||||
*/
|
||||
"});
|
||||
// Ensure that if suffix exists on same line after cursor with space it adds new line.
|
||||
cx.set_state(indoc! {"
|
||||
/** ˇ*/
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(
|
||||
indoc! {"
|
||||
/**s
|
||||
* ˇ
|
||||
*/
|
||||
"}
|
||||
.replace("s", " ") // s is used as space placeholder to prevent format on save
|
||||
.as_str(),
|
||||
);
|
||||
// Ensure that delimiter space is preserved when newline on already
|
||||
// spaced delimiter.
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(
|
||||
indoc! {"
|
||||
/**s
|
||||
*s
|
||||
* ˇ
|
||||
*/
|
||||
"}
|
||||
.replace("s", " ") // s is used as space placeholder to prevent format on save
|
||||
.as_str(),
|
||||
);
|
||||
// Ensure that delimiter space is preserved when space is not
|
||||
// on existing delimiter.
|
||||
cx.set_state(indoc! {"
|
||||
/**
|
||||
*ˇ
|
||||
*/
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/**
|
||||
*
|
||||
* ˇ
|
||||
*/
|
||||
"});
|
||||
// Ensure that if suffix exists on same line after cursor it
|
||||
// doesn't add extra new line if prefix is not on same line.
|
||||
cx.set_state(indoc! {"
|
||||
/**
|
||||
ˇ*/
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/**
|
||||
|
||||
ˇ*/
|
||||
"});
|
||||
// Ensure that it detects suffix after existing prefix.
|
||||
cx.set_state(indoc! {"
|
||||
/**ˇ/
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/**
|
||||
ˇ/
|
||||
"});
|
||||
// Ensure that if suffix exists on same line before
|
||||
// cursor it does not add comment prefix.
|
||||
cx.set_state(indoc! {"
|
||||
/** */ˇ
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/** */
|
||||
ˇ
|
||||
"});
|
||||
// Ensure that if suffix exists on same line before
|
||||
// cursor it does not add comment prefix.
|
||||
cx.set_state(indoc! {"
|
||||
/**
|
||||
*
|
||||
*/ˇ
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/**
|
||||
*
|
||||
*/
|
||||
ˇ
|
||||
"});
|
||||
}
|
||||
// Ensure that comment continuations can be disabled.
|
||||
update_test_language_settings(cx, |settings| {
|
||||
settings.defaults.extend_comment_on_newline = Some(false);
|
||||
});
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
cx.set_state(indoc! {"
|
||||
/**ˇ
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
|
||||
cx.assert_editor_state(indoc! {"
|
||||
/**
|
||||
ˇ
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_insert_with_old_selections(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -18522,7 +18715,7 @@ async fn test_breakpoint_toggling(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -18547,7 +18740,7 @@ async fn test_breakpoint_toggling(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -18569,7 +18762,7 @@ async fn test_breakpoint_toggling(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -18636,7 +18829,7 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -18657,7 +18850,7 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -18677,7 +18870,7 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -18700,7 +18893,7 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -18723,7 +18916,7 @@ async fn test_log_breakpoint_editing(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -18816,7 +19009,7 @@ async fn test_breakpoint_enabling_and_disabling(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -18848,7 +19041,7 @@ async fn test_breakpoint_enabling_and_disabling(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -18884,7 +19077,7 @@ async fn test_breakpoint_enabling_and_disabling(cx: &mut TestAppContext) {
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.all_source_breakpoints(cx)
|
||||
.all_breakpoints(cx)
|
||||
.clone()
|
||||
});
|
||||
|
||||
@@ -19867,6 +20060,156 @@ async fn test_html_linked_edits_on_completion(cx: &mut TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_invisible_worktree_servers(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/root"),
|
||||
json!({
|
||||
"a": {
|
||||
"main.rs": "fn main() {}",
|
||||
},
|
||||
"foo": {
|
||||
"bar": {
|
||||
"external_file.rs": "pub mod external {}",
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, [path!("/root/a").as_ref()], cx).await;
|
||||
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
|
||||
language_registry.add(rust_lang());
|
||||
let _fake_servers = language_registry.register_fake_lsp(
|
||||
"Rust",
|
||||
FakeLspAdapter {
|
||||
..FakeLspAdapter::default()
|
||||
},
|
||||
);
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let worktree_id = workspace.update(cx, |workspace, cx| {
|
||||
workspace.project().update(cx, |project, cx| {
|
||||
project.worktrees(cx).next().unwrap().read(cx).id()
|
||||
})
|
||||
});
|
||||
|
||||
let assert_language_servers_count =
|
||||
|expected: usize, context: &str, cx: &mut VisualTestContext| {
|
||||
project.update(cx, |project, cx| {
|
||||
let current = project
|
||||
.lsp_store()
|
||||
.read(cx)
|
||||
.as_local()
|
||||
.unwrap()
|
||||
.language_servers
|
||||
.len();
|
||||
assert_eq!(expected, current, "{context}");
|
||||
});
|
||||
};
|
||||
|
||||
assert_language_servers_count(
|
||||
0,
|
||||
"No servers should be running before any file is open",
|
||||
cx,
|
||||
);
|
||||
let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
|
||||
let main_editor = workspace
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
workspace.open_path(
|
||||
(worktree_id, "main.rs"),
|
||||
Some(pane.downgrade()),
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.unwrap()
|
||||
.await
|
||||
.downcast::<Editor>()
|
||||
.unwrap();
|
||||
pane.update(cx, |pane, cx| {
|
||||
let open_editor = pane.active_item().unwrap().downcast::<Editor>().unwrap();
|
||||
open_editor.update(cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.display_text(cx),
|
||||
"fn main() {}",
|
||||
"Original main.rs text on initial open",
|
||||
);
|
||||
});
|
||||
assert_eq!(open_editor, main_editor);
|
||||
});
|
||||
assert_language_servers_count(1, "First *.rs file starts a language server", cx);
|
||||
|
||||
let external_editor = workspace
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
workspace.open_abs_path(
|
||||
PathBuf::from("/root/foo/bar/external_file.rs"),
|
||||
OpenOptions::default(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.expect("opening external file")
|
||||
.downcast::<Editor>()
|
||||
.expect("downcasted external file's open element to editor");
|
||||
pane.update(cx, |pane, cx| {
|
||||
let open_editor = pane.active_item().unwrap().downcast::<Editor>().unwrap();
|
||||
open_editor.update(cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.display_text(cx),
|
||||
"pub mod external {}",
|
||||
"External file is open now",
|
||||
);
|
||||
});
|
||||
assert_eq!(open_editor, external_editor);
|
||||
});
|
||||
assert_language_servers_count(
|
||||
1,
|
||||
"Second, external, *.rs file should join the existing server",
|
||||
cx,
|
||||
);
|
||||
|
||||
pane.update_in(cx, |pane, window, cx| {
|
||||
pane.close_active_item(&CloseActiveItem::default(), window, cx)
|
||||
})
|
||||
.unwrap()
|
||||
.await
|
||||
.unwrap();
|
||||
pane.update_in(cx, |pane, window, cx| {
|
||||
pane.navigate_backward(window, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
pane.update(cx, |pane, cx| {
|
||||
let open_editor = pane.active_item().unwrap().downcast::<Editor>().unwrap();
|
||||
open_editor.update(cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.display_text(cx),
|
||||
"pub mod external {}",
|
||||
"External file is open now",
|
||||
);
|
||||
});
|
||||
});
|
||||
assert_language_servers_count(
|
||||
1,
|
||||
"After closing and reopening (with navigate back) of an external file, no extra language servers should appear",
|
||||
cx,
|
||||
);
|
||||
|
||||
cx.update(|_, cx| {
|
||||
workspace::reload(&workspace::Reload::default(), cx);
|
||||
});
|
||||
assert_language_servers_count(
|
||||
1,
|
||||
"After reloading the worktree with local and external files opened, only one project should be started",
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
fn empty_range(row: usize, column: usize) -> Range<DisplayPoint> {
|
||||
let point = DisplayPoint::new(DisplayRow(row as u32), column as u32);
|
||||
point..point
|
||||
|
||||
@@ -62,7 +62,7 @@ use multi_buffer::{
|
||||
|
||||
use project::{
|
||||
ProjectPath,
|
||||
debugger::breakpoint_store::{Breakpoint, BreakpointSessionState},
|
||||
debugger::breakpoint_store::Breakpoint,
|
||||
project_settings::{GitGutterSetting, GitHunkStyleSetting, ProjectSettings},
|
||||
};
|
||||
use settings::Settings;
|
||||
@@ -2320,7 +2320,7 @@ impl EditorElement {
|
||||
gutter_hitbox: &Hitbox,
|
||||
display_hunks: &[(DisplayDiffHunk, Option<Hitbox>)],
|
||||
snapshot: &EditorSnapshot,
|
||||
breakpoints: HashMap<DisplayRow, (Anchor, Breakpoint, Option<BreakpointSessionState>)>,
|
||||
breakpoints: HashMap<DisplayRow, (Anchor, Breakpoint)>,
|
||||
row_infos: &[RowInfo],
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
@@ -2328,7 +2328,7 @@ impl EditorElement {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
breakpoints
|
||||
.into_iter()
|
||||
.filter_map(|(display_row, (text_anchor, bp, state))| {
|
||||
.filter_map(|(display_row, (text_anchor, bp))| {
|
||||
if row_infos
|
||||
.get((display_row.0.saturating_sub(range.start.0)) as usize)
|
||||
.is_some_and(|row_info| {
|
||||
@@ -2351,7 +2351,7 @@ impl EditorElement {
|
||||
return None;
|
||||
}
|
||||
|
||||
let button = editor.render_breakpoint(text_anchor, display_row, &bp, state, cx);
|
||||
let button = editor.render_breakpoint(text_anchor, display_row, &bp, cx);
|
||||
|
||||
let button = prepaint_gutter_button(
|
||||
button,
|
||||
@@ -2381,7 +2381,7 @@ impl EditorElement {
|
||||
gutter_hitbox: &Hitbox,
|
||||
display_hunks: &[(DisplayDiffHunk, Option<Hitbox>)],
|
||||
snapshot: &EditorSnapshot,
|
||||
breakpoints: &mut HashMap<DisplayRow, (Anchor, Breakpoint, Option<BreakpointSessionState>)>,
|
||||
breakpoints: &mut HashMap<DisplayRow, (Anchor, Breakpoint)>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Vec<AnyElement> {
|
||||
@@ -6393,7 +6393,6 @@ pub(crate) struct LineWithInvisibles {
|
||||
font_size: Pixels,
|
||||
}
|
||||
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
enum LineFragment {
|
||||
Text(ShapedLine),
|
||||
Element {
|
||||
@@ -7454,10 +7453,8 @@ impl Element for EditorElement {
|
||||
editor.active_breakpoints(start_row..end_row, window, cx)
|
||||
});
|
||||
if cx.has_flag::<DebuggerFeatureFlag>() {
|
||||
for (display_row, (_, bp, state)) in &breakpoint_rows {
|
||||
if bp.is_enabled() && state.is_none_or(|s| s.verified) {
|
||||
active_rows.entry(*display_row).or_default().breakpoint = true;
|
||||
}
|
||||
for display_row in breakpoint_rows.keys() {
|
||||
active_rows.entry(*display_row).or_default().breakpoint = true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7497,7 +7494,7 @@ impl Element for EditorElement {
|
||||
let breakpoint = Breakpoint::new_standard();
|
||||
phantom_breakpoint.collides_with_existing_breakpoint =
|
||||
false;
|
||||
(position, breakpoint, None)
|
||||
(position, breakpoint)
|
||||
});
|
||||
}
|
||||
})
|
||||
|
||||
@@ -884,6 +884,7 @@ impl InfoPopover {
|
||||
*keyboard_grace = false;
|
||||
cx.stop_propagation();
|
||||
})
|
||||
.p_2()
|
||||
.when_some(self.parsed_content.clone(), |this, markdown| {
|
||||
this.child(
|
||||
div()
|
||||
@@ -891,7 +892,6 @@ impl InfoPopover {
|
||||
.overflow_y_scroll()
|
||||
.max_w(max_size.width)
|
||||
.max_h(max_size.height)
|
||||
.p_2()
|
||||
.track_scroll(&self.scroll_handle)
|
||||
.child(
|
||||
MarkdownElement::new(markdown, hover_markdown_style(window, cx))
|
||||
|
||||
@@ -30,7 +30,6 @@ chrono.workspace = true
|
||||
clap.workspace = true
|
||||
client.workspace = true
|
||||
collections.workspace = true
|
||||
debug_adapter_extension.workspace = true
|
||||
dirs.workspace = true
|
||||
dotenv.workspace = true
|
||||
env_logger.workspace = true
|
||||
|
||||
@@ -28,6 +28,17 @@ impl AssertionsReport {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn error(msg: String) -> Self {
|
||||
let assert = RanAssertion {
|
||||
id: "no-unhandled-errors".into(),
|
||||
result: Err(msg),
|
||||
};
|
||||
AssertionsReport {
|
||||
ran: vec![assert],
|
||||
max: Some(1),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.ran.is_empty()
|
||||
}
|
||||
@@ -145,7 +156,9 @@ pub fn print_table_divider() {
|
||||
}
|
||||
|
||||
fn truncate(assertion: &str, max_width: usize) -> String {
if assertion.len() <= max_width {
let is_verbose = std::env::var("VERBOSE").is_ok_and(|v| !v.is_empty());

if assertion.len() <= max_width || is_verbose {
assertion.to_string()
} else {
let mut end_ix = max_width - 1;

@@ -6,7 +6,7 @@ mod ids;
|
||||
mod instance;
|
||||
mod tool_metrics;
|
||||
|
||||
use assertions::display_error_row;
|
||||
use assertions::{AssertionsReport, display_error_row};
|
||||
use instance::{ExampleInstance, JudgeOutput, RunOutput, run_git};
|
||||
pub(crate) use tool_metrics::*;
|
||||
|
||||
@@ -422,7 +422,6 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
|
||||
let extension_host_proxy = ExtensionHostProxy::global(cx);
|
||||
|
||||
language::init(cx);
|
||||
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
|
||||
language_extension::init(extension_host_proxy.clone(), languages.clone());
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store.clone(), client.clone(), fs.clone(), cx);
|
||||
@@ -468,11 +467,12 @@ pub fn find_model(
match matching_models.as_slice() {
[model] => Ok(model.clone()),
[] => Err(anyhow!(
"No language model with ID {} was available. Available models: {}",
"No language model with ID {}/{} was available. Available models: {}",
provider_id,
model_id,
model_registry
.available_models(cx)
.map(|model| model.id().0.clone())
.map(|model| format!("{}/{}", model.provider_id().0, model.id().0))
.collect::<Vec<_>>()
.join(", ")
)),

@@ -582,12 +582,15 @@ fn print_report(
|
||||
Err(err) => {
|
||||
display_error_row(&mut table_rows, example.repetition, err.to_string())?;
|
||||
error_count += 1;
|
||||
programmatic_scores.push(0.0);
|
||||
diff_scores.push(0.0);
|
||||
thread_scores.push(0.0);
|
||||
}
|
||||
Ok((run_output, judge_output)) => {
|
||||
cumulative_tool_metrics.merge(&run_output.tool_metrics);
|
||||
example_cumulative_tool_metrics.merge(&run_output.tool_metrics);
|
||||
|
||||
if !run_output.programmatic_assertions.total_count() > 0 {
|
||||
if run_output.programmatic_assertions.total_count() > 0 {
|
||||
for assertion in &run_output.programmatic_assertions.ran {
|
||||
assertions::display_table_row(
|
||||
&mut table_rows,
|
||||
@@ -627,6 +630,8 @@ fn print_report(
|
||||
}
|
||||
}
|
||||
|
||||
let mut all_asserts = Vec::new();
|
||||
|
||||
if !table_rows.is_empty() {
|
||||
assertions::print_table_header();
|
||||
print!("{}", table_rows);
|
||||
@@ -635,33 +640,29 @@ fn print_report(
|
||||
|
||||
for (example, result) in results.iter() {
|
||||
if let Ok((run_output, judge_output)) = result {
|
||||
let asserts = [
|
||||
run_output.programmatic_assertions.clone(),
|
||||
judge_output.diff.clone(),
|
||||
judge_output.thread.clone(),
|
||||
];
|
||||
all_asserts.extend_from_slice(&asserts);
|
||||
assertions::print_table_round_summary(
|
||||
&example.repetition.to_string(),
|
||||
[
|
||||
&run_output.programmatic_assertions,
|
||||
&judge_output.diff,
|
||||
&judge_output.thread,
|
||||
]
|
||||
.into_iter(),
|
||||
asserts.iter(),
|
||||
)
|
||||
} else if let Err(err) = result {
|
||||
let assert = AssertionsReport::error(err.to_string());
|
||||
all_asserts.push(assert.clone());
|
||||
assertions::print_table_round_summary(
|
||||
&example.repetition.to_string(),
|
||||
[assert].iter(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
assertions::print_table_divider();
|
||||
|
||||
assertions::print_table_round_summary(
|
||||
"avg",
|
||||
results.iter().flat_map(|(_, result)| {
|
||||
result.iter().flat_map(|(run_output, judge_output)| {
|
||||
[
|
||||
&run_output.programmatic_assertions,
|
||||
&judge_output.diff,
|
||||
&judge_output.thread,
|
||||
]
|
||||
.into_iter()
|
||||
})
|
||||
}),
|
||||
);
|
||||
assertions::print_table_round_summary("avg", all_asserts.iter());
|
||||
|
||||
assertions::print_table_footer();
|
||||
}
|
||||
|
||||
@@ -33,6 +33,7 @@ serde_json.workspace = true
|
||||
task.workspace = true
|
||||
toml.workspace = true
|
||||
util.workspace = true
|
||||
wasi-preview1-component-adapter-provider.workspace = true
|
||||
wasm-encoder.workspace = true
|
||||
wasmparser.workspace = true
|
||||
wit-component.workspace = true
|
||||
|
||||
@@ -141,7 +141,6 @@ pub trait Extension: Send + Sync + 'static {
|
||||
dap_name: Arc<str>,
|
||||
config: DebugTaskDefinition,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
worktree: Arc<dyn WorktreeDelegate>,
|
||||
) -> Result<DebugAdapterBinary>;
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ use crate::{
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_tar::Archive;
|
||||
use futures::AsyncReadExt;
|
||||
use futures::io::BufReader;
|
||||
use heck::ToSnakeCase;
|
||||
use http_client::{self, AsyncBody, HttpClient};
|
||||
@@ -15,6 +14,7 @@ use std::{
|
||||
process::Stdio,
|
||||
sync::Arc,
|
||||
};
|
||||
use wasi_preview1_component_adapter_provider::WASI_SNAPSHOT_PREVIEW1_REACTOR_ADAPTER;
|
||||
use wasm_encoder::{ComponentSectionId, Encode as _, RawSection, Section as _};
|
||||
use wasmparser::Parser;
|
||||
use wit_component::ComponentEncoder;
|
||||
@@ -26,7 +26,6 @@ use wit_component::ComponentEncoder;
|
||||
/// Once Rust 1.78 is released, there will be a `wasm32-wasip2` target available, so we will
|
||||
/// not need the adapter anymore.
|
||||
const RUST_TARGET: &str = "wasm32-wasip1";
|
||||
const WASI_ADAPTER_URL: &str = "https://github.com/bytecodealliance/wasmtime/releases/download/v18.0.2/wasi_snapshot_preview1.reactor.wasm";
|
||||
|
||||
/// Compiling Tree-sitter parsers from C to WASM requires Clang 17, and a WASM build of libc
|
||||
/// and clang's runtime library. The `wasi-sdk` provides these binaries.
|
||||
@@ -137,7 +136,6 @@ impl ExtensionBuilder {
|
||||
options: CompileExtensionOptions,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
self.install_rust_wasm_target_if_needed()?;
|
||||
let adapter_bytes = self.install_wasi_preview1_adapter_if_needed().await?;
|
||||
|
||||
let cargo_toml_content = fs::read_to_string(extension_dir.join("Cargo.toml"))?;
|
||||
let cargo_toml: CargoToml = toml::from_str(&cargo_toml_content)?;
|
||||
@@ -186,7 +184,10 @@ impl ExtensionBuilder {
|
||||
|
||||
let mut encoder = ComponentEncoder::default()
|
||||
.module(&wasm_bytes)?
|
||||
.adapter("wasi_snapshot_preview1", &adapter_bytes)
|
||||
.adapter(
|
||||
"wasi_snapshot_preview1",
|
||||
WASI_SNAPSHOT_PREVIEW1_REACTOR_ADAPTER,
|
||||
)
|
||||
.context("failed to load adapter module")?
|
||||
.validate(true);
|
||||
|
||||
@@ -395,38 +396,6 @@ impl ExtensionBuilder {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn install_wasi_preview1_adapter_if_needed(&self) -> Result<Vec<u8>> {
|
||||
let cache_path = self.cache_dir.join("wasi_snapshot_preview1.reactor.wasm");
|
||||
if let Ok(content) = fs::read(&cache_path) {
|
||||
if Parser::is_core_wasm(&content) {
|
||||
return Ok(content);
|
||||
}
|
||||
}
|
||||
|
||||
fs::remove_file(&cache_path).ok();
|
||||
|
||||
log::info!(
|
||||
"downloading wasi adapter module to {}",
|
||||
cache_path.display()
|
||||
);
|
||||
let mut response = self
|
||||
.http
|
||||
.get(WASI_ADAPTER_URL, AsyncBody::default(), true)
|
||||
.await?;
|
||||
|
||||
let mut content = Vec::new();
|
||||
let mut body = BufReader::new(response.body_mut());
|
||||
body.read_to_end(&mut content).await?;
|
||||
|
||||
fs::write(&cache_path, &content)
|
||||
.with_context(|| format!("failed to save file {}", cache_path.display()))?;
|
||||
|
||||
if !Parser::is_core_wasm(&content) {
|
||||
bail!("downloaded wasi adapter is invalid");
|
||||
}
|
||||
Ok(content)
|
||||
}
|
||||
|
||||
async fn install_wasi_sdk_if_needed(&self) -> Result<PathBuf> {
|
||||
let url = if let Some(asset_name) = WASI_SDK_ASSET_NAME {
|
||||
format!("{WASI_SDK_URL}/{asset_name}")
|
||||
|
||||
@@ -87,8 +87,6 @@ pub struct ExtensionManifest {
|
||||
pub snippets: Option<PathBuf>,
|
||||
#[serde(default)]
|
||||
pub capabilities: Vec<ExtensionCapability>,
|
||||
#[serde(default)]
|
||||
pub debug_adapters: Vec<Arc<str>>,
|
||||
}
|
||||
|
||||
impl ExtensionManifest {
|
||||
@@ -276,7 +274,6 @@ fn manifest_from_old_manifest(
|
||||
indexed_docs_providers: BTreeMap::default(),
|
||||
snippets: None,
|
||||
capabilities: Vec::new(),
|
||||
debug_adapters: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
@@ -304,7 +301,6 @@ mod tests {
|
||||
indexed_docs_providers: BTreeMap::default(),
|
||||
snippets: None,
|
||||
capabilities: vec![],
|
||||
debug_adapters: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -19,11 +19,6 @@ pub use wit::{
|
||||
KeyValueStore, LanguageServerInstallationStatus, Project, Range, Worktree, download_file,
|
||||
make_file_executable,
|
||||
zed::extension::context_server::ContextServerConfiguration,
|
||||
zed::extension::dap::{
|
||||
DebugAdapterBinary, DebugRequest, DebugTaskDefinition, StartDebuggingRequestArguments,
|
||||
StartDebuggingRequestArgumentsRequest, TcpArguments, TcpArgumentsTemplate,
|
||||
resolve_tcp_template,
|
||||
},
|
||||
zed::extension::github::{
|
||||
GithubRelease, GithubReleaseAsset, GithubReleaseOptions, github_release_by_tag_name,
|
||||
latest_github_release,
|
||||
@@ -199,7 +194,6 @@ pub trait Extension: Send + Sync {
|
||||
_adapter_name: String,
|
||||
_config: DebugTaskDefinition,
|
||||
_user_provided_path: Option<String>,
|
||||
_worktree: &Worktree,
|
||||
) -> Result<DebugAdapterBinary, String> {
|
||||
Err("`get_dap_binary` not implemented".to_string())
|
||||
}
|
||||
@@ -392,9 +386,8 @@ impl wit::Guest for Component {
|
||||
adapter_name: String,
|
||||
config: DebugTaskDefinition,
|
||||
user_installed_path: Option<String>,
|
||||
worktree: &Worktree,
|
||||
) -> Result<wit::DebugAdapterBinary, String> {
|
||||
extension().get_dap_binary(adapter_name, config, user_installed_path, worktree)
|
||||
) -> Result<DebugAdapterBinary, String> {
|
||||
extension().get_dap_binary(adapter_name, config, user_installed_path)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
interface dap {
|
||||
use common.{env-vars};
|
||||
|
||||
/// Resolves a specified TcpArgumentsTemplate into TcpArguments
|
||||
resolve-tcp-template: func(template: tcp-arguments-template) -> result<tcp-arguments, string>;
|
||||
|
||||
record launch-request {
|
||||
program: string,
|
||||
cwd: option<string>,
|
||||
|
||||
@@ -11,7 +11,7 @@ world extension {

    use common.{env-vars, range};
    use context-server.{context-server-configuration};
    use dap.{debug-adapter-binary, debug-task-definition, debug-request};
    use dap.{debug-adapter-binary, debug-task-definition};
    use lsp.{completion, symbol};
    use process.{command};
    use slash-command.{slash-command, slash-command-argument-completion, slash-command-output};
@@ -157,5 +157,5 @@ world extension {
    export index-docs: func(provider-name: string, package-name: string, database: borrow<key-value-store>) -> result<_, string>;

    /// Returns a configured debug adapter binary for a given debug task.
    export get-dap-binary: func(adapter-name: string, config: debug-task-definition, user-installed-path: option<string>, worktree: borrow<worktree>) -> result<debug-adapter-binary, string>;
    export get-dap-binary: func(adapter-name: string, config: debug-task-definition, user-installed-path: option<string>) -> result<debug-adapter-binary, string>;
}

@@ -14,10 +14,9 @@ use collections::{BTreeMap, BTreeSet, HashMap, HashSet, btree_map};
pub use extension::ExtensionManifest;
use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder};
use extension::{
    ExtensionContextServerProxy, ExtensionDebugAdapterProviderProxy, ExtensionEvents,
    ExtensionGrammarProxy, ExtensionHostProxy, ExtensionIndexedDocsProviderProxy,
    ExtensionLanguageProxy, ExtensionLanguageServerProxy, ExtensionSlashCommandProxy,
    ExtensionSnippetProxy, ExtensionThemeProxy,
    ExtensionContextServerProxy, ExtensionEvents, ExtensionGrammarProxy, ExtensionHostProxy,
    ExtensionIndexedDocsProviderProxy, ExtensionLanguageProxy, ExtensionLanguageServerProxy,
    ExtensionSlashCommandProxy, ExtensionSnippetProxy, ExtensionThemeProxy,
};
use fs::{Fs, RemoveOptions};
use futures::{
@@ -1329,11 +1328,6 @@ impl ExtensionStore {
                this.proxy
                    .register_indexed_docs_provider(extension.clone(), provider_id.clone());
            }

            for debug_adapter in &manifest.debug_adapters {
                this.proxy
                    .register_debug_adapter(extension.clone(), debug_adapter.clone());
            }
        }

        this.wasm_extensions.extend(wasm_extensions);

@@ -164,7 +164,6 @@ async fn test_extension_store(cx: &mut TestAppContext) {
                indexed_docs_providers: BTreeMap::default(),
                snippets: None,
                capabilities: Vec::new(),
                debug_adapters: Default::default(),
            }),
            dev: false,
        },
@@ -194,7 +193,6 @@ async fn test_extension_store(cx: &mut TestAppContext) {
                indexed_docs_providers: BTreeMap::default(),
                snippets: None,
                capabilities: Vec::new(),
                debug_adapters: Default::default(),
            }),
            dev: false,
        },
@@ -369,7 +367,6 @@ async fn test_extension_store(cx: &mut TestAppContext) {
                indexed_docs_providers: BTreeMap::default(),
                snippets: None,
                capabilities: Vec::new(),
                debug_adapters: Default::default(),
            }),
            dev: false,
        },

@@ -379,13 +379,11 @@ impl extension::Extension for WasmExtension {
        dap_name: Arc<str>,
        config: DebugTaskDefinition,
        user_installed_path: Option<PathBuf>,
        worktree: Arc<dyn WorktreeDelegate>,
    ) -> Result<DebugAdapterBinary> {
        self.call(|extension, store| {
            async move {
                let resource = store.data_mut().table().push(worktree)?;
                let dap_binary = extension
                    .call_get_dap_binary(store, dap_name, config, user_installed_path, resource)
                    .call_get_dap_binary(store, dap_name, config, user_installed_path)
                    .await?
                    .map_err(|err| anyhow!("{err:?}"))?;
                let dap_binary = dap_binary.try_into()?;

@@ -903,7 +903,6 @@ impl Extension {
        adapter_name: Arc<str>,
        task: DebugTaskDefinition,
        user_installed_path: Option<PathBuf>,
        resource: Resource<Arc<dyn WorktreeDelegate>>,
    ) -> Result<Result<DebugAdapterBinary, String>> {
        match self {
            Extension::V0_6_0(ext) => {
@@ -913,7 +912,6 @@ impl Extension {
                        &adapter_name,
                        &task.try_into()?,
                        user_installed_path.as_ref().and_then(|p| p.to_str()),
                        resource,
                    )
                    .await?
                    .map_err(|e| anyhow!("{e:?}"))?;

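These two hunks show the host side handing a `WorktreeDelegate` to the guest as a wasm component resource: the delegate is pushed into the store's resource table and the resulting handle travels through `call_get_dap_binary`. Below is a minimal sketch of that push/get/delete lifecycle using wasmtime's `ResourceTable`, assuming a recent wasmtime with the component-model feature; the `WorktreeDelegate` trait and `FakeWorktree` type here are stand-ins, and Zed's real table lives inside its `WasmState`:

```rust
use std::sync::Arc;
use wasmtime::component::{Resource, ResourceTable};

// Stand-in for Zed's WorktreeDelegate; only the resource-table
// mechanics are being illustrated here.
trait WorktreeDelegate: Send + Sync {
    fn root_path(&self) -> String;
}

struct FakeWorktree;
impl WorktreeDelegate for FakeWorktree {
    fn root_path(&self) -> String {
        "/tmp/project".into()
    }
}

fn main() -> wasmtime::Result<()> {
    let mut table = ResourceTable::new();

    // Push the host object into the table; the guest only ever sees
    // the integer-backed `Resource` handle, not the Arc itself.
    let worktree: Arc<dyn WorktreeDelegate> = Arc::new(FakeWorktree);
    let handle: Resource<Arc<dyn WorktreeDelegate>> = table.push(worktree)?;

    // Host-side code can resolve the handle back to the object...
    let delegate = table.get(&handle)?;
    println!("worktree root: {}", delegate.root_path());

    // ...and remove it once the call that borrowed it is finished.
    table.delete(handle)?;
    Ok(())
}
```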
@@ -48,7 +48,7 @@ wasmtime::component::bindgen!({
pub use self::zed::extension::*;

mod settings {
    include!(concat!(env!("OUT_DIR"), "/since_v0.6.0/settings.rs"));
    include!(concat!(env!("OUT_DIR"), "/since_v0.5.0/settings.rs"));
}

pub type ExtensionWorktree = Arc<dyn WorktreeDelegate>;
@@ -729,29 +729,8 @@ impl slash_command::Host for WasmState {}
#[async_trait]
impl context_server::Host for WasmState {}

impl dap::Host for WasmState {
    async fn resolve_tcp_template(
        &mut self,
        template: TcpArgumentsTemplate,
    ) -> wasmtime::Result<Result<TcpArguments, String>> {
        maybe!(async {
            let (host, port, timeout) =
                ::dap::configure_tcp_connection(task::TcpArgumentsTemplate {
                    port: template.port,
                    host: template.host.map(Ipv4Addr::from_bits),
                    timeout: template.timeout,
                })
                .await?;
            Ok(TcpArguments {
                port,
                host: host.to_bits(),
                timeout,
            })
        })
        .await
        .to_wasmtime_result()
    }
}
#[async_trait]
impl dap::Host for WasmState {}

impl ExtensionImports for WasmState {
    async fn get_settings(

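In the `resolve_tcp_template` body shown above, the host address crosses the extension boundary as a raw `u32`, so it is converted with `Ipv4Addr::from_bits` / `to_bits` (stable since Rust 1.77). A small self-contained round-trip of that conversion; the literal address is just an example:

```rust
use std::net::Ipv4Addr;

fn main() {
    // 127.0.0.1 encoded as big-endian ("network order") bits.
    let raw: u32 = 0x7f00_0001;

    // from_bits reconstructs the address from the integer form...
    let addr = Ipv4Addr::from_bits(raw);
    assert_eq!(addr, Ipv4Addr::new(127, 0, 0, 1));

    // ...and to_bits goes back the other way, losslessly.
    assert_eq!(addr.to_bits(), raw);
    println!("{addr} <-> {raw:#010x}");
}
```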
@@ -23,6 +23,7 @@ fs.workspace = true
fuzzy.workspace = true
gpui.workspace = true
language.workspace = true
log.workspace = true
num-format.workspace = true
picker.workspace = true
project.workspace = true
@@ -37,9 +38,9 @@ theme.workspace = true
ui.workspace = true
util.workspace = true
vim_mode_setting.workspace = true
workspace-hack.workspace = true
workspace.workspace = true
zed_actions.workspace = true
workspace-hack.workspace = true

[dev-dependencies]
editor = { workspace = true, features = ["test-support"] }

@@ -132,10 +132,13 @@ pub fn init(cx: &mut App) {
            match install_task.await {
                Ok(_) => {}
                Err(err) => {
                    log::error!("Failed to install dev extension: {:?}", err);
                    workspace_handle
                        .update(cx, |workspace, cx| {
                            workspace.show_error(
                                &err.context("failed to install dev extension"),
                                // NOTE: using `anyhow::context` here ends up not printing
                                // the error
                                &format!("Failed to install dev extension: {}", err),
                                cx,
                            );
                        })

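The NOTE in this hunk is about how `anyhow` renders errors: the `Display` impl prints only the outermost message or context, while `{:?}` (Debug) prints the whole cause chain, so wrapping with `.context(...)` and then displaying the result would hide the underlying failure. A small sketch of the difference; the error messages are made up:

```rust
use anyhow::{Context, Result, anyhow};

fn install() -> Result<()> {
    // Stand-in for the real failure coming out of the install task.
    Err(anyhow!("wasm validation failed"))
}

fn main() {
    let err = install()
        .context("failed to install dev extension")
        .unwrap_err();

    // Display: only the outermost context is shown.
    println!("{err}");
    // -> failed to install dev extension

    // Debug: the full chain, including the original cause.
    println!("{err:?}");
    // -> failed to install dev extension
    //
    //    Caused by:
    //        wasm validation failed
}
```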
@@ -955,7 +958,7 @@ impl ExtensionsPage {
                .disabled(true),
            configure: is_configurable.then(|| {
                Button::new(
                    SharedString::from(format!("configure-{}", extension.id.clone())),
                    SharedString::from(format!("configure-{}", extension.id)),
                    "Configure",
                )
                .disabled(true)
@@ -980,7 +983,7 @@ impl ExtensionsPage {
            }),
            configure: is_configurable.then(|| {
                Button::new(
                    SharedString::from(format!("configure-{}", extension.id.clone())),
                    SharedString::from(format!("configure-{}", extension.id)),
                    "Configure",
                )
                .on_click({
@@ -1049,7 +1052,7 @@ impl ExtensionsPage {
                .disabled(true),
            configure: is_configurable.then(|| {
                Button::new(
                    SharedString::from(format!("configure-{}", extension.id.clone())),
                    SharedString::from(format!("configure-{}", extension.id)),
                    "Configure",
                )
                .disabled(true)

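All three hunks drop a needless `.clone()`: `format!` only borrows its arguments through their `Display` impls, so cloning the id before formatting just allocates a copy that is immediately thrown away. A minimal illustration; the `ExtensionEntry` struct is a hypothetical stand-in:

```rust
// Hypothetical stand-in for the extension entry used above.
struct ExtensionEntry {
    id: String,
}

fn main() {
    let extension = ExtensionEntry {
        id: "my-extension".to_string(),
    };

    // `format!` takes its arguments by reference, so no clone is needed
    // and `extension.id` is still usable afterwards.
    let element_id = format!("configure-{}", extension.id);

    assert_eq!(element_id, "configure-my-extension");
    assert_eq!(extension.id, "my-extension"); // still owned, not moved
}
```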
@@ -354,8 +354,9 @@ impl PickerDelegate for NewPathDelegate {
        let m = self.matches.get(self.selected_index)?;
        if m.is_dir(self.project.read(cx), cx) {
            let path = m.relative_path();
            self.last_selected_dir = Some(path.clone());
            Some(format!("{}/", path))
            let result = format!("{}/", path);
            self.last_selected_dir = Some(path);
            Some(result)
        } else {
            None
        }

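This hunk removes a clone by reordering: the display string is formatted first (which only borrows `path`), and then `path` is moved into `self.last_selected_dir`. A small sketch of the same borrow-then-move pattern outside the picker; the `Delegate` struct here is hypothetical:

```rust
// Hypothetical struct standing in for the picker delegate's state.
struct Delegate {
    last_selected_dir: Option<String>,
}

fn confirm_dir(delegate: &mut Delegate, path: String) -> Option<String> {
    // Borrow `path` to build the suffixed form first...
    let result = format!("{}/", path);
    // ...then move `path` into the struct; no clone required.
    delegate.last_selected_dir = Some(path);
    Some(result)
}

fn main() {
    let mut delegate = Delegate {
        last_selected_dir: None,
    };
    assert_eq!(
        confirm_dir(&mut delegate, "src/ui".to_string()),
        Some("src/ui/".to_string())
    );
    assert_eq!(delegate.last_selected_dir.as_deref(), Some("src/ui"));
}
```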
@@ -748,6 +748,7 @@ impl GitRepository for RealGitRepository {
                    "--no-optional-locks",
                    "cat-file",
                    "--batch-check=%(objectname)",
                    "-z",
                ])
                .stdin(Stdio::piped())
                .stdout(Stdio::piped())
@@ -760,7 +761,7 @@ impl GitRepository for RealGitRepository {
                .ok_or_else(|| anyhow!("no stdin for git cat-file subprocess"))?;
            let mut stdin = BufWriter::new(stdin);
            for rev in &revs {
                write!(&mut stdin, "{rev}\n")?;
                write!(&mut stdin, "{rev}\0")?;
            }
            drop(stdin);

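These hunks pair `git cat-file --batch-check` with `-z`, which switches the subprocess to NUL-delimited input, so each rev written to stdin must be terminated with `\0` instead of `\n` (note that `-z` in the batch modes needs a reasonably recent Git). A stand-alone sketch of driving the same subprocess from Rust, outside Zed's `GitRepository` wrapper; the example revs are arbitrary:

```rust
use std::io::{BufRead, BufReader, BufWriter, Write};
use std::process::{Command, Stdio};

fn main() -> std::io::Result<()> {
    // Revisions to check; in the real code these come from the repository.
    let revs = ["HEAD", "HEAD~1"];

    let mut child = Command::new("git")
        .args([
            "--no-optional-locks",
            "cat-file",
            "--batch-check=%(objectname)",
            "-z", // NUL-delimited input instead of newline-delimited
        ])
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()?;

    // Write each rev terminated by NUL, matching what `-z` expects.
    {
        let mut stdin = BufWriter::new(child.stdin.take().expect("piped stdin"));
        for rev in &revs {
            write!(&mut stdin, "{rev}\0")?;
        }
    } // dropping the writer closes stdin so git can produce its output

    // Output stays newline-delimited: one line per input rev.
    let stdout = BufReader::new(child.stdout.take().expect("piped stdout"));
    for line in stdout.lines() {
        println!("{}", line?);
    }
    child.wait()?;
    Ok(())
}
```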
@@ -35,7 +35,6 @@ itertools.workspace = true
language.workspace = true
language_model.workspace = true
linkify.workspace = true
linkme.workspace = true
log.workspace = true
markdown.workspace = true
menu.workspace = true

@@ -2583,19 +2583,18 @@ impl GitPanel {
        } else {
            workspace.update(cx, |workspace, cx| {
                let workspace_weak = cx.weak_entity();
                let toast =
                    StatusToast::new(format!("git {} failed", action.clone()), cx, |this, _cx| {
                        this.icon(ToastIcon::new(IconName::XCircle).color(Color::Error))
                            .action("View Log", move |window, cx| {
                                let message = message.clone();
                                let action = action.clone();
                                workspace_weak
                                    .update(cx, move |workspace, cx| {
                                        Self::open_output(action, workspace, &message, window, cx)
                                    })
                                    .ok();
                            })
                    });
                let toast = StatusToast::new(format!("git {} failed", action), cx, |this, _cx| {
                    this.icon(ToastIcon::new(IconName::XCircle).color(Color::Error))
                        .action("View Log", move |window, cx| {
                            let message = message.clone();
                            let action = action.clone();
                            workspace_weak
                                .update(cx, move |workspace, cx| {
                                    Self::open_output(action, workspace, &message, window, cx)
                                })
                                .ok();
                        })
                });
                workspace.toggle_status_toast(toast, cx)
            });
        }

Some files were not shown because too many files have changed in this diff.