Compare commits
20 Commits: rustls-pan ... bash-timeo
| Author | SHA1 | Date |
|---|---|---|
| | bdb1768c33 | |
| | f7c3c533a3 | |
| | c05bf096f8 | |
| | b15ee1b1cc | |
| | 0459b1d303 | |
| | 246013cfc2 | |
| | 47eaf274d6 | |
| | ef4b5b0698 | |
| | 39c98ce882 | |
| | 763cc6dba3 | |
| | 0b75c13034 | |
| | 38ec45008c | |
| | 97641c3298 | |
| | ca8f6e8a3f | |
| | db53da49e1 | |
| | df94dcdea6 | |
| | 1c85901440 | |
| | 9fb77ad176 | |
| | feafad2f9d | |
| | 86ef00054b | |
.github/ISSUE_TEMPLATE/01_bug_agent.yml (vendored, new file, 36 lines)
@@ -0,0 +1,36 @@
name: Bug Report (Agent Panel)
description: Zed Agent Panel Bugs
type: "Bug"
labels: ["agent", "ai"]
title: "Agent Panel: <a short description of the Agent Panel bug>"
body:
  - type: textarea
    attributes:
      label: Summary
      description: Describe the bug with a one line summary, and provide detailed reproduction steps
      value: |
        <!-- Please insert a one line summary of the issue below -->
        SUMMARY_SENTENCE_HERE

        ### Description
        <!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
        <!-- Please include the LLM provider and model name you are using -->
        Steps to trigger the problem:
        1.
        2.
        3.

        Actual Behavior:
        Expected Behavior:
    validations:
      required: true

  - type: textarea
    id: environment
    attributes:
      label: Zed Version and System Specs
      description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
      placeholder: |
        Output of "zed: Copy System Specs Into Clipboard"
    validations:
      required: true
.github/ISSUE_TEMPLATE/01_git_bug_report.yml (vendored, deleted, 51 lines)
@@ -1,51 +0,0 @@
name: Git Bug Report
description: There is a bug related to Git features in Zed
type: "Bug"
labels: ["git"]
title: "Git: <a short description of the Git bug>"
body:
  - type: textarea
    attributes:
      label: Summary
      description: Describe the bug with a one line summary, and provide detailed reproduction steps
      value: |
        <!-- Please insert a one line summary of the issue below -->

        <!-- Include all steps necessary to reproduce from a clean Zed installation. Be verbose -->
        Steps to trigger the problem:
        1.
        2.
        3.

        Actual Behavior:

        Expected Behavior:
    validations:
      required: true
  - type: textarea
    id: environment
    attributes:
      label: Zed Version and System Specs
      description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
      placeholder: |
        Output of "zed: Copy System Specs Into Clipboard"
    validations:
      required: true
  - type: textarea
    attributes:
      label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
      description: |
        macOS: `~/Library/Logs/Zed/Zed.log`
        Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
        If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
      value: |
        <details><summary>Zed.log</summary>

        <!-- Click below this line and paste or drag-and-drop your log-->
        ```

        ```
        <!-- Click above this line and paste or drag-and-drop your log--></details>
    validations:
      required: false
.github/ISSUE_TEMPLATE/02_agent_bug_report.yml (vendored, deleted, 51 lines)
@@ -1,51 +0,0 @@
name: Agent Panel Bug Report
description: There is a bug related to the Agent Panel in Zed
type: "Bug"
labels: ["agent", "ai"]
title: "Agent Panel: <a short description of the Agent Panel bug>"
body:
  - type: textarea
    attributes:
      label: Summary
      description: Describe the bug with a one line summary, and provide detailed reproduction steps
      value: |
        <!-- Please insert a one line summary of the issue below -->

        <!-- Include all steps necessary to reproduce from a clean Zed installation. Be verbose -->
        Steps to trigger the problem:
        1.
        2.
        3.

        Actual Behavior:

        Expected Behavior:
    validations:
      required: true
  - type: textarea
    id: environment
    attributes:
      label: Zed Version and System Specs
      description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
      placeholder: |
        Output of "zed: Copy System Specs Into Clipboard"
    validations:
      required: true
  - type: textarea
    attributes:
      label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
      description: |
        macOS: `~/Library/Logs/Zed/Zed.log`
        Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
        If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
      value: |
        <details><summary>Zed.log</summary>

        <!-- Click below this line and paste or drag-and-drop your log-->
        ```

        ```
        <!-- Click above this line and paste or drag-and-drop your log--></details>
    validations:
      required: false
@@ -1,5 +1,5 @@
name: Edit Predictions Bug Report
description: There is a bug related to Edit Predictions in Zed
name: Bug Report (Edit Predictions)
description: Zed Edit Predictions bugs
type: "Bug"
labels: ["ai", "inline completion", "zeta"]
title: "Edit Predictions: <a short description of the Edit Prediction bug>"

@@ -10,19 +10,21 @@ body:
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE

<!-- Include all steps necessary to reproduce from a clean Zed installation. Be verbose -->
### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
<!-- Please include the LLM provider and model name you are using -->
Steps to trigger the problem:
1.
2.
3.

Actual Behavior:

Expected Behavior:

validations:
required: true

- type: textarea
id: environment
attributes:

@@ -32,20 +34,3 @@ body:
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
- type: textarea
attributes:
label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
description: |
macOS: `~/Library/Logs/Zed/Zed.log`
Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
value: |
<details><summary>Zed.log</summary>

<!-- Click below this line and paste or drag-and-drop your log-->
```

```
<!-- Click above this line and paste or drag-and-drop your log--></details>
validations:
required: false
.github/ISSUE_TEMPLATE/03_bug_git.yml (vendored, new file, 35 lines)
@@ -0,0 +1,35 @@
name: Bug Report (Git)
description: Zed Git-Related Bugs
type: "Bug"
labels: ["git"]
title: "Git: <a short description of the Git bug>"
body:
  - type: textarea
    attributes:
      label: Summary
      description: Describe the bug with a one line summary, and provide detailed reproduction steps
      value: |
        <!-- Please insert a one line summary of the issue below -->
        SUMMARY_SENTENCE_HERE

        ### Description
        <!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
        Steps to trigger the problem:
        1.
        2.
        3.

        Actual Behavior:
        Expected Behavior:

    validations:
      required: true
  - type: textarea
    id: environment
    attributes:
      label: Zed Version and System Specs
      description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
      placeholder: |
        Output of "zed: Copy System Specs Into Clipboard"
    validations:
      required: true
.github/ISSUE_TEMPLATE/10_bug_report.yml (vendored, 47 changed lines)
@@ -1,46 +1,44 @@
name: Bug Report
name: Bug Report (Other)
description: |
Something is broken in Zed (exclude crashing).
Something else is broken in Zed (exclude crashing).
type: "Bug"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
description: Provide a one sentence summary and detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->

<!-- Begin your issue with a one sentence summary -->
SUMMARY_SENTENCE_HERE

<!-- Be verbose: Include all steps necessary to reproduce from a clean Zed installation. -->
<!-- Code snippets are better than images, a repository link that reproduces the issue is ideal. -->
### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install.
- Any code must be sufficient to reproduce (include context!)
- Code must as text, not just as a screenshot.
- Issues with insufficient detail may be summarily closed.
-->

Steps to trigger the problem:
Steps to reproduce:
1.
2.
3.
4.

Expected Behavior:
Actual Behavior:

Expected Behavior:

<!-- Before Submitting, did you:
1. Include settings.json, keymap.json, .editorconfig if relevant?
2. Check your Zed.log for relevant errors? (please include!)
3. Click Preview to ensure everything looks right?
4. Hide videos, large images and logs in ``` inside collapsible blocks:

<!--
Is there anything additional necessary to reproduce this issue?
- settings.json, keymap.json, .editorconfig etc?
- Does it happen intermittently or only with specific projects / file types?
- Have you found a workaround?
<details><summary>click to expand</summary>

Did you check your Zed.log to see if there is any relevant details there?
- When including large items (videos, screenshots, logs, configs) please wrap with:
```json

<details><summary>See inside for XXXXYYY</summary>

```shell
code
```

</details>
```
</details>
-->

validations:

@@ -50,7 +48,8 @@ body:
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
description: |
Open Zed, from the command palette select "zed: Copy System Specs Into Clipboard"
placeholder: |
Output of "zed: Copy System Specs Into Clipboard"
validations:
.github/ISSUE_TEMPLATE/11_crash_report.yml (vendored, 14 changed lines)
@@ -5,10 +5,12 @@ body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
description: Summarize the issue with detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
<!-- Begin your issue with a one sentence summary -->
SUMMARY_SENTENCE_HERE

### Description
<!-- Include all steps necessary to reproduce from a clean Zed installation. Be verbose -->
Steps to trigger the problem:
1.

@@ -16,7 +18,6 @@ body:
3.

Actual Behavior:

Expected Behavior:

validations:

@@ -40,10 +41,11 @@ body:
value: |
<details><summary>Zed.log</summary>

<!-- Click below this line and paste or drag-and-drop your log-->
```
<!-- Paste your log inside the code block. -->
```log

```
<!-- Click above this line and paste or drag-and-drop your log--></details>

</details>
validations:
required: false
.github/ISSUE_TEMPLATE/config.yml (vendored, 5 changed lines)
@@ -4,9 +4,6 @@ contact_links:
- name: Feature Request
url: https://github.com/zed-industries/zed/discussions/new/choose
about: To request a feature, open a new Discussion in one of the appropriate Discussion categories
- name: Zed Discussion Forum
url: https://github.com/zed-industries/zed/discussions
about: A community discussion forum
- name: "Zed Discord: #Support Channel"
- name: "Zed Discord"
url: https://zed.dev/community-links
about: Real-time discussion and user support
.github/workflows/ci.yml (vendored, 4 changed lines)
@@ -114,7 +114,9 @@ jobs:
timeout-minutes: 60
name: Check workspace-hack crate
needs: [job_spec]
if: github.repository_owner == 'zed-industries'
if: |
  github.repository_owner == 'zed-industries' &&
  needs.job_spec.outputs.run_tests == 'true'
runs-on:
  - buildjet-8vcpu-ubuntu-2204
steps:
Cargo.lock (generated, 53 changed lines)
@@ -746,7 +746,6 @@ dependencies = [
"itertools 0.14.0",
"language",
"language_model",
"lsp",
"open",
"project",
"rand 0.8.5",

@@ -3330,6 +3329,15 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"

[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
 "unicode-segmentation",
]

[[package]]
name = "convert_case"
version = "0.8.0"

@@ -4462,6 +4470,32 @@ dependencies = [
 "workspace-hack",
]

[[package]]
name = "documented"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc6db32f0995bc4553d2de888999075acd0dbeef75ba923503f6a724263dc6f3"
dependencies = [
 "documented-macros",
 "phf",
 "thiserror 1.0.69",
]

[[package]]
name = "documented-macros"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a394bb35929b58f9a5fd418f7c6b17a4b616efcc1e53e6995ca123948f87e5fa"
dependencies = [
 "convert_case 0.6.0",
 "itertools 0.13.0",
 "optfield",
 "proc-macro2",
 "quote",
 "strum",
 "syn 2.0.100",
]

[[package]]
name = "dotenvy"
version = "0.15.7"

@@ -7876,7 +7910,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [
 "cfg-if",
 "windows-targets 0.48.5",
 "windows-targets 0.52.6",
]

[[package]]

@@ -9543,6 +9577,17 @@ dependencies = [
 "vcpkg",
]

[[package]]
name = "optfield"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa59f025cde9c698fcb4fcb3533db4621795374065bee908215263488f2d2a1d"
dependencies = [
 "proc-macro2",
 "quote",
 "syn 2.0.100",
]

[[package]]
name = "option-ext"
version = "0.2.0"

@@ -14128,12 +14173,14 @@ name = "tasks_ui"
version = "0.1.0"
dependencies = [
 "anyhow",
 "collections",
 "debugger_ui",
 "editor",
 "feature_flags",
 "file_icons",
 "fuzzy",
 "gpui",
 "itertools 0.14.0",
 "language",
 "menu",
 "picker",

@@ -15348,6 +15395,7 @@ version = "0.1.0"
dependencies = [
 "chrono",
 "component",
 "documented",
 "gpui",
 "icons",
 "itertools 0.14.0",

@@ -17613,6 +17661,7 @@ dependencies = [
 "indexmap",
 "inout",
 "itertools 0.12.1",
 "itertools 0.13.0",
 "lazy_static",
 "libc",
 "libsqlite3-sys",
@@ -532,6 +532,7 @@
"context": "Editor && showing_completions",
"bindings": {
  "enter": "editor::ConfirmCompletion",
  "shift-enter": "editor::ConfirmCompletionReplace",
  "tab": "editor::ComposeCompletion"
}
},

@@ -681,6 +681,7 @@
"use_key_equivalents": true,
"bindings": {
  "enter": "editor::ConfirmCompletion",
  "shift-enter": "editor::ConfirmCompletionReplace",
  "tab": "editor::ComposeCompletion"
}
},
@@ -6,11 +6,18 @@ You are an AI assistant integrated into a code editor. You have the programming
It will be up to you to decide which of these you are doing based on what the user has told you. When unclear, ask clarifying questions to understand the user's intent before proceeding.

You should only perform actions that modify the user's system if explicitly requested by the user:
- If the user asks a question about how to accomplish a task, provide guidance or information, and use read-only tools (e.g., search) to assist. You may suggest potential actions, but do not directly modify the user’s system without explicit instruction.
- If the user asks a question about how to accomplish a task, provide guidance or information, and use read-only tools (e.g., search) to assist. You may suggest potential actions, but do not directly modify the user's system without explicit instruction.
- If the user clearly requests that you perform an action, carry out the action directly without explaining why you are doing so.

When answering questions, it's okay to give incomplete examples containing comments about what would go there in a real version. When being asked to directly perform tasks on the code base, you must ALWAYS make fully working code. You may never "simplify" the code by omitting or deleting functionality you know the user has requested, and you must NEVER write comments like "in a full version, this would..." - instead, you must actually implement the real version. Don't be lazy!

Note that project files are automatically backed up. The user can always get them back later if anything goes wrong, so there's
no need to create backup files (e.g. `.bak` files) because these files will just take up unnecessary space on the user's disk.

When attempting to resolve issues around failing tests, never simply remove the failing tests. Unless the user explicitly asks you to remove tests, ALWAYS attempt to fix the code causing the tests to fail.

Ignore "TODO"-type comments unless they're relevant to the user's explicit request or the user specifically asks you to address them. It is, however, okay to include them in codebase summaries.

<style>
Editing code:
- Make sure to take previous edits into account.
@@ -1136,7 +1136,8 @@
"code_actions_on_format": {},
// Settings related to running tasks.
"tasks": {
  "variables": {}
  "variables": {},
  "enabled": true
},
// An object whose keys are language names, and whose values
// are arrays of filenames or extensions of files that should

@@ -1456,6 +1457,8 @@
"lsp": {
  // Specify the LSP name as a key here.
  // "rust-analyzer": {
  //   // A special flag for rust-analyzer integration, to use server-provided tasks
  //   "enable_lsp_tasks": true,
  //   // These initialization options are merged into Zed's defaults
  //   "initialization_options": {
  //     "check": {
@@ -106,7 +106,7 @@ impl ContextPickerCompletionProvider {
.iter()
.map(|mode| {
    Completion {
        old_range: source_range.clone(),
        replace_range: source_range.clone(),
        new_text: format!("@{} ", mode.mention_prefix()),
        label: CodeLabel::plain(mode.label().to_string(), None),
        icon_path: Some(mode.icon().path().into()),

@@ -160,7 +160,7 @@ impl ContextPickerCompletionProvider {
let new_text = MentionLink::for_thread(&thread_entry);
let new_text_len = new_text.len();
Completion {
    old_range: source_range.clone(),
    replace_range: source_range.clone(),
    new_text,
    label: CodeLabel::plain(thread_entry.summary.to_string(), None),
    documentation: None,

@@ -205,7 +205,7 @@ impl ContextPickerCompletionProvider {
let new_text = MentionLink::for_fetch(&url_to_fetch);
let new_text_len = new_text.len();
Completion {
    old_range: source_range.clone(),
    replace_range: source_range.clone(),
    new_text,
    label: CodeLabel::plain(url_to_fetch.to_string(), None),
    documentation: None,

@@ -287,7 +287,7 @@ impl ContextPickerCompletionProvider {
let new_text = MentionLink::for_file(&file_name, &full_path);
let new_text_len = new_text.len();
Completion {
    old_range: source_range.clone(),
    replace_range: source_range.clone(),
    new_text,
    label,
    documentation: None,

@@ -350,7 +350,7 @@ impl ContextPickerCompletionProvider {
let new_text = MentionLink::for_symbol(&symbol.name, &full_path);
let new_text_len = new_text.len();
Some(Completion {
    old_range: source_range.clone(),
    replace_range: source_range.clone(),
    new_text,
    label,
    documentation: None,
@@ -1414,7 +1414,7 @@ impl Thread {

for tool_use in pending_tool_uses.iter() {
    if let Some(tool) = self.tools.tool(&tool_use.name, cx) {
        if tool.needs_confirmation()
        if tool.needs_confirmation(&tool_use.input, cx)
            && !AssistantSettings::get_global(cx).always_allow_tool_actions
        {
            self.tool_use.confirm_tool_use(

@@ -201,7 +201,7 @@ impl ToolUseState {

let (icon, needs_confirmation) = if let Some(tool) = self.tools.tool(&tool_use.name, cx)
{
    (tool.icon(), tool.needs_confirmation())
    (tool.icon(), tool.needs_confirmation(&tool_use.input, cx))
} else {
    (IconName::Cog, false)
};
@@ -120,7 +120,7 @@ impl SlashCommandCompletionProvider {
) as Arc<_>
});
Some(project::Completion {
    old_range: name_range.clone(),
    replace_range: name_range.clone(),
    documentation: Some(CompletionDocumentation::SingleLine(
        command.description().into(),
    )),

@@ -219,7 +219,7 @@ impl SlashCommandCompletionProvider {
}

project::Completion {
    old_range: if new_argument.replace_previous_arguments {
    replace_range: if new_argument.replace_previous_arguments {
        argument_range.clone()
    } else {
        last_argument_range.clone()
@@ -48,7 +48,7 @@ pub trait Tool: 'static + Send + Sync {

/// Returns true iff the tool needs the users's confirmation
/// before having permission to run.
fn needs_confirmation(&self) -> bool;
fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool;

/// Returns the JSON schema that describes the tool's input.
fn input_schema(&self, _: LanguageModelToolSchemaFormat) -> serde_json::Value {
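The call sites that follow all adapt to this new signature. As a hedged illustration of what the extra parameters enable (not code from this PR), a tool can now parse its JSON input and only prompt for risky invocations; `RemoveEntryTool` and `RemoveEntryInput` are invented names, and `App` is gpui's application context from the Zed workspace:

```rust
use gpui::App;
use serde::Deserialize;

/// Hypothetical input type, for illustration only.
#[derive(Deserialize)]
struct RemoveEntryInput {
    recursive: bool,
}

struct RemoveEntryTool;

impl RemoveEntryTool {
    /// Mirrors the updated trait method: the decision can depend on the
    /// concrete arguments the model supplied, not just on the tool's identity.
    fn needs_confirmation(&self, input: &serde_json::Value, _cx: &App) -> bool {
        serde_json::from_value::<RemoveEntryInput>(input.clone())
            // In this sketch, only recursive deletes prompt the user.
            .map(|input| input.recursive)
            // If the input does not even parse, be conservative and ask.
            .unwrap_or(true)
    }
}
```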
@@ -23,7 +23,6 @@ http_client.workspace = true
itertools.workspace = true
language.workspace = true
language_model.workspace = true
lsp.workspace = true
project.workspace = true
regex.workspace = true
schemars.workspace = true
@@ -1,6 +1,5 @@
mod bash_tool;
mod batch_tool;
mod code_symbol_iter;
mod code_symbols_tool;
mod copy_path_tool;
mod create_directory_tool;
@@ -2,7 +2,7 @@ use crate::schema::json_schema_for;
use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool};
use futures::io::BufReader;
use futures::{AsyncBufReadExt, AsyncReadExt};
use futures::{AsyncBufReadExt, AsyncReadExt, FutureExt};
use gpui::{App, Entity, Task};
use language_model::{LanguageModelRequestMessage, LanguageModelToolSchemaFormat};
use project::Project;

@@ -16,7 +16,7 @@ use util::markdown::MarkdownString;

#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct BashToolInput {
    /// The bash command to execute as a one-liner.
    /// The bash one-liner command to execute.
    command: String,
    /// Working directory for the command. This must be one of the root directories of the project.
    cd: String,

@@ -29,7 +29,7 @@ impl Tool for BashTool {
    "bash".to_string()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    true
}

@@ -123,7 +123,7 @@ impl Tool for BashTool {
    worktree.read(cx).abs_path()
};

cx.spawn(async move |_| {
cx.spawn(async move |cx| {
    // Add 2>&1 to merge stderr into stdout for proper interleaving.
    let command = format!("({}) 2>&1", input.command);

@@ -158,14 +158,33 @@ impl Tool for BashTool {
let mut buffer = vec![0; LIMIT + 1];
let bytes_read = reader.read(&mut buffer).await?;

let mut timer = cx
    .background_executor()
    .timer(std::time::Duration::from_secs(10))
    .fuse();

// Repeatedly fill the output reader's buffer without copying it.
loop {
    let skipped_bytes = reader.fill_buf().await?;
    if skipped_bytes.is_empty() {
        break;
    let mut skipped_bytes = reader.fill_buf().fuse();

    futures::select! {
        skipped_bytes = skipped_bytes => {
            let skipped_bytes = skipped_bytes?;
            if skipped_bytes.is_empty() {
                break;
            }
            let skipped_bytes_len = skipped_bytes.len();
            reader.consume_unpin(skipped_bytes_len);

            timer = cx
                .background_executor()
                .timer(std::time::Duration::from_secs(10))
                .fuse();
        }
        _ = timer => {
            return Err(anyhow!("Command timed out. Output so far:\n{}", String::from_utf8_lossy(&buffer[..bytes_read])));
        }
    }
    let skipped_bytes_len = skipped_bytes.len();
    reader.consume_unpin(skipped_bytes_len);
}

let output_bytes = &buffer[..bytes_read];
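Outside gpui's executor, the same inactivity-timeout shape can be reproduced with stock async primitives. A minimal sketch, assuming the `futures`, `smol`, and `anyhow` crates; the function name and the idle limit are illustrative, not Zed API:

```rust
use std::time::Duration;

use anyhow::bail;
use futures::{AsyncRead, AsyncReadExt, FutureExt, select};

/// Read `reader` to EOF, but give up if no new output arrives within `idle_limit`.
async fn drain_with_inactivity_timeout<R: AsyncRead + Unpin>(
    mut reader: R,
    idle_limit: Duration,
) -> anyhow::Result<Vec<u8>> {
    let mut output = Vec::new();
    let mut chunk = [0u8; 4096];
    loop {
        // A fresh timer is armed for every read attempt, so only a *silent*
        // command times out, not merely a long-running one.
        let mut timer = smol::Timer::after(idle_limit).fuse();
        let bytes_read = {
            let mut read = reader.read(&mut chunk).fuse();
            select! {
                n = read => n?,
                _ = timer => bail!("no output for {idle_limit:?}, giving up"),
            }
        };
        if bytes_read == 0 {
            break; // EOF: the command has finished.
        }
        output.extend_from_slice(&chunk[..bytes_read]);
    }
    Ok(output)
}
```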
@@ -151,8 +151,17 @@ impl Tool for BatchTool {
    "batch_tool".into()
}

fn needs_confirmation(&self) -> bool {
    true
fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool {
    serde_json::from_value::<BatchToolInput>(input.clone())
        .map(|input| {
            let working_set = ToolWorkingSet::default();
            input.invocations.iter().any(|invocation| {
                working_set
                    .tool(&invocation.name, cx)
                    .map_or(false, |tool| tool.needs_confirmation(&invocation.input, cx))
            })
        })
        .unwrap_or(false)
}

fn description(&self) -> String {
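The rule encoded there — a batch asks for confirmation exactly when at least one nested invocation would — is easy to see in isolation. A dependency-free sketch; `Invocation` here is an illustrative stand-in, not the PR's `BatchToolInput`:

```rust
/// Illustrative stand-in for one nested tool call inside a batch.
struct Invocation {
    name: String,
    input: serde_json::Value,
}

/// A batch needs confirmation iff any of its invocations does; the closure
/// stands in for the lookup into the tool working set.
fn batch_needs_confirmation(
    invocations: &[Invocation],
    needs_confirmation: impl Fn(&str, &serde_json::Value) -> bool,
) -> bool {
    invocations
        .iter()
        .any(|invocation| needs_confirmation(&invocation.name, &invocation.input))
}
```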
@@ -1,88 +0,0 @@
use project::DocumentSymbol;
use regex::Regex;

#[derive(Debug, Clone)]
pub struct Entry {
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub depth: u32,
    pub start_line: usize,
    pub end_line: usize,
}

/// An iterator that filters document symbols based on a regex pattern.
/// This iterator recursively traverses the document symbol tree, incrementing depth for child symbols.
#[derive(Debug, Clone)]
pub struct CodeSymbolIterator<'a> {
    symbols: &'a [DocumentSymbol],
    regex: Option<Regex>,
    // Stack of (symbol, depth) pairs to process
    pending_symbols: Vec<(&'a DocumentSymbol, u32)>,
    current_index: usize,
    current_depth: u32,
}

impl<'a> CodeSymbolIterator<'a> {
    pub fn new(symbols: &'a [DocumentSymbol], regex: Option<Regex>) -> Self {
        Self {
            symbols,
            regex,
            pending_symbols: Vec::new(),
            current_index: 0,
            current_depth: 0,
        }
    }
}

impl Iterator for CodeSymbolIterator<'_> {
    type Item = Entry;

    fn next(&mut self) -> Option<Self::Item> {
        if let Some((symbol, depth)) = self.pending_symbols.pop() {
            for child in symbol.children.iter().rev() {
                self.pending_symbols.push((child, depth + 1));
            }

            return Some(Entry {
                name: symbol.name.clone(),
                kind: symbol.kind,
                depth,
                start_line: symbol.range.start.0.row as usize,
                end_line: symbol.range.end.0.row as usize,
            });
        }

        while self.current_index < self.symbols.len() {
            let regex = self.regex.as_ref();
            let symbol = &self.symbols[self.current_index];
            self.current_index += 1;

            if regex.is_none_or(|regex| regex.is_match(&symbol.name)) {
                // Push in reverse order to maintain traversal order
                for child in symbol.children.iter().rev() {
                    self.pending_symbols.push((child, self.current_depth + 1));
                }

                return Some(Entry {
                    name: symbol.name.clone(),
                    kind: symbol.kind,
                    depth: self.current_depth,
                    start_line: symbol.range.start.0.row as usize,
                    end_line: symbol.range.end.0.row as usize,
                });
            } else {
                // Even if parent doesn't match, push children to check them later
                for child in symbol.children.iter().rev() {
                    self.pending_symbols.push((child, self.current_depth + 1));
                }

                // Check if any pending children match our criteria
                if let Some(result) = self.next() {
                    return Some(result);
                }
            }
        }

        None
    }
}
@@ -1,24 +1,21 @@
use std::fmt::{self, Write};
use std::fmt::Write;
use std::path::PathBuf;
use std::sync::Arc;

use crate::schema::json_schema_for;
use anyhow::{Result, anyhow};
use assistant_tool::{ActionLog, Tool};
use collections::IndexMap;
use gpui::{App, AsyncApp, Entity, Task};
use language::{CodeLabel, Language, LanguageRegistry};
use language::{OutlineItem, ParseStatus, Point};
use language_model::{LanguageModelRequestMessage, LanguageModelToolSchemaFormat};
use lsp::SymbolKind;
use project::{DocumentSymbol, Project, Symbol};
use project::{Project, Symbol};
use regex::{Regex, RegexBuilder};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use ui::IconName;
use util::markdown::MarkdownString;

use crate::code_symbol_iter::{CodeSymbolIterator, Entry};
use crate::schema::json_schema_for;

#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct CodeSymbolsInput {
    /// The relative path of the source code file to read and get the symbols for.

@@ -82,7 +79,7 @@ impl Tool for CodeSymbolsTool {
    "code_symbols".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}

@@ -180,24 +177,28 @@ pub async fn file_outline(
    action_log.buffer_read(buffer.clone(), cx);
})?;

let symbols = project
    .update(cx, |project, cx| project.document_symbols(&buffer, cx))?
    .await?;
// Wait until the buffer has been fully parsed, so that we can read its outline.
let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
while parse_status
    .recv()
    .await
    .map_or(false, |status| status != ParseStatus::Idle)
{}

if symbols.is_empty() {
    return Err(
        if buffer.read_with(cx, |buffer, _| buffer.snapshot().is_empty())? {
            anyhow!("This file is empty.")
        } else {
            anyhow!("No outline information available for this file.")
        },
    );
}
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
let Some(outline) = snapshot.outline(None) else {
    return Err(anyhow!("No outline information available for this file."));
};

let language = buffer.read_with(cx, |buffer, _| buffer.language().cloned())?;
let language_registry = project.read_with(cx, |project, _| project.languages().clone())?;

render_outline(&symbols, language, language_registry, regex, offset).await
render_outline(
    outline
        .items
        .into_iter()
        .map(|item| item.to_point(&snapshot)),
    regex,
    offset,
)
.await
}

async fn project_symbols(

@@ -292,61 +293,27 @@ async fn project_symbols(
}

async fn render_outline(
    symbols: &[DocumentSymbol],
    language: Option<Arc<Language>>,
    registry: Arc<LanguageRegistry>,
    items: impl IntoIterator<Item = OutlineItem<Point>>,
    regex: Option<Regex>,
    offset: u32,
) -> Result<String> {
    const RESULTS_PER_PAGE_USIZE: usize = RESULTS_PER_PAGE as usize;
    let entries = CodeSymbolIterator::new(symbols, regex.clone())
        .skip(offset as usize)
        // Take 1 more than RESULTS_PER_PAGE so we can tell if there are more results.
        .take(RESULTS_PER_PAGE_USIZE.saturating_add(1))
        .collect::<Vec<Entry>>();
    let has_more = entries.len() > RESULTS_PER_PAGE_USIZE;

    // Get language-specific labels, if available
    let labels = match &language {
        Some(lang) => {
            let entries_for_labels: Vec<(String, SymbolKind)> = entries
                .iter()
                .take(RESULTS_PER_PAGE_USIZE)
                .map(|entry| (entry.name.clone(), entry.kind))
                .collect();
    let mut items = items.into_iter().skip(offset as usize);

            let lang_name = lang.name();
            if let Some(lsp_adapter) = registry.lsp_adapters(&lang_name).first().cloned() {
                lsp_adapter
                    .labels_for_symbols(&entries_for_labels, lang)
                    .await
                    .ok()
            } else {
                None
            }
        }
        None => None,
    };
    let entries = items
        .by_ref()
        .filter(|item| {
            regex
                .as_ref()
                .is_none_or(|regex| regex.is_match(&item.text))
        })
        .take(RESULTS_PER_PAGE_USIZE)
        .collect::<Vec<_>>();
    let has_more = items.next().is_some();

    let mut output = String::new();

    let entries_rendered = match &labels {
        Some(label_list) => render_entries(
            &mut output,
            entries
                .into_iter()
                .take(RESULTS_PER_PAGE_USIZE)
                .zip(label_list.iter())
                .map(|(entry, label)| (entry, label.as_ref())),
        ),
        None => render_entries(
            &mut output,
            entries
                .into_iter()
                .take(RESULTS_PER_PAGE_USIZE)
                .map(|entry| (entry, None)),
        ),
    };
    let entries_rendered = render_entries(&mut output, entries);

    // Calculate pagination information
    let page_start = offset + 1;

@@ -372,31 +339,19 @@ async fn render_outline(
    Ok(output)
}

fn render_entries<'a>(
    output: &mut String,
    entries: impl IntoIterator<Item = (Entry, Option<&'a CodeLabel>)>,
) -> u32 {
fn render_entries(output: &mut String, items: impl IntoIterator<Item = OutlineItem<Point>>) -> u32 {
    let mut entries_rendered = 0;

    for (entry, label) in entries {
    for item in items {
        // Indent based on depth ("" for level 0, " " for level 1, etc.)
        for _ in 0..entry.depth {
            output.push_str(" ");
        }

        match label {
            Some(label) => {
                output.push_str(label.text());
            }
            None => {
                write_symbol_kind(output, entry.kind).ok();
                output.push_str(&entry.name);
            }
        for _ in 0..item.depth {
            output.push(' ');
        }
        output.push_str(&item.text);

        // Add position information - convert to 1-based line numbers for display
        let start_line = entry.start_line + 1;
        let end_line = entry.end_line + 1;
        let start_line = item.range.start.row + 1;
        let end_line = item.range.end.row + 1;

        if start_line == end_line {
            writeln!(output, " [L{}]", start_line).ok();

@@ -408,38 +363,3 @@ fn render_entries<'a>(

    entries_rendered
}

// We may not have a language server adapter to have language-specific
// ways to translate SymbolKnd into a string. In that situation,
// fall back on some reasonable default strings to render.
fn write_symbol_kind(buf: &mut String, kind: SymbolKind) -> Result<(), fmt::Error> {
    match kind {
        SymbolKind::FILE => write!(buf, "file "),
        SymbolKind::MODULE => write!(buf, "module "),
        SymbolKind::NAMESPACE => write!(buf, "namespace "),
        SymbolKind::PACKAGE => write!(buf, "package "),
        SymbolKind::CLASS => write!(buf, "class "),
        SymbolKind::METHOD => write!(buf, "method "),
        SymbolKind::PROPERTY => write!(buf, "property "),
        SymbolKind::FIELD => write!(buf, "field "),
        SymbolKind::CONSTRUCTOR => write!(buf, "constructor "),
        SymbolKind::ENUM => write!(buf, "enum "),
        SymbolKind::INTERFACE => write!(buf, "interface "),
        SymbolKind::FUNCTION => write!(buf, "function "),
        SymbolKind::VARIABLE => write!(buf, "variable "),
        SymbolKind::CONSTANT => write!(buf, "constant "),
        SymbolKind::STRING => write!(buf, "string "),
        SymbolKind::NUMBER => write!(buf, "number "),
        SymbolKind::BOOLEAN => write!(buf, "boolean "),
        SymbolKind::ARRAY => write!(buf, "array "),
        SymbolKind::OBJECT => write!(buf, "object "),
        SymbolKind::KEY => write!(buf, "key "),
        SymbolKind::NULL => write!(buf, "null "),
        SymbolKind::ENUM_MEMBER => write!(buf, "enum member "),
        SymbolKind::STRUCT => write!(buf, "struct "),
        SymbolKind::EVENT => write!(buf, "event "),
        SymbolKind::OPERATOR => write!(buf, "operator "),
        SymbolKind::TYPE_PARAMETER => write!(buf, "type parameter "),
        _ => Ok(()),
    }
}
@@ -43,7 +43,7 @@ impl Tool for CopyPathTool {
    "copy_path".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    true
}

@@ -33,7 +33,7 @@ impl Tool for CreateDirectoryTool {
    "create_directory".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    true
}

@@ -40,7 +40,7 @@ impl Tool for CreateFileTool {
    "create_file".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}

@@ -33,7 +33,7 @@ impl Tool for DeletePathTool {
    "delete_path".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    true
}

@@ -46,7 +46,7 @@ impl Tool for DiagnosticsTool {
    "diagnostics".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}

@@ -116,7 +116,7 @@ impl Tool for FetchTool {
    "fetch".to_string()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    true
}

@@ -129,7 +129,7 @@ impl Tool for FindReplaceFileTool {
    "find_replace_file".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}

@@ -44,7 +44,7 @@ impl Tool for ListDirectoryTool {
    "list_directory".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}

@@ -42,7 +42,7 @@ impl Tool for MovePathTool {
    "move_path".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    true
}

@@ -33,7 +33,7 @@ impl Tool for NowTool {
    "now".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}

@@ -23,7 +23,7 @@ impl Tool for OpenTool {
    "open".to_string()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    true
}

@@ -41,7 +41,7 @@ impl Tool for PathSearchTool {
    "path_search".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}

@@ -1,7 +1,6 @@
use std::sync::Arc;

use crate::code_symbols_tool::file_outline;
use crate::schema::json_schema_for;
use crate::{code_symbols_tool::file_outline, schema::json_schema_for};
use anyhow::{Result, anyhow};
use assistant_tool::{ActionLog, Tool};
use gpui::{App, Entity, Task};

@@ -16,7 +15,7 @@ use util::markdown::MarkdownString;
/// If the model requests to read a file whose size exceeds this, then
/// the tool will return an error along with the model's symbol outline,
/// and suggest trying again using line ranges from the outline.
const MAX_FILE_SIZE_TO_READ: usize = 4096;
const MAX_FILE_SIZE_TO_READ: usize = 16384;

#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct ReadFileToolInput {

@@ -52,7 +51,7 @@ impl Tool for ReadFileTool {
    "read_file".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}

@@ -44,7 +44,7 @@ impl Tool for RegexSearchTool {
    "regex_search".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}

@@ -72,7 +72,7 @@ impl Tool for SymbolInfoTool {
    "symbol_info".into()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}

@@ -24,7 +24,7 @@ impl Tool for ThinkingTool {
    "thinking".to_string()
}

fn needs_confirmation(&self) -> bool {
fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
    false
}
@@ -318,6 +318,7 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::OpenUncommittedDiff>)
.add_request_handler(forward_read_only_project_request::<proto::LspExtExpandMacro>)
.add_request_handler(forward_read_only_project_request::<proto::LspExtOpenDocs>)
.add_request_handler(forward_mutating_project_request::<proto::LspExtRunnables>)
.add_request_handler(
    forward_read_only_project_request::<proto::LspExtSwitchSourceHeader>,
)
@@ -309,7 +309,7 @@ impl MessageEditor {
.map(|mat| {
    let (new_text, label) = completion_fn(&mat);
    Completion {
        old_range: range.clone(),
        replace_range: range.clone(),
        new_text,
        label,
        icon_path: None,
@@ -3,37 +3,62 @@ use std::ops::{Deref, DerefMut};
use std::sync::LazyLock;

use collections::HashMap;
use gpui::{AnyElement, App, IntoElement, RenderOnce, SharedString, Window, div, prelude::*, px};
use gpui::{
    AnyElement, App, IntoElement, RenderOnce, SharedString, Window, div, pattern_slash, prelude::*,
    px, rems,
};
use linkme::distributed_slice;
use parking_lot::RwLock;
use theme::ActiveTheme;

pub trait Component {
    fn scope() -> Option<ComponentScope>;
    fn scope() -> ComponentScope {
        ComponentScope::None
    }
    fn name() -> &'static str {
        std::any::type_name::<Self>()
    }
    /// Returns a name that the component should be sorted by.
    ///
    /// Implement this if the component should be sorted in an alternate order than its name.
    ///
    /// Example:
    ///
    /// For example, to group related components together when sorted:
    ///
    /// - Button -> ButtonA
    /// - IconButton -> ButtonBIcon
    /// - ToggleButton -> ButtonCToggle
    ///
    /// This naming scheme keeps these components together and allows them to
    /// be sorted in a logical order.
    fn sort_name() -> &'static str {
        Self::name()
    }
    fn description() -> Option<&'static str> {
        None
    }
}

pub trait ComponentPreview: Component {
    fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement;
    fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
        None
    }
}
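For context, a hypothetical implementer of the reworked trait might look like the sketch below; `StatusBadge` is invented, and the gpui calls assume the Zed workspace's `gpui` crate rather than a published API:

```rust
use gpui::{AnyElement, App, IntoElement, ParentElement, Window, div};

struct StatusBadge;

impl Component for StatusBadge {
    // `scope`, `name`, and `description` now all have defaults; override only
    // what matters for this component.
    fn scope() -> ComponentScope {
        ComponentScope::Status
    }
    // Sorting by an alternate name lets related components cluster together.
    fn sort_name() -> &'static str {
        "BadgeStatus"
    }
    fn description() -> Option<&'static str> {
        Some("A small status indicator, shown here for illustration.")
    }
}

impl ComponentPreview for StatusBadge {
    // Previews are optional now: returning `None` keeps the component listed
    // in the registry without rendering an example.
    fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
        Some(div().child("status badge").into_any_element())
    }
}
```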
#[distributed_slice]
|
||||
pub static __ALL_COMPONENTS: [fn()] = [..];
|
||||
|
||||
#[distributed_slice]
|
||||
pub static __ALL_PREVIEWS: [fn()] = [..];
|
||||
|
||||
pub static COMPONENT_DATA: LazyLock<RwLock<ComponentRegistry>> =
|
||||
LazyLock::new(|| RwLock::new(ComponentRegistry::new()));
|
||||
|
||||
pub struct ComponentRegistry {
|
||||
components: Vec<(Option<ComponentScope>, &'static str, Option<&'static str>)>,
|
||||
previews: HashMap<&'static str, fn(&mut Window, &mut App) -> AnyElement>,
|
||||
components: Vec<(
|
||||
ComponentScope,
|
||||
// name
|
||||
&'static str,
|
||||
// sort name
|
||||
&'static str,
|
||||
// description
|
||||
Option<&'static str>,
|
||||
)>,
|
||||
previews: HashMap<&'static str, fn(&mut Window, &mut App) -> Option<AnyElement>>,
|
||||
}
|
||||
|
||||
impl ComponentRegistry {
|
||||
@@ -47,30 +72,16 @@ impl ComponentRegistry {
|
||||
|
||||
pub fn init() {
|
||||
let component_fns: Vec<_> = __ALL_COMPONENTS.iter().cloned().collect();
|
||||
let preview_fns: Vec<_> = __ALL_PREVIEWS.iter().cloned().collect();
|
||||
|
||||
for f in component_fns {
|
||||
f();
|
||||
}
|
||||
for f in preview_fns {
|
||||
f();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register_component<T: Component>() {
|
||||
let component_data = (T::scope(), T::name(), T::description());
|
||||
COMPONENT_DATA.write().components.push(component_data);
|
||||
}
|
||||
|
||||
pub fn register_preview<T: ComponentPreview>() {
|
||||
let preview_data = (
|
||||
T::name(),
|
||||
T::preview as fn(&mut Window, &mut App) -> AnyElement,
|
||||
);
|
||||
COMPONENT_DATA
|
||||
.write()
|
||||
.previews
|
||||
.insert(preview_data.0, preview_data.1);
|
||||
let component_data = (T::scope(), T::name(), T::sort_name(), T::description());
|
||||
let mut data = COMPONENT_DATA.write();
|
||||
data.components.push(component_data);
|
||||
data.previews.insert(T::name(), T::preview);
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
@@ -80,29 +91,41 @@ pub struct ComponentId(pub &'static str);
|
||||
pub struct ComponentMetadata {
|
||||
id: ComponentId,
|
||||
name: SharedString,
|
||||
scope: Option<ComponentScope>,
|
||||
sort_name: SharedString,
|
||||
scope: ComponentScope,
|
||||
description: Option<SharedString>,
|
||||
preview: Option<fn(&mut Window, &mut App) -> AnyElement>,
|
||||
preview: Option<fn(&mut Window, &mut App) -> Option<AnyElement>>,
|
||||
}
|
||||
|
||||
impl ComponentMetadata {
|
||||
pub fn id(&self) -> ComponentId {
|
||||
self.id.clone()
|
||||
}
|
||||
|
||||
pub fn name(&self) -> SharedString {
|
||||
self.name.clone()
|
||||
}
|
||||
|
||||
pub fn scope(&self) -> Option<ComponentScope> {
|
||||
self.scope.clone()
|
||||
pub fn sort_name(&self) -> SharedString {
|
||||
self.sort_name.clone()
|
||||
}
|
||||
|
||||
pub fn scopeless_name(&self) -> SharedString {
|
||||
self.name
|
||||
.clone()
|
||||
.split("::")
|
||||
.last()
|
||||
.unwrap_or(&self.name)
|
||||
.to_string()
|
||||
.into()
|
||||
}
|
||||
|
||||
pub fn scope(&self) -> ComponentScope {
|
||||
self.scope.clone()
|
||||
}
|
||||
pub fn description(&self) -> Option<SharedString> {
|
||||
self.description.clone()
|
||||
}
|
||||
|
||||
pub fn preview(&self) -> Option<fn(&mut Window, &mut App) -> AnyElement> {
|
||||
pub fn preview(&self) -> Option<fn(&mut Window, &mut App) -> Option<AnyElement>> {
|
||||
self.preview
|
||||
}
|
||||
}
|
||||
@@ -113,26 +136,18 @@ impl AllComponents {
|
||||
pub fn new() -> Self {
|
||||
AllComponents(HashMap::default())
|
||||
}
|
||||
|
||||
/// Returns all components with previews
|
||||
pub fn all_previews(&self) -> Vec<&ComponentMetadata> {
|
||||
self.0.values().filter(|c| c.preview.is_some()).collect()
|
||||
}
|
||||
|
||||
/// Returns all components with previews sorted by name
|
||||
pub fn all_previews_sorted(&self) -> Vec<ComponentMetadata> {
|
||||
let mut previews: Vec<ComponentMetadata> =
|
||||
self.all_previews().into_iter().cloned().collect();
|
||||
previews.sort_by_key(|a| a.name());
|
||||
previews
|
||||
}
|
||||
|
||||
/// Returns all components
|
||||
pub fn all(&self) -> Vec<&ComponentMetadata> {
|
||||
self.0.values().collect()
|
||||
}
|
||||
|
||||
/// Returns all components sorted by name
|
||||
pub fn all_sorted(&self) -> Vec<ComponentMetadata> {
|
||||
let mut components: Vec<ComponentMetadata> = self.all().into_iter().cloned().collect();
|
||||
components.sort_by_key(|a| a.name());
|
||||
@@ -142,7 +157,6 @@ impl AllComponents {
|
||||
|
||||
impl Deref for AllComponents {
|
||||
type Target = HashMap<ComponentId, ComponentMetadata>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
@@ -157,139 +171,127 @@ impl DerefMut for AllComponents {
|
||||
pub fn components() -> AllComponents {
|
||||
let data = COMPONENT_DATA.read();
|
||||
let mut all_components = AllComponents::new();
|
||||
|
||||
for (scope, name, description) in &data.components {
|
||||
for (scope, name, sort_name, description) in &data.components {
|
||||
let preview = data.previews.get(name).cloned();
|
||||
let component_name = SharedString::new_static(name);
|
||||
let sort_name = SharedString::new_static(sort_name);
|
||||
let id = ComponentId(name);
|
||||
all_components.insert(
|
||||
id.clone(),
|
||||
ComponentMetadata {
|
||||
id,
|
||||
name: component_name,
|
||||
sort_name,
|
||||
scope: scope.clone(),
|
||||
description: description.map(Into::into),
|
||||
preview,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
all_components
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum ComponentScope {
|
||||
Layout,
|
||||
Input,
|
||||
Notification,
|
||||
Editor,
|
||||
Collaboration,
|
||||
DataDisplay,
|
||||
Editor,
|
||||
Images,
|
||||
Input,
|
||||
Layout,
|
||||
Loading,
|
||||
Navigation,
|
||||
None,
|
||||
Notification,
|
||||
Overlays,
|
||||
Status,
|
||||
Typography,
|
||||
VersionControl,
|
||||
Unknown(SharedString),
|
||||
}
|
||||
|
||||
impl Display for ComponentScope {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ComponentScope::Layout => write!(f, "Layout"),
|
||||
ComponentScope::Input => write!(f, "Input"),
|
||||
ComponentScope::Notification => write!(f, "Notification"),
|
||||
ComponentScope::Editor => write!(f, "Editor"),
|
||||
ComponentScope::Collaboration => write!(f, "Collaboration"),
|
||||
ComponentScope::DataDisplay => write!(f, "Data Display"),
|
||||
ComponentScope::Editor => write!(f, "Editor"),
|
||||
ComponentScope::Images => write!(f, "Images & Icons"),
|
||||
ComponentScope::Input => write!(f, "Forms & Input"),
|
||||
ComponentScope::Layout => write!(f, "Layout & Structure"),
|
||||
ComponentScope::Loading => write!(f, "Loading & Progress"),
|
||||
ComponentScope::Navigation => write!(f, "Navigation"),
|
||||
ComponentScope::None => write!(f, "Unsorted"),
|
||||
ComponentScope::Notification => write!(f, "Notification"),
|
||||
ComponentScope::Overlays => write!(f, "Overlays & Layering"),
|
||||
ComponentScope::Status => write!(f, "Status"),
|
||||
ComponentScope::Typography => write!(f, "Typography"),
|
||||
ComponentScope::VersionControl => write!(f, "Version Control"),
|
||||
ComponentScope::Unknown(name) => write!(f, "Unknown: {}", name),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for ComponentScope {
|
||||
fn from(value: &str) -> Self {
|
||||
match value {
|
||||
"Layout" => ComponentScope::Layout,
|
||||
"Input" => ComponentScope::Input,
|
||||
"Notification" => ComponentScope::Notification,
|
||||
"Editor" => ComponentScope::Editor,
|
||||
"Collaboration" => ComponentScope::Collaboration,
|
||||
"Version Control" | "VersionControl" => ComponentScope::VersionControl,
|
||||
_ => ComponentScope::Unknown(SharedString::new(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for ComponentScope {
|
||||
fn from(value: String) -> Self {
|
||||
match value.as_str() {
|
||||
"Layout" => ComponentScope::Layout,
|
||||
"Input" => ComponentScope::Input,
|
||||
"Notification" => ComponentScope::Notification,
|
||||
"Editor" => ComponentScope::Editor,
|
||||
"Collaboration" => ComponentScope::Collaboration,
|
||||
"Version Control" | "VersionControl" => ComponentScope::VersionControl,
|
||||
_ => ComponentScope::Unknown(SharedString::new(value)),
|
||||
}
|
||||
}
|
||||
}

/// Which side of the preview to show labels on
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExampleLabelSide {
    /// Left side
    Left,
    /// Right side
    Right,
    /// Top side
    #[default]
    Top,
    /// Bottom side
    Bottom,
}

/// A single example of a component.
#[derive(IntoElement)]
pub struct ComponentExample {
    variant_name: SharedString,
    element: AnyElement,
    label_side: ExampleLabelSide,
    grow: bool,
    pub variant_name: SharedString,
    pub description: Option<SharedString>,
    pub element: AnyElement,
}

impl RenderOnce for ComponentExample {
    fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
        let base = div().flex();

        let base = match self.label_side {
            ExampleLabelSide::Right => base.flex_row(),
            ExampleLabelSide::Left => base.flex_row_reverse(),
            ExampleLabelSide::Bottom => base.flex_col(),
            ExampleLabelSide::Top => base.flex_col_reverse(),
        };

        base.gap_2()
            .p_2()
            .text_size(px(10.))
            .text_color(cx.theme().colors().text_muted)
            .when(self.grow, |this| this.flex_1())
            .when(!self.grow, |this| this.flex_none())
            .child(self.element)
            .child(self.variant_name)
        div()
            .w_full()
            .flex()
            .flex_col()
            .gap_3()
            .child(
                div()
                    .child(self.variant_name.clone())
                    .text_size(rems(1.25))
                    .text_color(cx.theme().colors().text),
            )
            .when_some(self.description, |this, description| {
                this.child(
                    div()
                        .text_size(rems(0.9375))
                        .text_color(cx.theme().colors().text_muted)
                        .child(description.clone()),
                )
            })
            .child(
                div()
                    .flex()
                    .w_full()
                    .rounded_xl()
                    .min_h(px(100.))
                    .justify_center()
                    .p_8()
                    .border_1()
                    .border_color(cx.theme().colors().border)
                    .bg(pattern_slash(
                        cx.theme().colors().surface_background.opacity(0.5),
                        24.0,
                        24.0,
                    ))
                    .shadow_sm()
                    .child(self.element),
            )
            .into_any_element()
    }
}

impl ComponentExample {
    /// Create a new example with the given variant name and example value.
    pub fn new(variant_name: impl Into<SharedString>, element: AnyElement) -> Self {
        Self {
            variant_name: variant_name.into(),
            element,
            label_side: ExampleLabelSide::default(),
            grow: false,
            description: None,
        }
    }

    /// Set the example to grow to fill the available horizontal space.
    pub fn grow(mut self) -> Self {
        self.grow = true;
    pub fn description(mut self, description: impl Into<SharedString>) -> Self {
        self.description = Some(description.into());
        self
    }
}
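// Illustrative sketch, not part of the diff above: a call site for the builder
// after this change, using the new `description` method instead of the removed
// `grow`/`label_side` knobs. The function name and the `div()` placeholder
// content are hypothetical.
fn component_example_usage_sketch() -> ComponentExample {
    ComponentExample::new("Default", div().child("preview goes here").into_any_element())
        .description("Shown with default settings.")
}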
@@ -309,7 +311,7 @@ impl RenderOnce for ComponentExampleGroup {
            .flex_col()
            .text_sm()
            .text_color(cx.theme().colors().text_muted)
            .when(self.grow, |this| this.w_full().flex_1())
            .w_full()
            .when_some(self.title, |this, title| {
                this.gap_4().child(
                    div()
@@ -336,7 +338,7 @@ impl RenderOnce for ComponentExampleGroup {
            .child(
                div()
                    .flex()
                    .when(self.vertical, |this| this.flex_col())
                    .flex_col()
                    .items_start()
                    .w_full()
                    .gap_6()
@@ -348,7 +350,6 @@ impl RenderOnce for ComponentExampleGroup {
    }
}

impl ComponentExampleGroup {
    /// Create a new group of examples with the given title.
    pub fn new(examples: Vec<ComponentExample>) -> Self {
        Self {
            title: None,
@@ -357,8 +358,6 @@ impl ComponentExampleGroup {
            vertical: false,
        }
    }

    /// Create a new group of examples with the given title.
    pub fn with_title(title: impl Into<SharedString>, examples: Vec<ComponentExample>) -> Self {
        Self {
            title: Some(title.into()),
@@ -367,21 +366,16 @@ impl ComponentExampleGroup {
            vertical: false,
        }
    }

    /// Set the group to grow to fill the available horizontal space.
    pub fn grow(mut self) -> Self {
        self.grow = true;
        self
    }

    /// Lay the group out vertically.
    pub fn vertical(mut self) -> Self {
        self.vertical = true;
        self
    }
}

/// Create a single example
pub fn single_example(
    variant_name: impl Into<SharedString>,
    example: AnyElement,
@@ -389,12 +383,10 @@ pub fn single_example(
    ComponentExample::new(variant_name, example)
}

/// Create a group of examples without a title
pub fn example_group(examples: Vec<ComponentExample>) -> ComponentExampleGroup {
    ComponentExampleGroup::new(examples)
}

/// Create a group of examples with a title
pub fn example_group_with_title(
    title: impl Into<SharedString>,
    examples: Vec<ComponentExample>,

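// Illustrative sketch, not part of the diff above: combining the helpers defined
// in this file into a titled, vertically laid out group, as a component preview
// would. The function name and example labels are hypothetical.
fn example_group_usage_sketch() -> ComponentExampleGroup {
    example_group_with_title(
        "States",
        vec![
            single_example("Default", div().child("default state").into_any_element()),
            single_example("Disabled", div().child("disabled state").into_any_element()),
        ],
    )
    .vertical()
}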
@@ -43,6 +43,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut App) {
            language_registry,
            user_store,
            None,
            None,
            cx,
        )
    });
@@ -106,10 +107,12 @@ impl ComponentPreview {
        language_registry: Arc<LanguageRegistry>,
        user_store: Entity<UserStore>,
        selected_index: impl Into<Option<usize>>,
        active_page: Option<PreviewPage>,
        cx: &mut Context<Self>,
    ) -> Self {
        let sorted_components = components().all_sorted();
        let selected_index = selected_index.into().unwrap_or(0);
        let active_page = active_page.unwrap_or(PreviewPage::AllComponents);

        let component_list = ListState::new(
            sorted_components.len(),
@@ -135,7 +138,7 @@ impl ComponentPreview {
            language_registry,
            user_store,
            workspace,
            active_page: PreviewPage::AllComponents,
            active_page,
            component_map: components().0,
            components: sorted_components,
            component_list,
@@ -169,8 +172,7 @@ impl ComponentPreview {
    fn scope_ordered_entries(&self) -> Vec<PreviewEntry> {
        use std::collections::HashMap;

        let mut scope_groups: HashMap<Option<ComponentScope>, Vec<ComponentMetadata>> =
            HashMap::default();
        let mut scope_groups: HashMap<ComponentScope, Vec<ComponentMetadata>> = HashMap::default();

        for component in &self.components {
            scope_groups
@@ -192,6 +194,7 @@ impl ComponentPreview {
            ComponentScope::Notification,
            ComponentScope::Collaboration,
            ComponentScope::VersionControl,
            ComponentScope::None,
        ];

        // Always show all components first
@@ -199,38 +202,27 @@ impl ComponentPreview {
        entries.push(PreviewEntry::Separator);

        for scope in known_scopes.iter() {
            let scope_key = Some(scope.clone());
            if let Some(components) = scope_groups.remove(&scope_key) {
            if let Some(components) = scope_groups.remove(scope) {
                if !components.is_empty() {
                    entries.push(PreviewEntry::SectionHeader(scope.to_string().into()));
                    let mut sorted_components = components;
                    sorted_components.sort_by_key(|component| component.sort_name());

                    for component in components {
                    for component in sorted_components {
                        entries.push(PreviewEntry::Component(component));
                    }
                }
            }
        }

        for (scope, components) in &scope_groups {
            if let Some(ComponentScope::Unknown(_)) = scope {
                if !components.is_empty() {
                    if let Some(scope_value) = scope {
                        entries.push(PreviewEntry::SectionHeader(scope_value.to_string().into()));
                    }

                    for component in components {
                        entries.push(PreviewEntry::Component(component.clone()));
                    }
                }
            }
        }

        if let Some(components) = scope_groups.get(&None) {
        if let Some(components) = scope_groups.get(&ComponentScope::None) {
            if !components.is_empty() {
                entries.push(PreviewEntry::Separator);
                entries.push(PreviewEntry::SectionHeader("Uncategorized".into()));
                let mut sorted_components = components.clone();
                sorted_components.sort_by_key(|c| c.sort_name());

                for component in components {
                for component in sorted_components {
                    entries.push(PreviewEntry::Component(component.clone()));
                }
            }
@@ -250,7 +242,10 @@ impl ComponentPreview {
        let id = component_metadata.id();
        let selected = self.active_page == PreviewPage::Component(id.clone());
        ListItem::new(ix)
            .child(Label::new(component_metadata.name().clone()).color(Color::Default))
            .child(
                Label::new(component_metadata.scopeless_name().clone())
                    .color(Color::Default),
            )
            .selectable(true)
            .toggle_state(selected)
            .inset(true)
@@ -333,7 +328,7 @@ impl ComponentPreview {
        window: &mut Window,
        cx: &mut App,
    ) -> impl IntoElement {
        let name = component.name();
        let name = component.scopeless_name();
        let scope = component.scope();

        let description = component.description();
@@ -354,13 +349,12 @@ impl ComponentPreview {
        v_flex()
            .gap_1()
            .child(
                h_flex()
                    .gap_1()
                    .text_xl()
                    .child(div().child(name))
                    .when_some(scope, |this, scope| {
                h_flex().gap_1().text_xl().child(div().child(name)).when(
                    !matches!(scope, ComponentScope::None),
                    |this| {
                        this.child(div().opacity(0.5).child(format!("({})", scope)))
                    }),
                    },
                ),
            )
            .when_some(description, |this, description| {
                this.child(
@@ -373,7 +367,7 @@ impl ComponentPreview {
                }),
            )
            .when_some(component.preview(), |this, preview| {
                this.child(preview(window, cx))
                this.children(preview(window, cx))
            }),
        )
        .into_any_element()
@@ -395,17 +389,16 @@ impl ComponentPreview {
    fn render_component_page(
        &mut self,
        component_id: &ComponentId,
        window: &mut Window,
        cx: &mut Context<Self>,
        _window: &mut Window,
        _cx: &mut Context<Self>,
    ) -> impl IntoElement {
        let component = self.component_map.get(&component_id);

        if let Some(component) = component {
            v_flex()
                .w_full()
                .flex_initial()
                .min_h_full()
                .child(self.render_preview(component, window, cx))
                .id("render-component-page")
                .size_full()
                .child(ComponentPreviewPage::new(component.clone()))
                .into_any_element()
        } else {
            v_flex()
@@ -445,10 +438,11 @@ impl Render for ComponentPreview {
            .overflow_hidden()
            .size_full()
            .track_focus(&self.focus_handle)
            .px_2()
            .bg(cx.theme().colors().editor_background)
            .child(
                v_flex()
                    .border_r_1()
                    .border_color(cx.theme().colors().border)
                    .h_full()
                    .child(
                        uniform_list(
@@ -465,6 +459,7 @@ impl Render for ComponentPreview {
                        )
                        .track_scroll(self.nav_scroll_handle.clone())
                        .pt_4()
                        .px_4()
                        .w(px(240.))
                        .h_full()
                        .flex_1(),
@@ -527,6 +522,7 @@ impl Item for ComponentPreview {
        let user_store = self.user_store.clone();
        let weak_workspace = self.workspace.clone();
        let selected_index = self.cursor_index;
        let active_page = self.active_page.clone();

        Some(cx.new(|cx| {
            Self::new(
@@ -534,6 +530,7 @@ impl Item for ComponentPreview {
                language_registry,
                user_store,
                selected_index,
                Some(active_page),
                cx,
            )
        }))
@@ -566,7 +563,14 @@ impl SerializableItem for ComponentPreview {
            let weak_workspace = workspace.clone();
            cx.update(|_, cx| {
                Ok(cx.new(|cx| {
                    ComponentPreview::new(weak_workspace, language_registry, user_store, None, cx)
                    ComponentPreview::new(
                        weak_workspace,
                        language_registry,
                        user_store,
                        None,
                        None,
                        cx,
                    )
                }))
            })?
        })
@@ -600,3 +604,76 @@ impl SerializableItem for ComponentPreview {
        false
    }
}

#[derive(IntoElement)]
pub struct ComponentPreviewPage {
    // languages: Arc<LanguageRegistry>,
    component: ComponentMetadata,
}

impl ComponentPreviewPage {
    pub fn new(
        component: ComponentMetadata,
        // languages: Arc<LanguageRegistry>
    ) -> Self {
        Self {
            // languages,
            component,
        }
    }

    fn render_header(&self, _: &Window, cx: &App) -> impl IntoElement {
        v_flex()
            .px_12()
            .pt_16()
            .pb_12()
            .gap_6()
            .bg(cx.theme().colors().surface_background)
            .border_b_1()
            .border_color(cx.theme().colors().border)
            .child(
                v_flex()
                    .gap_0p5()
                    .child(
                        Label::new(self.component.scope().to_string())
                            .size(LabelSize::Small)
                            .color(Color::Muted),
                    )
                    .child(
                        Headline::new(self.component.scopeless_name()).size(HeadlineSize::XLarge),
                    ),
            )
            .when_some(self.component.description(), |this, description| {
                this.child(div().text_sm().child(description))
            })
    }

    fn render_preview(&self, window: &mut Window, cx: &mut App) -> impl IntoElement {
        v_flex()
            .flex_1()
            .px_12()
            .py_6()
            .bg(cx.theme().colors().editor_background)
            .child(if let Some(preview) = self.component.preview() {
                preview(window, cx).unwrap_or_else(|| {
                    div()
                        .child("Failed to load preview. This path should be unreachable")
                        .into_any_element()
                })
            } else {
                div().child("No preview available").into_any_element()
            })
    }
}

impl RenderOnce for ComponentPreviewPage {
    fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement {
        v_flex()
            .id("component-preview-page")
            .overflow_y_scroll()
            .overflow_x_hidden()
            .w_full()
            .child(self.render_header(window, cx))
            .child(self.render_preview(window, cx))
    }
}

@@ -49,7 +49,7 @@ impl Tool for ContextServerTool {
        }
    }

    fn needs_confirmation(&self) -> bool {
    fn needs_confirmation(&self, _: &serde_json::Value, _: &App) -> bool {
        true
    }


@@ -1280,10 +1280,6 @@ mod tests {
        unimplemented!()
    }

    fn as_any(&self) -> &dyn std::any::Any {
        unimplemented!()
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

@@ -31,6 +31,7 @@ pub(super) struct NewSessionModal {
    debug_panel: WeakEntity<DebugPanel>,
    mode: NewSessionMode,
    stop_on_entry: ToggleState,
    initialize_args: Option<serde_json::Value>,
    debugger: Option<SharedString>,
    last_selected_profile_name: Option<SharedString>,
}
|
||||
@@ -82,17 +83,17 @@ impl NewSessionModal {
|
||||
.map(Into::into)
|
||||
.unwrap_or(ToggleState::Unselected),
|
||||
last_selected_profile_name: None,
|
||||
initialize_args: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn debug_config(&self, cx: &App) -> Option<DebugTaskDefinition> {
|
||||
let request = self.mode.debug_task(cx);
|
||||
|
||||
Some(DebugTaskDefinition {
|
||||
adapter: self.debugger.clone()?.to_string(),
|
||||
label: suggested_label(&request, self.debugger.as_deref()?),
|
||||
request,
|
||||
initialize_args: None,
|
||||
initialize_args: self.initialize_args.clone(),
|
||||
tcp_connection: None,
|
||||
locator: None,
|
||||
stop_on_entry: match self.stop_on_entry {
|
||||
@@ -228,7 +229,7 @@ impl NewSessionModal {
|
||||
weak.update(cx, |this, cx| {
|
||||
this.last_selected_profile_name = Some(SharedString::from(&task.label));
|
||||
this.debugger = Some(task.adapter.clone().into());
|
||||
|
||||
this.initialize_args = task.initialize_args.clone();
|
||||
match &task.request {
|
||||
DebugRequestType::Launch(launch_config) => {
|
||||
this.mode = NewSessionMode::launch(
|
||||
|
||||
@@ -356,7 +356,7 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
let variable_value = variables.get(&string_match.string)?;
|
||||
|
||||
Some(project::Completion {
|
||||
old_range: buffer_position..buffer_position,
|
||||
replace_range: buffer_position..buffer_position,
|
||||
new_text: string_match.string.clone(),
|
||||
label: CodeLabel {
|
||||
filter_range: 0..string_match.string.len(),
|
||||
@@ -428,10 +428,10 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
let buffer_offset = buffer_position.to_offset(&snapshot);
|
||||
let start = buffer_offset - word_bytes_length;
|
||||
let start = snapshot.anchor_before(start);
|
||||
let old_range = start..buffer_position;
|
||||
let replace_range = start..buffer_position;
|
||||
|
||||
project::Completion {
|
||||
old_range,
|
||||
replace_range,
|
||||
new_text,
|
||||
label: CodeLabel {
|
||||
filter_range: 0..completion.label.len(),
|
||||
|
||||
@@ -3,6 +3,7 @@ use super::*;
|
||||
use gpui::{action_as, action_with_deprecated_aliases, actions};
|
||||
use schemars::JsonSchema;
|
||||
use util::serde::default_true;
|
||||
|
||||
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct SelectNext {
|
||||
@@ -262,6 +263,8 @@ actions!(
|
||||
Cancel,
|
||||
CancelLanguageServerWork,
|
||||
ConfirmRename,
|
||||
ConfirmCompletionInsert,
|
||||
ConfirmCompletionReplace,
|
||||
ContextMenuFirst,
|
||||
ContextMenuLast,
|
||||
ContextMenuNext,
|
||||
|
||||
@@ -230,7 +230,7 @@ impl CompletionsMenu {
|
||||
let completions = choices
|
||||
.iter()
|
||||
.map(|choice| Completion {
|
||||
old_range: selection.start.text_anchor..selection.end.text_anchor,
|
||||
replace_range: selection.start.text_anchor..selection.end.text_anchor,
|
||||
new_text: choice.to_string(),
|
||||
label: CodeLabel {
|
||||
text: choice.to_string(),
|
||||
|
||||
@@ -109,8 +109,8 @@ use language::{
|
||||
IndentKind, IndentSize, Language, OffsetRangeExt, Point, Selection, SelectionGoal, TextObject,
|
||||
TransactionId, TreeSitterOptions, WordsQuery,
|
||||
language_settings::{
|
||||
self, InlayHintSettings, RewrapBehavior, WordsCompletionMode, all_language_settings,
|
||||
language_settings,
|
||||
self, InlayHintSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode,
|
||||
all_language_settings, language_settings,
|
||||
},
|
||||
point_from_lsp, text_diff_with_options,
|
||||
};
|
||||
@@ -131,7 +131,7 @@ pub use proposed_changes_editor::{
|
||||
};
|
||||
use smallvec::smallvec;
|
||||
use std::{cell::OnceCell, iter::Peekable};
|
||||
use task::{ResolvedTask, TaskTemplate, TaskVariables};
|
||||
use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables};
|
||||
|
||||
pub use lsp::CompletionContext;
|
||||
use lsp::{
|
||||
@@ -140,6 +140,7 @@ use lsp::{
|
||||
};
|
||||
|
||||
use language::BufferSnapshot;
|
||||
pub use lsp_ext::lsp_tasks;
|
||||
use movement::TextLayoutDetails;
|
||||
pub use multi_buffer::{
|
||||
Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, RowInfo,
|
||||
@@ -4461,7 +4462,7 @@ impl Editor {
|
||||
words.remove(&lsp_completion.new_text);
|
||||
}
|
||||
completions.extend(words.into_iter().map(|(word, word_range)| Completion {
|
||||
old_range: old_range.clone(),
|
||||
replace_range: old_range.clone(),
|
||||
new_text: word.clone(),
|
||||
label: CodeLabel::plain(word, None),
|
||||
icon_path: None,
|
||||
@@ -4568,6 +4569,26 @@ impl Editor {
|
||||
self.do_completion(action.item_ix, CompletionIntent::Complete, window, cx)
|
||||
}
|
||||
|
||||
pub fn confirm_completion_insert(
|
||||
&mut self,
|
||||
_: &ConfirmCompletionInsert,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Task<Result<()>>> {
|
||||
self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
|
||||
self.do_completion(None, CompletionIntent::CompleteWithInsert, window, cx)
|
||||
}
|
||||
|
||||
pub fn confirm_completion_replace(
|
||||
&mut self,
|
||||
_: &ConfirmCompletionReplace,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Task<Result<()>>> {
|
||||
self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
|
||||
self.do_completion(None, CompletionIntent::CompleteWithReplace, window, cx)
|
||||
}
|
||||
|
||||
pub fn compose_completion(
|
||||
&mut self,
|
||||
action: &ComposeCompletion,
|
||||
@@ -4587,12 +4608,10 @@ impl Editor {
|
||||
) -> Option<Task<Result<()>>> {
|
||||
use language::ToOffset as _;
|
||||
|
||||
let completions_menu =
|
||||
if let CodeContextMenu::Completions(menu) = self.hide_context_menu(window, cx)? {
|
||||
menu
|
||||
} else {
|
||||
return None;
|
||||
};
|
||||
let CodeContextMenu::Completions(completions_menu) = self.hide_context_menu(window, cx)?
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
|
||||
let candidate_id = {
|
||||
let entries = completions_menu.entries.borrow();
|
||||
@@ -4621,9 +4640,12 @@ impl Editor {
|
||||
new_text = completion.new_text.clone();
|
||||
};
|
||||
let selections = self.selections.all::<usize>(cx);
|
||||
|
||||
let replace_range = choose_completion_range(&completion, intent, &buffer_handle, cx);
|
||||
let buffer = buffer_handle.read(cx);
|
||||
let old_range = completion.old_range.to_offset(buffer);
|
||||
let old_text = buffer.text_for_range(old_range.clone()).collect::<String>();
|
||||
let old_text = buffer
|
||||
.text_for_range(replace_range.clone())
|
||||
.collect::<String>();
|
||||
|
||||
let newest_selection = self.selections.newest_anchor();
|
||||
if newest_selection.start.buffer_id != Some(buffer_handle.read(cx).remote_id()) {
|
||||
@@ -4634,8 +4656,8 @@ impl Editor {
|
||||
.start
|
||||
.text_anchor
|
||||
.to_offset(buffer)
|
||||
.saturating_sub(old_range.start);
|
||||
let lookahead = old_range
|
||||
.saturating_sub(replace_range.start);
|
||||
let lookahead = replace_range
|
||||
.end
|
||||
.saturating_sub(newest_selection.end.text_anchor.to_offset(buffer));
|
||||
let mut common_prefix_len = 0;
|
||||
@@ -4664,8 +4686,8 @@ impl Editor {
|
||||
ranges.clear();
|
||||
ranges.extend(selections.iter().map(|s| {
|
||||
if s.id == newest_selection.id {
|
||||
range_to_replace = Some(old_range.clone());
|
||||
old_range.clone()
|
||||
range_to_replace = Some(replace_range.clone());
|
||||
replace_range.clone()
|
||||
} else {
|
||||
s.start..s.end
|
||||
}
|
||||
@@ -12449,12 +12471,13 @@ impl Editor {
|
||||
return Task::ready(());
|
||||
}
|
||||
let project = self.project.as_ref().map(Entity::downgrade);
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let task_sources = self.lsp_task_sources(cx);
|
||||
cx.spawn_in(window, async move |editor, cx| {
|
||||
cx.background_executor().timer(UPDATE_DEBOUNCE).await;
|
||||
let Some(project) = project.and_then(|p| p.upgrade()) else {
|
||||
return;
|
||||
};
|
||||
let Ok(display_snapshot) = this.update(cx, |this, cx| {
|
||||
let Ok(display_snapshot) = editor.update(cx, |this, cx| {
|
||||
this.display_map.update(cx, |map, cx| map.snapshot(cx))
|
||||
}) else {
|
||||
return;
|
||||
@@ -12477,15 +12500,77 @@ impl Editor {
|
||||
}
|
||||
})
|
||||
.await;
|
||||
let Ok(lsp_tasks) =
|
||||
cx.update(|_, cx| crate::lsp_tasks(project.clone(), &task_sources, None, cx))
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let lsp_tasks = lsp_tasks.await;
|
||||
|
||||
let Ok(mut lsp_tasks_by_rows) = cx.update(|_, cx| {
|
||||
lsp_tasks
|
||||
.into_iter()
|
||||
.flat_map(|(kind, tasks)| {
|
||||
tasks.into_iter().filter_map(move |(location, task)| {
|
||||
Some((kind.clone(), location?, task))
|
||||
})
|
||||
})
|
||||
.fold(HashMap::default(), |mut acc, (kind, location, task)| {
|
||||
let buffer = location.target.buffer;
|
||||
let buffer_snapshot = buffer.read(cx).snapshot();
|
||||
let offset = display_snapshot.buffer_snapshot.excerpts().find_map(
|
||||
|(excerpt_id, snapshot, _)| {
|
||||
if snapshot.remote_id() == buffer_snapshot.remote_id() {
|
||||
display_snapshot
|
||||
.buffer_snapshot
|
||||
.anchor_in_excerpt(excerpt_id, location.target.range.start)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
},
|
||||
);
|
||||
if let Some(offset) = offset {
|
||||
let task_buffer_range =
|
||||
location.target.range.to_point(&buffer_snapshot);
|
||||
let context_buffer_range =
|
||||
task_buffer_range.to_offset(&buffer_snapshot);
|
||||
let context_range = BufferOffset(context_buffer_range.start)
|
||||
..BufferOffset(context_buffer_range.end);
|
||||
|
||||
acc.entry((buffer_snapshot.remote_id(), task_buffer_range.start.row))
|
||||
.or_insert_with(|| RunnableTasks {
|
||||
templates: Vec::new(),
|
||||
offset,
|
||||
column: task_buffer_range.start.column,
|
||||
extra_variables: HashMap::default(),
|
||||
context_range,
|
||||
})
|
||||
.templates
|
||||
.push((kind, task.original_task().clone()));
|
||||
}
|
||||
|
||||
acc
|
||||
})
|
||||
}) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let rows = Self::runnable_rows(project, display_snapshot, new_rows, cx.clone());
|
||||
this.update(cx, |this, _| {
|
||||
this.clear_tasks();
|
||||
for (key, value) in rows {
|
||||
this.insert_tasks(key, value);
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
editor
|
||||
.update(cx, |editor, _| {
|
||||
editor.clear_tasks();
|
||||
for (key, mut value) in rows {
|
||||
if let Some(lsp_tasks) = lsp_tasks_by_rows.remove(&key) {
|
||||
value.templates.extend(lsp_tasks.templates);
|
||||
}
|
||||
|
||||
editor.insert_tasks(key, value);
|
||||
}
|
||||
for (key, value) in lsp_tasks_by_rows {
|
||||
editor.insert_tasks(key, value);
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
}
|
||||
fn fetch_runnable_ranges(
|
||||
@@ -12500,7 +12585,7 @@ impl Editor {
|
||||
snapshot: DisplaySnapshot,
|
||||
runnable_ranges: Vec<RunnableRange>,
|
||||
mut cx: AsyncWindowContext,
|
||||
) -> Vec<((BufferId, u32), RunnableTasks)> {
|
||||
) -> Vec<((BufferId, BufferRow), RunnableTasks)> {
|
||||
runnable_ranges
|
||||
.into_iter()
|
||||
.filter_map(|mut runnable| {
|
||||
@@ -12557,11 +12642,9 @@ impl Editor {
|
||||
)
|
||||
});
|
||||
|
||||
let tags = mem::take(&mut runnable.tags);
|
||||
let mut tags: Vec<_> = tags
|
||||
let mut templates_with_tags = mem::take(&mut runnable.tags)
|
||||
.into_iter()
|
||||
.flat_map(|tag| {
|
||||
let tag = tag.0.clone();
|
||||
.flat_map(|RunnableTag(tag)| {
|
||||
inventory
|
||||
.as_ref()
|
||||
.into_iter()
|
||||
@@ -12578,20 +12661,20 @@ impl Editor {
|
||||
})
|
||||
})
|
||||
.sorted_by_key(|(kind, _)| kind.to_owned())
|
||||
.collect();
|
||||
if let Some((leading_tag_source, _)) = tags.first() {
|
||||
.collect::<Vec<_>>();
|
||||
if let Some((leading_tag_source, _)) = templates_with_tags.first() {
|
||||
// Strongest source wins; if we have worktree tag binding, prefer that to
|
||||
// global and language bindings;
|
||||
// if we have a global binding, prefer that to language binding.
|
||||
let first_mismatch = tags
|
||||
let first_mismatch = templates_with_tags
|
||||
.iter()
|
||||
.position(|(tag_source, _)| tag_source != leading_tag_source);
|
||||
if let Some(index) = first_mismatch {
|
||||
tags.truncate(index);
|
||||
templates_with_tags.truncate(index);
|
||||
}
|
||||
}
|
||||
|
||||
tags
|
||||
templates_with_tags
|
||||
}
|
||||
|
||||
pub fn move_to_enclosing_bracket(
|
||||
@@ -17918,6 +18001,81 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
// Consider user intent and default settings
|
||||
fn choose_completion_range(
|
||||
completion: &Completion,
|
||||
intent: CompletionIntent,
|
||||
buffer: &Entity<Buffer>,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Range<usize> {
|
||||
fn should_replace(
|
||||
completion: &Completion,
|
||||
insert_range: &Range<text::Anchor>,
|
||||
intent: CompletionIntent,
|
||||
completion_mode_setting: LspInsertMode,
|
||||
buffer: &Buffer,
|
||||
) -> bool {
|
||||
// specific actions take precedence over settings
|
||||
match intent {
|
||||
CompletionIntent::CompleteWithInsert => return false,
|
||||
CompletionIntent::CompleteWithReplace => return true,
|
||||
CompletionIntent::Complete | CompletionIntent::Compose => {}
|
||||
}
|
||||
|
||||
match completion_mode_setting {
|
||||
LspInsertMode::Insert => false,
|
||||
LspInsertMode::Replace => true,
|
||||
LspInsertMode::ReplaceSubsequence => {
|
||||
let mut text_to_replace = buffer.chars_for_range(
|
||||
buffer.anchor_before(completion.replace_range.start)
|
||||
..buffer.anchor_after(completion.replace_range.end),
|
||||
);
|
||||
let mut completion_text = completion.new_text.chars();
|
||||
|
||||
// is `text_to_replace` a subsequence of `completion_text`
|
||||
text_to_replace
|
||||
.all(|needle_ch| completion_text.any(|haystack_ch| haystack_ch == needle_ch))
|
||||
}
|
||||
LspInsertMode::ReplaceSuffix => {
|
||||
let range_after_cursor = insert_range.end..completion.replace_range.end;
|
||||
|
||||
let text_after_cursor = buffer
|
||||
.text_for_range(
|
||||
buffer.anchor_before(range_after_cursor.start)
|
||||
..buffer.anchor_after(range_after_cursor.end),
|
||||
)
|
||||
.collect::<String>();
|
||||
completion.new_text.ends_with(&text_after_cursor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let buffer = buffer.read(cx);
|
||||
|
||||
if let CompletionSource::Lsp {
|
||||
insert_range: Some(insert_range),
|
||||
..
|
||||
} = &completion.source
|
||||
{
|
||||
let completion_mode_setting =
|
||||
language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx)
|
||||
.completions
|
||||
.lsp_insert_mode;
|
||||
|
||||
if !should_replace(
|
||||
completion,
|
||||
&insert_range,
|
||||
intent,
|
||||
completion_mode_setting,
|
||||
buffer,
|
||||
) {
|
||||
return insert_range.to_offset(buffer);
|
||||
}
|
||||
}
|
||||
|
||||
completion.replace_range.to_offset(buffer)
|
||||
}
|
||||
|
||||
fn insert_extra_newline_brackets(
|
||||
buffer: &MultiBufferSnapshot,
|
||||
range: Range<usize>,
|
||||
@@ -18639,9 +18797,10 @@ fn snippet_completions(
|
||||
end: lsp_end,
|
||||
};
|
||||
Some(Completion {
|
||||
old_range: range,
|
||||
replace_range: range,
|
||||
new_text: snippet.body.clone(),
|
||||
source: CompletionSource::Lsp {
|
||||
insert_range: None,
|
||||
server_id: LanguageServerId(usize::MAX),
|
||||
resolved: true,
|
||||
lsp_completion: Box::new(lsp::CompletionItem {
|
||||
|
||||
@@ -9218,7 +9218,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: String,
|
||||
buffer_marked_text: String,
|
||||
completion_text: &'static str,
|
||||
expected_with_insertion_mode: String,
|
||||
expected_with_insert_mode: String,
|
||||
expected_with_replace_mode: String,
|
||||
expected_with_replace_subsequence_mode: String,
|
||||
expected_with_replace_suffix_mode: String,
|
||||
@@ -9230,7 +9230,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "before ediˇ after".into(),
|
||||
buffer_marked_text: "before <edi|> after".into(),
|
||||
completion_text: "editor",
|
||||
expected_with_insertion_mode: "before editorˇ after".into(),
|
||||
expected_with_insert_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_subsequence_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_suffix_mode: "before editorˇ after".into(),
|
||||
@@ -9240,7 +9240,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "before ediˇtor after".into(),
|
||||
buffer_marked_text: "before <edi|tor> after".into(),
|
||||
completion_text: "editor",
|
||||
expected_with_insertion_mode: "before editorˇtor after".into(),
|
||||
expected_with_insert_mode: "before editorˇtor after".into(),
|
||||
expected_with_replace_mode: "before ediˇtor after".into(),
|
||||
expected_with_replace_subsequence_mode: "before ediˇtor after".into(),
|
||||
expected_with_replace_suffix_mode: "before ediˇtor after".into(),
|
||||
@@ -9250,7 +9250,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "before torˇ after".into(),
|
||||
buffer_marked_text: "before <tor|> after".into(),
|
||||
completion_text: "editor",
|
||||
expected_with_insertion_mode: "before editorˇ after".into(),
|
||||
expected_with_insert_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_subsequence_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_suffix_mode: "before editorˇ after".into(),
|
||||
@@ -9260,7 +9260,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "before ˇtor after".into(),
|
||||
buffer_marked_text: "before <|tor> after".into(),
|
||||
completion_text: "editor",
|
||||
expected_with_insertion_mode: "before editorˇtor after".into(),
|
||||
expected_with_insert_mode: "before editorˇtor after".into(),
|
||||
expected_with_replace_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_subsequence_mode: "before editorˇ after".into(),
|
||||
expected_with_replace_suffix_mode: "before editorˇ after".into(),
|
||||
@@ -9270,7 +9270,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "pˇfield: bool".into(),
|
||||
buffer_marked_text: "<p|field>: bool".into(),
|
||||
completion_text: "pub ",
|
||||
expected_with_insertion_mode: "pub ˇfield: bool".into(),
|
||||
expected_with_insert_mode: "pub ˇfield: bool".into(),
|
||||
expected_with_replace_mode: "pub ˇ: bool".into(),
|
||||
expected_with_replace_subsequence_mode: "pub ˇfield: bool".into(),
|
||||
expected_with_replace_suffix_mode: "pub ˇfield: bool".into(),
|
||||
@@ -9280,7 +9280,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "[element_ˇelement_2]".into(),
|
||||
buffer_marked_text: "[<element_|element_2>]".into(),
|
||||
completion_text: "element_1",
|
||||
expected_with_insertion_mode: "[element_1ˇelement_2]".into(),
|
||||
expected_with_insert_mode: "[element_1ˇelement_2]".into(),
|
||||
expected_with_replace_mode: "[element_1ˇ]".into(),
|
||||
expected_with_replace_subsequence_mode: "[element_1ˇelement_2]".into(),
|
||||
expected_with_replace_suffix_mode: "[element_1ˇelement_2]".into(),
|
||||
@@ -9290,7 +9290,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "[elˇelement]".into(),
|
||||
buffer_marked_text: "[<el|element>]".into(),
|
||||
completion_text: "element",
|
||||
expected_with_insertion_mode: "[elementˇelement]".into(),
|
||||
expected_with_insert_mode: "[elementˇelement]".into(),
|
||||
expected_with_replace_mode: "[elˇement]".into(),
|
||||
expected_with_replace_subsequence_mode: "[elementˇelement]".into(),
|
||||
expected_with_replace_suffix_mode: "[elˇement]".into(),
|
||||
@@ -9300,7 +9300,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "SubˇError".into(),
|
||||
buffer_marked_text: "<Sub|Error>".into(),
|
||||
completion_text: "SubscriptionError",
|
||||
expected_with_insertion_mode: "SubscriptionErrorˇError".into(),
|
||||
expected_with_insert_mode: "SubscriptionErrorˇError".into(),
|
||||
expected_with_replace_mode: "SubscriptionErrorˇ".into(),
|
||||
expected_with_replace_subsequence_mode: "SubscriptionErrorˇ".into(),
|
||||
expected_with_replace_suffix_mode: "SubscriptionErrorˇ".into(),
|
||||
@@ -9310,7 +9310,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "SubˇErr".into(),
|
||||
buffer_marked_text: "<Sub|Err>".into(),
|
||||
completion_text: "SubscriptionError",
|
||||
expected_with_insertion_mode: "SubscriptionErrorˇErr".into(),
|
||||
expected_with_insert_mode: "SubscriptionErrorˇErr".into(),
|
||||
expected_with_replace_mode: "SubscriptionErrorˇ".into(),
|
||||
expected_with_replace_subsequence_mode: "SubscriptionErrorˇ".into(),
|
||||
expected_with_replace_suffix_mode: "SubscriptionErrorˇErr".into(),
|
||||
@@ -9320,7 +9320,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "Suˇscrirr".into(),
|
||||
buffer_marked_text: "<Su|scrirr>".into(),
|
||||
completion_text: "SubscriptionError",
|
||||
expected_with_insertion_mode: "SubscriptionErrorˇscrirr".into(),
|
||||
expected_with_insert_mode: "SubscriptionErrorˇscrirr".into(),
|
||||
expected_with_replace_mode: "SubscriptionErrorˇ".into(),
|
||||
expected_with_replace_subsequence_mode: "SubscriptionErrorˇ".into(),
|
||||
expected_with_replace_suffix_mode: "SubscriptionErrorˇscrirr".into(),
|
||||
@@ -9330,7 +9330,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
initial_state: "foo(indˇix)".into(),
|
||||
buffer_marked_text: "foo(<ind|ix>)".into(),
|
||||
completion_text: "node_index",
|
||||
expected_with_insertion_mode: "foo(node_indexˇix)".into(),
|
||||
expected_with_insert_mode: "foo(node_indexˇix)".into(),
|
||||
expected_with_replace_mode: "foo(node_indexˇ)".into(),
|
||||
expected_with_replace_subsequence_mode: "foo(node_indexˇix)".into(),
|
||||
expected_with_replace_suffix_mode: "foo(node_indexˇix)".into(),
|
||||
@@ -9339,7 +9339,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
|
||||
for run in runs {
|
||||
let run_variations = [
|
||||
(LspInsertMode::Insert, run.expected_with_insertion_mode),
|
||||
(LspInsertMode::Insert, run.expected_with_insert_mode),
|
||||
(LspInsertMode::Replace, run.expected_with_replace_mode),
|
||||
(
|
||||
LspInsertMode::ReplaceSubsequence,
|
||||
@@ -9395,6 +9395,98 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
resolve_provider: Some(true),
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let initial_state = "SubˇError";
|
||||
let buffer_marked_text = "<Sub|Error>";
|
||||
let completion_text = "SubscriptionError";
|
||||
let expected_with_insert_mode = "SubscriptionErrorˇError";
|
||||
let expected_with_replace_mode = "SubscriptionErrorˇ";
|
||||
|
||||
update_test_language_settings(&mut cx, |settings| {
|
||||
settings.defaults.completions = Some(CompletionSettings {
|
||||
words: WordsCompletionMode::Disabled,
|
||||
// set the opposite here to ensure that the action is overriding the default behavior
|
||||
lsp_insert_mode: LspInsertMode::Insert,
|
||||
lsp: true,
|
||||
lsp_fetch_timeout_ms: 0,
|
||||
});
|
||||
});
|
||||
|
||||
cx.set_state(initial_state);
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
|
||||
});
|
||||
|
||||
let counter = Arc::new(AtomicUsize::new(0));
|
||||
handle_completion_request_with_insert_and_replace(
|
||||
&mut cx,
|
||||
&buffer_marked_text,
|
||||
vec![completion_text],
|
||||
counter.clone(),
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
.await;
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
|
||||
let apply_additional_edits = cx.update_editor(|editor, window, cx| {
|
||||
editor
|
||||
.confirm_completion_replace(&ConfirmCompletionReplace, window, cx)
|
||||
.unwrap()
|
||||
});
|
||||
cx.assert_editor_state(&expected_with_replace_mode);
|
||||
handle_resolve_completion_request(&mut cx, None).await;
|
||||
apply_additional_edits.await.unwrap();
|
||||
|
||||
update_test_language_settings(&mut cx, |settings| {
|
||||
settings.defaults.completions = Some(CompletionSettings {
|
||||
words: WordsCompletionMode::Disabled,
|
||||
// set the opposite here to ensure that the action is overriding the default behavior
|
||||
lsp_insert_mode: LspInsertMode::Replace,
|
||||
lsp: true,
|
||||
lsp_fetch_timeout_ms: 0,
|
||||
});
|
||||
});
|
||||
|
||||
cx.set_state(initial_state);
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
|
||||
});
|
||||
handle_completion_request_with_insert_and_replace(
|
||||
&mut cx,
|
||||
&buffer_marked_text,
|
||||
vec![completion_text],
|
||||
counter.clone(),
|
||||
)
|
||||
.await;
|
||||
cx.condition(|editor, _| editor.context_menu_visible())
|
||||
.await;
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 2);
|
||||
|
||||
let apply_additional_edits = cx.update_editor(|editor, window, cx| {
|
||||
editor
|
||||
.confirm_completion_insert(&ConfirmCompletionInsert, window, cx)
|
||||
.unwrap()
|
||||
});
|
||||
cx.assert_editor_state(&expected_with_insert_mode);
|
||||
handle_resolve_completion_request(&mut cx, None).await;
|
||||
apply_additional_edits.await.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_completion(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -12539,6 +12631,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
|
||||
initialization_options: Some(json!({
|
||||
"some other init value": false
|
||||
})),
|
||||
enable_lsp_tasks: false,
|
||||
},
|
||||
);
|
||||
});
|
||||
@@ -12558,6 +12651,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
|
||||
initialization_options: Some(json!({
|
||||
"anotherInitValue": false
|
||||
})),
|
||||
enable_lsp_tasks: false,
|
||||
},
|
||||
);
|
||||
});
|
||||
@@ -12577,6 +12671,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
|
||||
initialization_options: Some(json!({
|
||||
"anotherInitValue": false
|
||||
})),
|
||||
enable_lsp_tasks: false,
|
||||
},
|
||||
);
|
||||
});
|
||||
@@ -12594,6 +12689,7 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut TestAppCon
|
||||
binary: None,
|
||||
settings: None,
|
||||
initialization_options: None,
|
||||
enable_lsp_tasks: false,
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
@@ -461,6 +461,20 @@ impl EditorElement {
|
||||
cx.propagate();
|
||||
}
|
||||
});
|
||||
register_action(editor, window, |editor, action, window, cx| {
|
||||
if let Some(task) = editor.confirm_completion_replace(action, window, cx) {
|
||||
task.detach_and_notify_err(window, cx);
|
||||
} else {
|
||||
cx.propagate();
|
||||
}
|
||||
});
|
||||
register_action(editor, window, |editor, action, window, cx| {
|
||||
if let Some(task) = editor.confirm_completion_insert(action, window, cx) {
|
||||
task.detach_and_notify_err(window, cx);
|
||||
} else {
|
||||
cx.propagate();
|
||||
}
|
||||
});
|
||||
register_action(editor, window, |editor, action, window, cx| {
|
||||
if let Some(task) = editor.compose_completion(action, window, cx) {
|
||||
task.detach_and_notify_err(window, cx);
|
||||
|
||||
@@ -1,12 +1,25 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::Editor;
|
||||
use collections::HashMap;
|
||||
use futures::stream::FuturesUnordered;
|
||||
use gpui::{App, AppContext as _, Entity, Task};
|
||||
use itertools::Itertools;
|
||||
use language::Buffer;
|
||||
use language::Language;
|
||||
use lsp::LanguageServerId;
|
||||
use lsp::LanguageServerName;
|
||||
use multi_buffer::Anchor;
|
||||
use project::LanguageServerToQuery;
|
||||
use project::LocationLink;
|
||||
use project::Project;
|
||||
use project::TaskSourceKind;
|
||||
use project::lsp_store::lsp_ext_command::GetLspRunnables;
|
||||
use smol::stream::StreamExt;
|
||||
use task::ResolvedTask;
|
||||
use task::TaskContext;
|
||||
use text::BufferId;
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub(crate) fn find_specific_language_server_in_selection<F>(
|
||||
editor: &Editor,
|
||||
@@ -60,3 +73,83 @@ where
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
pub fn lsp_tasks(
|
||||
project: Entity<Project>,
|
||||
task_sources: &HashMap<LanguageServerName, Vec<BufferId>>,
|
||||
for_position: Option<text::Anchor>,
|
||||
cx: &mut App,
|
||||
) -> Task<Vec<(TaskSourceKind, Vec<(Option<LocationLink>, ResolvedTask)>)>> {
|
||||
let mut lsp_task_sources = task_sources
|
||||
.iter()
|
||||
.map(|(name, buffer_ids)| {
|
||||
let buffers = buffer_ids
|
||||
.iter()
|
||||
.filter_map(|&buffer_id| project.read(cx).buffer_for_id(buffer_id, cx))
|
||||
.collect::<Vec<_>>();
|
||||
language_server_for_buffers(project.clone(), name.clone(), buffers, cx)
|
||||
})
|
||||
.collect::<FuturesUnordered<_>>();
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let mut lsp_tasks = Vec::new();
|
||||
let lsp_task_context = TaskContext::default();
|
||||
while let Some(server_to_query) = lsp_task_sources.next().await {
|
||||
if let Some((server_id, buffers)) = server_to_query {
|
||||
let source_kind = TaskSourceKind::Lsp(server_id);
|
||||
let id_base = source_kind.to_id_base();
|
||||
let mut new_lsp_tasks = Vec::new();
|
||||
for buffer in buffers {
|
||||
if let Ok(runnables_task) = project.update(cx, |project, cx| {
|
||||
let buffer_id = buffer.read(cx).remote_id();
|
||||
project.request_lsp(
|
||||
buffer,
|
||||
LanguageServerToQuery::Other(server_id),
|
||||
GetLspRunnables {
|
||||
buffer_id,
|
||||
position: for_position,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
}) {
|
||||
if let Some(new_runnables) = runnables_task.await.log_err() {
|
||||
new_lsp_tasks.extend(new_runnables.runnables.into_iter().filter_map(
|
||||
|(location, runnable)| {
|
||||
let resolved_task =
|
||||
runnable.resolve_task(&id_base, &lsp_task_context)?;
|
||||
Some((location, resolved_task))
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
lsp_tasks.push((source_kind, new_lsp_tasks));
|
||||
}
|
||||
}
|
||||
lsp_tasks
|
||||
})
|
||||
}
|
||||
|
||||
fn language_server_for_buffers(
|
||||
project: Entity<Project>,
|
||||
name: LanguageServerName,
|
||||
candidates: Vec<Entity<Buffer>>,
|
||||
cx: &mut App,
|
||||
) -> Task<Option<(LanguageServerId, Vec<Entity<Buffer>>)>> {
|
||||
cx.spawn(async move |cx| {
|
||||
for buffer in &candidates {
|
||||
let server_id = buffer
|
||||
.update(cx, |buffer, cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
project.language_server_id_for_name(buffer, &name.0, cx)
|
||||
})
|
||||
})
|
||||
.ok()?
|
||||
.await;
|
||||
if let Some(server_id) = server_id {
|
||||
return Some((server_id, candidates));
|
||||
}
|
||||
}
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
use crate::Editor;
|
||||
|
||||
use collections::HashMap;
|
||||
use gpui::{App, Task, Window};
|
||||
use project::Location;
|
||||
use lsp::LanguageServerName;
|
||||
use project::{Location, project_settings::ProjectSettings};
|
||||
use settings::Settings as _;
|
||||
use task::{TaskContext, TaskVariables, VariableName};
|
||||
use text::{ToOffset, ToPoint};
|
||||
use text::{BufferId, ToOffset, ToPoint};
|
||||
|
||||
impl Editor {
|
||||
pub fn task_context(&self, window: &mut Window, cx: &mut App) -> Task<Option<TaskContext>> {
|
||||
@@ -70,4 +73,38 @@ impl Editor {
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn lsp_task_sources(&self, cx: &App) -> HashMap<LanguageServerName, Vec<BufferId>> {
|
||||
let lsp_settings = &ProjectSettings::get_global(cx).lsp;
|
||||
|
||||
self.buffer()
|
||||
.read(cx)
|
||||
.all_buffers()
|
||||
.into_iter()
|
||||
.filter_map(|buffer| {
|
||||
let lsp_tasks_source = buffer
|
||||
.read(cx)
|
||||
.language()?
|
||||
.context_provider()?
|
||||
.lsp_task_source()?;
|
||||
if lsp_settings
|
||||
.get(&lsp_tasks_source)
|
||||
.map_or(true, |s| s.enable_lsp_tasks)
|
||||
{
|
||||
let buffer_id = buffer.read(cx).remote_id();
|
||||
Some((lsp_tasks_source, buffer_id))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.fold(
|
||||
HashMap::default(),
|
||||
|mut acc, (lsp_task_source, buffer_id)| {
|
||||
acc.entry(lsp_task_source)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(buffer_id);
|
||||
acc
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -244,10 +244,6 @@ impl language::File for GitBlob {
|
||||
self.worktree_id
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn to_proto(&self, _cx: &App) -> language::proto::File {
|
||||
unimplemented!()
|
||||
}
|
||||
@@ -282,10 +278,6 @@ impl language::File for CommitMetadataFile {
|
||||
self.worktree_id
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn to_proto(&self, _: &App) -> language::proto::File {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
@@ -3953,8 +3953,7 @@ impl Render for GitPanelMessageTooltip {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(IntoElement, IntoComponent)]
|
||||
#[component(scope = "Version Control")]
|
||||
#[derive(IntoElement, RegisterComponent)]
|
||||
pub struct PanelRepoFooter {
|
||||
active_repository: SharedString,
|
||||
branch: Option<Branch>,
|
||||
@@ -4134,8 +4133,12 @@ impl RenderOnce for PanelRepoFooter {
|
||||
}
|
||||
}
|
||||
|
||||
impl ComponentPreview for PanelRepoFooter {
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
|
||||
impl Component for PanelRepoFooter {
|
||||
fn scope() -> ComponentScope {
|
||||
ComponentScope::VersionControl
|
||||
}
|
||||
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
|
||||
let unknown_upstream = None;
|
||||
let no_remote_upstream = Some(UpstreamTracking::Gone);
|
||||
let ahead_of_upstream = Some(
|
||||
@@ -4207,192 +4210,180 @@ impl ComponentPreview for PanelRepoFooter {
|
||||
}
|
||||
|
||||
let example_width = px(340.);
|
||||
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.w_full()
|
||||
.flex_none()
|
||||
.children(vec![
|
||||
example_group_with_title(
|
||||
"Action Button States",
|
||||
vec![
|
||||
single_example(
|
||||
"No Branch",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(1).clone(),
|
||||
None,
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"Remote status unknown",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(2).clone(),
|
||||
Some(branch(unknown_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"No Remote Upstream",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(3).clone(),
|
||||
Some(branch(no_remote_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"Not Ahead or Behind",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(4).clone(),
|
||||
Some(branch(not_ahead_or_behind_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"Behind remote",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(5).clone(),
|
||||
Some(branch(behind_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"Ahead of remote",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(6).clone(),
|
||||
Some(branch(ahead_of_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"Ahead and behind remote",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(7).clone(),
|
||||
Some(branch(ahead_and_behind_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
],
|
||||
)
|
||||
.grow()
|
||||
.vertical(),
|
||||
])
|
||||
.children(vec![
|
||||
example_group_with_title(
|
||||
"Labels",
|
||||
vec![
|
||||
single_example(
|
||||
"Short Branch & Repo",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("zed"),
|
||||
Some(custom("main", behind_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"Long Branch",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("zed"),
|
||||
Some(custom(
|
||||
"redesign-and-update-git-ui-list-entry-style",
|
||||
behind_upstream,
|
||||
)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"Long Repo",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("zed-industries-community-examples"),
|
||||
Some(custom("gpui", ahead_of_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"Long Repo & Branch",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("zed-industries-community-examples"),
|
||||
Some(custom(
|
||||
"redesign-and-update-git-ui-list-entry-style",
|
||||
behind_upstream,
|
||||
)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"Uppercase Repo",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("LICENSES"),
|
||||
Some(custom("main", ahead_of_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
single_example(
|
||||
"Uppercase Branch",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("zed"),
|
||||
Some(custom("update-README", behind_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
.grow(),
|
||||
],
|
||||
)
|
||||
.grow()
|
||||
.vertical(),
|
||||
])
|
||||
.into_any_element()
|
||||
Some(
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.w_full()
|
||||
.flex_none()
|
||||
.children(vec![
|
||||
example_group_with_title(
|
||||
"Action Button States",
|
||||
vec![
|
||||
single_example(
|
||||
"No Branch",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(1).clone(),
|
||||
None,
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Remote status unknown",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(2).clone(),
|
||||
Some(branch(unknown_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"No Remote Upstream",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(3).clone(),
|
||||
Some(branch(no_remote_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Not Ahead or Behind",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(4).clone(),
|
||||
Some(branch(not_ahead_or_behind_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Behind remote",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(5).clone(),
|
||||
Some(branch(behind_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Ahead of remote",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(6).clone(),
|
||||
Some(branch(ahead_of_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Ahead and behind remote",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
active_repository(7).clone(),
|
||||
Some(branch(ahead_and_behind_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
],
|
||||
)
|
||||
.grow()
|
||||
.vertical(),
|
||||
])
|
||||
.children(vec![
|
||||
example_group_with_title(
|
||||
"Labels",
|
||||
vec![
|
||||
single_example(
|
||||
"Short Branch & Repo",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("zed"),
|
||||
Some(custom("main", behind_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Long Branch",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("zed"),
|
||||
Some(custom(
|
||||
"redesign-and-update-git-ui-list-entry-style",
|
||||
behind_upstream,
|
||||
)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Long Repo",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("zed-industries-community-examples"),
|
||||
Some(custom("gpui", ahead_of_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Long Repo & Branch",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("zed-industries-community-examples"),
|
||||
Some(custom(
|
||||
"redesign-and-update-git-ui-list-entry-style",
|
||||
behind_upstream,
|
||||
)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Uppercase Repo",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("LICENSES"),
|
||||
Some(custom("main", ahead_of_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Uppercase Branch",
|
||||
div()
|
||||
.w(example_width)
|
||||
.overflow_hidden()
|
||||
.child(PanelRepoFooter::new_preview(
|
||||
SharedString::from("zed"),
|
||||
Some(custom("update-README", behind_upstream)),
|
||||
))
|
||||
.into_any_element(),
|
||||
),
|
||||
],
|
||||
)
|
||||
.grow()
|
||||
.vertical(),
|
||||
])
|
||||
.into_any_element(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -441,8 +441,8 @@ mod remote_button {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(IntoElement, IntoComponent)]
|
||||
#[component(scope = "Version Control")]
|
||||
/// A visual representation of a file's Git status.
|
||||
#[derive(IntoElement, RegisterComponent)]
|
||||
pub struct GitStatusIcon {
|
||||
status: FileStatus,
|
||||
}
|
||||
@@ -484,8 +484,12 @@ impl RenderOnce for GitStatusIcon {
|
||||
}
|
||||
|
||||
// View this component preview using `workspace: open component-preview`
|
||||
impl ComponentPreview for GitStatusIcon {
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
|
||||
impl Component for GitStatusIcon {
|
||||
fn scope() -> ComponentScope {
|
||||
ComponentScope::VersionControl
|
||||
}
|
||||
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
|
||||
fn tracked_file_status(code: StatusCode) -> FileStatus {
|
||||
FileStatus::Tracked(git::status::TrackedStatus {
|
||||
index_status: code,
|
||||
@@ -502,17 +506,19 @@ impl ComponentPreview for GitStatusIcon {
|
||||
}
|
||||
.into();
|
||||
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.children(vec![example_group(vec![
|
||||
single_example("Modified", GitStatusIcon::new(modified).into_any_element()),
|
||||
single_example("Added", GitStatusIcon::new(added).into_any_element()),
|
||||
single_example("Deleted", GitStatusIcon::new(deleted).into_any_element()),
|
||||
single_example(
|
||||
"Conflicted",
|
||||
GitStatusIcon::new(conflict).into_any_element(),
|
||||
),
|
||||
])])
|
||||
.into_any_element()
|
||||
Some(
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.children(vec![example_group(vec![
|
||||
single_example("Modified", GitStatusIcon::new(modified).into_any_element()),
|
||||
single_example("Added", GitStatusIcon::new(added).into_any_element()),
|
||||
single_example("Deleted", GitStatusIcon::new(deleted).into_any_element()),
|
||||
single_example(
|
||||
"Conflicted",
|
||||
GitStatusIcon::new(conflict).into_any_element(),
|
||||
),
|
||||
])])
|
||||
.into_any_element(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1005,8 +1005,7 @@ impl Render for ProjectDiffToolbar {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(IntoElement, IntoComponent)]
|
||||
#[component(scope = "Version Control")]
|
||||
#[derive(IntoElement, RegisterComponent)]
|
||||
pub struct ProjectDiffEmptyState {
|
||||
pub no_repo: bool,
|
||||
pub can_push_and_pull: bool,
|
||||
@@ -1178,8 +1177,12 @@ mod preview {
|
||||
use super::ProjectDiffEmptyState;
|
||||
|
||||
// View this component preview using `workspace: open component-preview`
|
||||
impl ComponentPreview for ProjectDiffEmptyState {
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> AnyElement {
|
||||
impl Component for ProjectDiffEmptyState {
|
||||
fn scope() -> ComponentScope {
|
||||
ComponentScope::VersionControl
|
||||
}
|
||||
|
||||
fn preview(_window: &mut Window, _cx: &mut App) -> Option<AnyElement> {
|
||||
let unknown_upstream: Option<UpstreamTracking> = None;
|
||||
let ahead_of_upstream: Option<UpstreamTracking> = Some(
|
||||
UpstreamTrackingStatus {
|
||||
@@ -1244,46 +1247,48 @@ mod preview {
|
||||
|
||||
let (width, height) = (px(480.), px(320.));
|
||||
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.children(vec![
|
||||
example_group(vec![
|
||||
single_example(
|
||||
"No Repo",
|
||||
div()
|
||||
.w(width)
|
||||
.h(height)
|
||||
.child(no_repo_state)
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"No Changes",
|
||||
div()
|
||||
.w(width)
|
||||
.h(height)
|
||||
.child(no_changes_state)
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Unknown Upstream",
|
||||
div()
|
||||
.w(width)
|
||||
.h(height)
|
||||
.child(unknown_upstream_state)
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Ahead of Remote",
|
||||
div()
|
||||
.w(width)
|
||||
.h(height)
|
||||
.child(ahead_of_upstream_state)
|
||||
.into_any_element(),
|
||||
),
|
||||
Some(
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.children(vec![
|
||||
example_group(vec![
|
||||
single_example(
|
||||
"No Repo",
|
||||
div()
|
||||
.w(width)
|
||||
.h(height)
|
||||
.child(no_repo_state)
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"No Changes",
|
||||
div()
|
||||
.w(width)
|
||||
.h(height)
|
||||
.child(no_changes_state)
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Unknown Upstream",
|
||||
div()
|
||||
.w(width)
|
||||
.h(height)
|
||||
.child(unknown_upstream_state)
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Ahead of Remote",
|
||||
div()
|
||||
.w(width)
|
||||
.h(height)
|
||||
.child(ahead_of_upstream_state)
|
||||
.into_any_element(),
|
||||
),
|
||||
])
|
||||
.vertical(),
|
||||
])
|
||||
.vertical(),
|
||||
])
|
||||
.into_any_element()
|
||||
.into_any_element(),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,13 +42,10 @@ use std::{
/// }
/// register_action!(Paste);
/// ```
pub trait Action: 'static + Send {
pub trait Action: Any + Send {
/// Clone the action into a new box
fn boxed_clone(&self) -> Box<dyn Action>;

/// Cast the action to the any type
fn as_any(&self) -> &dyn Any;

/// Do a partial equality check on this action and the other
fn partial_eq(&self, action: &dyn Action) -> bool;

@@ -94,9 +91,9 @@ impl std::fmt::Debug for dyn Action {
}

impl dyn Action {
/// Get the type id of this action
pub fn type_id(&self) -> TypeId {
self.as_any().type_id()
/// Type-erase Action type.
pub fn as_any(&self) -> &dyn Any {
self as &dyn Any
}
}

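The hunk above swaps the hand-written `as_any` method for an `Any` supertrait plus a single upcast on `dyn Action`. A self-contained sketch of that pattern, using the `Paste` action from the doc comment as a stand-in (the `&dyn Action` to `&dyn Any` coercion assumes a toolchain with trait upcasting stabilized, Rust 1.86 or later):

```rust
use std::any::Any;

// Requiring `Any` as a supertrait lets callers upcast instead of every
// implementor writing its own `fn as_any(&self) -> &dyn Any { self }`.
trait Action: Any + Send {
    fn name(&self) -> &'static str;
}

#[derive(Debug, PartialEq)]
struct Paste;

impl Action for Paste {
    fn name(&self) -> &'static str {
        "Paste"
    }
}

fn main() {
    let action: Box<dyn Action> = Box::new(Paste);
    // Upcast the trait object to `&dyn Any`, then downcast to the concrete type.
    let any: &dyn Any = &*action;
    assert_eq!(any.downcast_ref::<Paste>(), Some(&Paste));
    println!("downcast ok for {}", action.name());
}
```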
@@ -557,9 +554,6 @@ macro_rules! __impl_action {
|
||||
::std::boxed::Box::new(self.clone())
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn ::std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
$($items)*
|
||||
}
|
||||
|
||||
@@ -597,10 +597,6 @@ mod tests {
|
||||
Box::new(TestAction)
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn ::std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn build(_value: serde_json::Value) -> anyhow::Result<Box<dyn Action>>
|
||||
where
|
||||
Self: Sized,
|
||||
|
||||
@@ -513,9 +513,8 @@ fn fs_quad(input: QuadVarying) -> @location(0) vec4<f32> {
let point = input.position.xy - quad.bounds.origin;
let center_to_point = point - half_size;

// Signed distance field threshold for inclusion of pixels. Use of 0.5
// instead of 1.0 causes the width of rounded borders to appear more
// consistent with straight borders.
// Signed distance field threshold for inclusion of pixels. 0.5 is the
// minimum distance between the center of the pixel and the edge.
let antialias_threshold = 0.5;

// Radius of the nearest corner

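As a rough, CPU-side illustration of the 0.5 threshold described in the comment above (the circle SDF and the saturate-style blend below are assumptions for the example, not the shader's exact math):

```rust
/// Signed distance from a point to a circle outline (negative inside).
/// Illustrative only: the real shader works with rounded rectangles.
fn sdf_circle(x: f32, y: f32, center_x: f32, center_y: f32, radius: f32) -> f32 {
    (x - center_x).hypot(y - center_y) - radius
}

fn main() {
    // Pixels whose center lies within 0.5px of the edge get partial coverage,
    // which is what the 0.5 antialias threshold expresses.
    let antialias_threshold = 0.5_f32;
    for (px, py) in [(10.0_f32, 0.0_f32), (10.4, 0.0), (11.2, 0.0)] {
        let distance = sdf_circle(px, py, 0.0, 0.0, 10.0);
        // saturate(threshold - distance): 1.0 well inside, 0.0 well outside.
        let coverage = (antialias_threshold - distance).clamp(0.0, 1.0);
        println!("distance {distance:+.2} -> coverage {coverage:.2}");
    }
}
```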
@@ -612,24 +611,29 @@ fn fs_quad(input: QuadVarying) -> @location(0) vec4<f32> {

// Dashed border logic when border_style == 1
if (quad.border_style == 1) {
// Position in "dash space", where each dash period has length 1
// Position along the perimeter in "dash space", where each dash
// period has length 1
var t = 0.0;

// Total number of dash periods, so that the dash spacing can be
// adjusted to evenly divide it
var max_t = 0.0;

// Since border width affects the dash size, the density of dashes
// varies, and this is indicated by dash_velocity. It has units
// (dash period / pixel). So a dash velocity of (1 / 10) is 1 dash
// every 10 pixels.
var dash_velocity = 0.0;

// Border width is proportional to dash size. This is the behavior
// used by browsers, but also avoids dashes from different segments
// overlapping when dash size is smaller than the border width.
//
// Dash pattern: (2 * border width) dash, (1 * border width) gap
let dash_length_per_width = 2.0;
let dash_gap_per_width = 1.0;
let dash_period_per_width = dash_length_per_width + dash_gap_per_width;

// Since the dash size is determined by border width, the density of
// dashes varies. Multiplying a pixel distance by this returns a
// position in dash space - it has units (dash period / pixels). So
// a dash velocity of (1 / 10) is 1 dash every 10 pixels.
var dash_velocity = 0.0;

// Dividing this by the border width gives the dash velocity
let dv_numerator = 1.0 / dash_period_per_width;

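A minimal Rust sketch of the same "dash space" bookkeeping for a straight edge, assuming the 2:1 dash/gap ratio from the comment above; the `fract(t) < 2/3` dash test is an assumption about how the shader consumes `t`:

```rust
// Dash pattern from the shader comments: a dash of 2×border_width followed by
// a gap of 1×border_width, so one dash period spans 3×border_width pixels.
const DASH_LENGTH_PER_WIDTH: f32 = 2.0;
const DASH_GAP_PER_WIDTH: f32 = 1.0;
const DASH_PERIOD_PER_WIDTH: f32 = DASH_LENGTH_PER_WIDTH + DASH_GAP_PER_WIDTH;

/// Dash periods advanced per pixel travelled along the border.
fn dash_velocity(border_width: f32) -> f32 {
    (1.0 / DASH_PERIOD_PER_WIDTH) / border_width
}

/// Whether a pixel `distance_px` along a straight edge falls on a dash.
/// The fract-based test is an assumed simplification of the shader's logic.
fn is_on_dash(distance_px: f32, border_width: f32) -> bool {
    // Position in dash space: fract(t) < 2/3 means we are inside the dash
    // portion of the current period, otherwise we are in the gap.
    let t = distance_px * dash_velocity(border_width);
    t.fract() < DASH_LENGTH_PER_WIDTH / DASH_PERIOD_PER_WIDTH
}

fn main() {
    // With a 2px border, one period is 6px: a 4px dash, then a 2px gap.
    for px in [0.0_f32, 3.0, 4.5, 6.0] {
        println!("{px:>4}px -> {}", if is_on_dash(px, 2.0) { "dash" } else { "gap" });
    }
}
```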
@@ -645,8 +649,8 @@ fn fs_quad(input: QuadVarying) -> @location(0) vec4<f32> {
|
||||
t = select(point.y, point.x, is_horizontal) * dash_velocity;
|
||||
max_t = select(size.y, size.x, is_horizontal) * dash_velocity;
|
||||
} else {
|
||||
// When corners are rounded, the dashes are laid out around the
|
||||
// whole perimeter.
|
||||
// When corners are rounded, the dashes are laid out clockwise
|
||||
// around the whole perimeter.
|
||||
|
||||
let r_tr = quad.corner_radii.top_right;
|
||||
let r_br = quad.corner_radii.bottom_right;
|
||||
@@ -694,23 +698,34 @@ fn fs_quad(input: QuadVarying) -> @location(0) vec4<f32> {
|
||||
if (is_near_rounded_corner) {
|
||||
let radians = atan2(corner_center_to_point.y,
|
||||
corner_center_to_point.x);
|
||||
let corner_t = radians * corner_radius;
|
||||
let corner_t = radians * corner_radius * dash_velocity;
|
||||
|
||||
if (center_to_point.x >= 0.0) {
|
||||
if (center_to_point.y < 0.0) {
|
||||
dash_velocity = corner_dash_velocity_tr;
|
||||
t = upto_r - corner_t * dash_velocity;
|
||||
// Subtracted because radians is pi/2 to 0 when
|
||||
// going clockwise around the top right corner,
|
||||
// since the y axis has been flipped
|
||||
t = upto_r - corner_t;
|
||||
} else {
|
||||
dash_velocity = corner_dash_velocity_br;
|
||||
t = upto_br + corner_t * dash_velocity;
|
||||
// Added because radians is 0 to pi/2 when going
|
||||
// clockwise around the bottom-right corner
|
||||
t = upto_br + corner_t;
|
||||
}
|
||||
} else {
|
||||
if (center_to_point.y >= 0.0) {
|
||||
dash_velocity = corner_dash_velocity_bl;
|
||||
t = upto_l - corner_t * dash_velocity;
|
||||
// Subtracted because radians is pi/2 to 0 when
|
||||
// going clockwise around the bottom-left corner,
|
||||
// since the x axis has been flipped
|
||||
t = upto_l - corner_t;
|
||||
} else {
|
||||
dash_velocity = corner_dash_velocity_tl;
|
||||
t = upto_tl + corner_t * dash_velocity;
|
||||
// Added because radians is 0 to pi/2 when going
|
||||
// clockwise around the top-left corner, since both
|
||||
// axis were flipped
|
||||
t = upto_tl + corner_t;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
||||
@@ -121,7 +121,8 @@ fragment float4 quad_fragment(QuadFragmentInput input [[stage_in]],
|
||||
float2 point = input.position.xy - float2(quad.bounds.origin.x, quad.bounds.origin.y);
|
||||
float2 center_to_point = point - half_size;
|
||||
|
||||
// Signed distance field threshold for inclusion of pixels
|
||||
// Signed distance field threshold for inclusion of pixels. 0.5 is the
|
||||
// minimum distance between the center of the pixel and the edge.
|
||||
const float antialias_threshold = 0.5;
|
||||
|
||||
// Radius of the nearest corner
|
||||
@@ -211,24 +212,29 @@ fragment float4 quad_fragment(QuadFragmentInput input [[stage_in]],
|
||||
|
||||
// Dashed border logic when border_style == 1
|
||||
if (quad.border_style == 1) {
|
||||
// Position in "dash space", where each dash period has length 1
|
||||
// Position along the perimeter in "dash space", where each dash
|
||||
// period has length 1
|
||||
float t = 0.0;
|
||||
|
||||
// Total number of dash periods, so that the dash spacing can be
|
||||
// adjusted to evenly divide it
|
||||
float max_t = 0.0;
|
||||
|
||||
// Since border width affects the dash size, the density of dashes
|
||||
// varies, and this is indicated by dash_velocity. It has units
|
||||
// (dash period / pixel). So a dash velocity of (1 / 10) is 1 dash
|
||||
// every 10 pixels.
|
||||
float dash_velocity = 0.0;
|
||||
|
||||
// Border width is proportional to dash size. This is the behavior
|
||||
// used by browsers, but also avoids dashes from different segments
|
||||
// overlapping when dash size is smaller than the border width.
|
||||
//
|
||||
// Dash pattern: (2 * border width) dash, (1 * border width) gap
|
||||
const float dash_length_per_width = 2.0;
|
||||
const float dash_gap_per_width = 1.0;
|
||||
const float dash_period_per_width = dash_length_per_width + dash_gap_per_width;
|
||||
|
||||
// Since the dash size is determined by border width, the density of
|
||||
// dashes varies. Multiplying a pixel distance by this returns a
|
||||
// position in dash space - it has units (dash period / pixels). So
|
||||
// a dash velocity of (1 / 10) is 1 dash every 10 pixels.
|
||||
float dash_velocity = 0.0;
|
||||
|
||||
// Dividing this by the border width gives the dash velocity
|
||||
const float dv_numerator = 1.0 / dash_period_per_width;
|
||||
|
||||
@@ -244,8 +250,8 @@ fragment float4 quad_fragment(QuadFragmentInput input [[stage_in]],
|
||||
max_t = is_horizontal ? size.x : size.y;
|
||||
max_t *= dash_velocity;
|
||||
} else {
|
||||
// When corners are rounded, the dashes are laid out around the
|
||||
// whole perimeter.
|
||||
// When corners are rounded, the dashes are laid out clockwise
|
||||
// around the whole perimeter.
|
||||
|
||||
float r_tr = quad.corner_radii.top_right;
|
||||
float r_br = quad.corner_radii.bottom_right;
|
||||
@@ -292,23 +298,34 @@ fragment float4 quad_fragment(QuadFragmentInput input [[stage_in]],
|
||||
|
||||
if (is_near_rounded_corner) {
|
||||
float radians = atan2(corner_center_to_point.y, corner_center_to_point.x);
|
||||
float corner_t = radians * corner_radius;
|
||||
float corner_t = radians * corner_radius * dash_velocity;
|
||||
|
||||
if (center_to_point.x >= 0.0) {
|
||||
if (center_to_point.y < 0.0) {
|
||||
dash_velocity = corner_dash_velocity_tr;
|
||||
t = upto_r - corner_t * dash_velocity;
|
||||
// Subtracted because radians is pi/2 to 0 when
|
||||
// going clockwise around the top right corner,
|
||||
// since the y axis has been flipped
|
||||
t = upto_r - corner_t;
|
||||
} else {
|
||||
dash_velocity = corner_dash_velocity_br;
|
||||
t = upto_br + corner_t * dash_velocity;
|
||||
// Added because radians is 0 to pi/2 when going
|
||||
// clockwise around the bottom-right corner
|
||||
t = upto_br + corner_t;
|
||||
}
|
||||
} else {
|
||||
if (center_to_point.y >= 0.0) {
|
||||
dash_velocity = corner_dash_velocity_bl;
|
||||
t = upto_l - corner_t * dash_velocity;
|
||||
// Subtracted because radians is pi/1 to 0 when
|
||||
// going clockwise around the bottom-left corner,
|
||||
// since the x axis has been flipped
|
||||
t = upto_l - corner_t;
|
||||
} else {
|
||||
dash_velocity = corner_dash_velocity_tl;
|
||||
t = upto_tl + corner_t * dash_velocity;
|
||||
// Added because radians is 0 to pi/2 when going
|
||||
// clockwise around the top-left corner, since both
|
||||
// axis were flipped
|
||||
t = upto_tl + corner_t;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
||||
@@ -22,10 +22,6 @@ fn test_action_macros() {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn partial_eq(&self, _action: &dyn gpui::Action) -> bool {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
@@ -306,7 +306,7 @@ pub enum BufferEvent {
|
||||
}
|
||||
|
||||
/// The file associated with a buffer.
|
||||
pub trait File: Send + Sync {
|
||||
pub trait File: Send + Sync + Any {
|
||||
/// Returns the [`LocalFile`] associated with this file, if the
|
||||
/// file is local.
|
||||
fn as_local(&self) -> Option<&dyn LocalFile>;
|
||||
@@ -336,9 +336,6 @@ pub trait File: Send + Sync {
|
||||
/// This is needed for looking up project-specific settings.
|
||||
fn worktree_id(&self, cx: &App) -> WorktreeId;
|
||||
|
||||
/// Converts this file into an [`Any`] trait object.
|
||||
fn as_any(&self) -> &dyn Any;
|
||||
|
||||
/// Converts this file into a protobuf message.
|
||||
fn to_proto(&self, cx: &App) -> rpc::proto::File;
|
||||
|
||||
@@ -4610,10 +4607,6 @@ impl File for TestFile {
|
||||
WorktreeId::from_usize(0)
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn to_proto(&self, _: &App) -> rpc::proto::File {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
@@ -572,7 +572,11 @@ pub trait LspAdapter: 'static + Send + Sync {
|
||||
}
|
||||
|
||||
/// Support custom initialize params.
|
||||
fn prepare_initialize_params(&self, original: InitializeParams) -> Result<InitializeParams> {
|
||||
fn prepare_initialize_params(
|
||||
&self,
|
||||
original: InitializeParams,
|
||||
_: &App,
|
||||
) -> Result<InitializeParams> {
|
||||
Ok(original)
|
||||
}
|
||||
|
||||
|
||||
@@ -370,7 +370,7 @@ fn default_words_completion_mode() -> WordsCompletionMode {
|
||||
}
|
||||
|
||||
fn default_lsp_insert_mode() -> LspInsertMode {
|
||||
LspInsertMode::Insert
|
||||
LspInsertMode::ReplaceSuffix
|
||||
}
|
||||
|
||||
fn default_lsp_fetch_timeout_ms() -> u64 {
|
||||
@@ -1029,7 +1029,10 @@ fn scroll_debounce_ms() -> u64 {
#[derive(Debug, Clone, Deserialize, PartialEq, Serialize, JsonSchema)]
pub struct LanguageTaskConfig {
/// Extra task variables to set for a particular language.
#[serde(default)]
pub variables: HashMap<String, String>,
#[serde(default = "default_true")]
pub enabled: bool,
}

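A short sketch of how the new `enabled` flag deserializes, assuming `default_true` is the usual helper that just returns `true` (its definition is outside this hunk); the struct is shown standalone with serde_json purely for illustration:

```rust
use std::collections::HashMap;

use serde::Deserialize;

// Assumed shape of the helper referenced by `#[serde(default = "default_true")]`.
fn default_true() -> bool {
    true
}

#[derive(Debug, Deserialize)]
struct LanguageTaskConfig {
    /// Extra task variables to set for a particular language.
    #[serde(default)]
    variables: HashMap<String, String>,
    /// Tasks stay enabled unless the user opts out explicitly.
    #[serde(default = "default_true")]
    enabled: bool,
}

fn main() -> serde_json::Result<()> {
    // An empty object gets both defaults: no variables, enabled = true.
    let empty: LanguageTaskConfig = serde_json::from_str("{}")?;
    assert!(empty.enabled && empty.variables.is_empty());

    // An explicit opt-out overrides the default.
    let disabled: LanguageTaskConfig = serde_json::from_str(r#"{ "enabled": false }"#)?;
    assert!(!disabled.enabled);
    Ok(())
}
```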
impl InlayHintSettings {
@@ -5,6 +5,7 @@ use crate::{LanguageToolchainStore, Location, Runnable};
|
||||
use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
use gpui::{App, Task};
|
||||
use lsp::LanguageServerName;
|
||||
use task::{TaskTemplates, TaskVariables};
|
||||
use text::BufferId;
|
||||
|
||||
@@ -15,6 +16,7 @@ pub struct RunnableRange {
|
||||
pub runnable: Runnable,
|
||||
pub extra_captures: HashMap<String, String>,
|
||||
}
|
||||
|
||||
/// Language Contexts are used by Zed tasks to extract information about the source file where the tasks are supposed to be scheduled from.
|
||||
/// Multiple context providers may be used together: by default, Zed provides a base [`BasicContextProvider`] context that fills all non-custom [`VariableName`] variants.
|
||||
///
|
||||
@@ -40,4 +42,9 @@ pub trait ContextProvider: Send + Sync {
|
||||
) -> Option<TaskTemplates> {
|
||||
None
|
||||
}
|
||||
|
||||
/// A language server name, that can return tasks using LSP (ext) for this language.
|
||||
fn lsp_task_source(&self) -> Option<LanguageServerName> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use anyhow::{Context, Result, anyhow, bail};
|
||||
use async_trait::async_trait;
|
||||
use futures::StreamExt;
|
||||
use gpui::AsyncApp;
|
||||
use gpui::{App, AsyncApp};
|
||||
use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
|
||||
pub use language::*;
|
||||
use lsp::{DiagnosticTag, InitializeParams, LanguageServerBinary, LanguageServerName};
|
||||
@@ -273,6 +273,7 @@ impl super::LspAdapter for CLspAdapter {
|
||||
fn prepare_initialize_params(
|
||||
&self,
|
||||
mut original: InitializeParams,
|
||||
_: &App,
|
||||
) -> Result<InitializeParams> {
|
||||
let experimental = json!({
|
||||
"textDocument": {
|
||||
|
||||
@@ -991,6 +991,7 @@ impl LspAdapter for PyLspAdapter {
|
||||
util::command::new_smol_command(pip_path.as_path())
|
||||
.arg("install")
|
||||
.arg("python-lsp-server")
|
||||
.arg("-U")
|
||||
.output()
|
||||
.await?
|
||||
.status
|
||||
@@ -1001,6 +1002,7 @@ impl LspAdapter for PyLspAdapter {
|
||||
util::command::new_smol_command(pip_path.as_path())
|
||||
.arg("install")
|
||||
.arg("python-lsp-server[all]")
|
||||
.arg("-U")
|
||||
.output()
|
||||
.await?
|
||||
.status
|
||||
@@ -1011,6 +1013,7 @@ impl LspAdapter for PyLspAdapter {
|
||||
util::command::new_smol_command(pip_path)
|
||||
.arg("install")
|
||||
.arg("pylsp-mypy")
|
||||
.arg("-U")
|
||||
.output()
|
||||
.await?
|
||||
.status
|
||||
|
||||
@@ -7,8 +7,11 @@ use gpui::{App, AsyncApp, SharedString, Task};
|
||||
use http_client::github::AssetKind;
|
||||
use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
|
||||
pub use language::*;
|
||||
use lsp::LanguageServerBinary;
|
||||
use lsp::{InitializeParams, LanguageServerBinary};
|
||||
use project::project_settings::ProjectSettings;
|
||||
use regex::Regex;
|
||||
use serde_json::json;
|
||||
use settings::Settings as _;
|
||||
use smol::fs::{self};
|
||||
use std::fmt::Display;
|
||||
use std::{
|
||||
@@ -18,6 +21,7 @@ use std::{
|
||||
sync::{Arc, LazyLock},
|
||||
};
|
||||
use task::{TaskTemplate, TaskTemplates, TaskType, TaskVariables, VariableName};
|
||||
use util::merge_json_value_into;
|
||||
use util::{ResultExt, fs::remove_matching, maybe};
|
||||
|
||||
use crate::language_settings::language_settings;
|
||||
@@ -48,9 +52,9 @@ impl RustLspAdapter {
|
||||
const ARCH_SERVER_NAME: &str = "pc-windows-msvc";
|
||||
}
|
||||
|
||||
impl RustLspAdapter {
|
||||
const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("rust-analyzer");
|
||||
const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("rust-analyzer");
|
||||
|
||||
impl RustLspAdapter {
|
||||
fn build_asset_name() -> String {
|
||||
let extension = match Self::GITHUB_ASSET_KIND {
|
||||
AssetKind::TarGz => "tar.gz",
|
||||
@@ -60,7 +64,7 @@ impl RustLspAdapter {
|
||||
|
||||
format!(
|
||||
"{}-{}-{}.{}",
|
||||
Self::SERVER_NAME,
|
||||
SERVER_NAME,
|
||||
std::env::consts::ARCH,
|
||||
Self::ARCH_SERVER_NAME,
|
||||
extension
|
||||
@@ -98,7 +102,7 @@ impl ManifestProvider for CargoManifestProvider {
|
||||
#[async_trait(?Send)]
|
||||
impl LspAdapter for RustLspAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
Self::SERVER_NAME.clone()
|
||||
SERVER_NAME.clone()
|
||||
}
|
||||
|
||||
fn manifest_name(&self) -> Option<ManifestName> {
|
||||
@@ -473,6 +477,30 @@ impl LspAdapter for RustLspAdapter {
filter_range,
})
}

fn prepare_initialize_params(
&self,
mut original: InitializeParams,
cx: &App,
) -> Result<InitializeParams> {
let enable_lsp_tasks = ProjectSettings::get_global(cx)
.lsp
.get(&SERVER_NAME)
.map_or(false, |s| s.enable_lsp_tasks);
if enable_lsp_tasks {
let experimental = json!({
"runnables": {
"kinds": [ "cargo", "shell" ],
},
});
if let Some(ref mut original_experimental) = original.capabilities.experimental {
merge_json_value_into(experimental, original_experimental);
} else {
original.capabilities.experimental = Some(experimental);
}
}
Ok(original)
}
}

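The `merge_json_value_into` call above folds the `runnables` capability into any experimental capabilities that are already set. The hypothetical `merge_into` below approximates what such a helper presumably does (a recursive, source-wins merge — an assumption for this sketch, not the util crate's documented behavior):

```rust
use serde_json::{Value, json};

// Hypothetical stand-in for a recursive merge helper: values from `source`
// win, except that nested objects are merged key by key.
fn merge_into(source: Value, target: &mut Value) {
    match (source, target) {
        (Value::Object(source), Value::Object(target)) => {
            for (key, value) in source {
                match target.get_mut(&key) {
                    Some(existing) => merge_into(value, existing),
                    None => {
                        target.insert(key, value);
                    }
                }
            }
        }
        (source, target) => *target = source,
    }
}

fn main() {
    // Existing experimental capabilities from another source…
    let mut experimental = json!({ "snippetTextEdit": true });
    // …get the runnables section merged in rather than overwritten.
    merge_into(
        json!({ "runnables": { "kinds": ["cargo", "shell"] } }),
        &mut experimental,
    );
    assert_eq!(experimental["snippetTextEdit"], json!(true));
    assert_eq!(experimental["runnables"]["kinds"][1], json!("shell"));
}
```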
pub(crate) struct RustContextProvider;
|
||||
@@ -776,6 +804,10 @@ impl ContextProvider for RustContextProvider {
|
||||
|
||||
Some(TaskTemplates(task_templates))
|
||||
}
|
||||
|
||||
fn lsp_task_source(&self) -> Option<LanguageServerName> {
|
||||
Some(SERVER_NAME)
|
||||
}
|
||||
}
|
||||
|
||||
/// Part of the data structure of Cargo metadata
|
||||
|
||||
@@ -1718,21 +1718,25 @@ impl MultiBuffer {
|
||||
(None, None) => break,
|
||||
(None, Some(_)) => {
|
||||
let existing_id = existing_iter.next().unwrap();
|
||||
let locator = snapshot.excerpt_locator_for_id(existing_id);
|
||||
let existing_excerpt = excerpts_cursor.item().unwrap();
|
||||
excerpts_cursor.seek_forward(&Some(locator), Bias::Left, &());
|
||||
let existing_end = existing_excerpt
|
||||
.range
|
||||
.context
|
||||
.end
|
||||
.to_point(&buffer_snapshot);
|
||||
if let Some((new_id, last)) = to_insert.last() {
|
||||
if existing_end <= last.context.end {
|
||||
self.snapshot
|
||||
.borrow_mut()
|
||||
.replaced_excerpts
|
||||
.insert(existing_id, *new_id);
|
||||
}
|
||||
let locator = snapshot.excerpt_locator_for_id(existing_id);
|
||||
excerpts_cursor.seek_forward(&Some(locator), Bias::Left, &());
|
||||
if let Some(existing_excerpt) = excerpts_cursor
|
||||
.item()
|
||||
.filter(|e| e.buffer_id == buffer_snapshot.remote_id())
|
||||
{
|
||||
let existing_end = existing_excerpt
|
||||
.range
|
||||
.context
|
||||
.end
|
||||
.to_point(&buffer_snapshot);
|
||||
if existing_end <= last.context.end {
|
||||
self.snapshot
|
||||
.borrow_mut()
|
||||
.replaced_excerpts
|
||||
.insert(existing_id, *new_id);
|
||||
}
|
||||
};
|
||||
}
|
||||
to_remove.push(existing_id);
|
||||
continue;
|
||||
@@ -1745,16 +1749,14 @@ impl MultiBuffer {
|
||||
};
|
||||
let locator = snapshot.excerpt_locator_for_id(*existing);
|
||||
excerpts_cursor.seek_forward(&Some(locator), Bias::Left, &());
|
||||
let Some(existing_excerpt) = excerpts_cursor.item() else {
|
||||
let Some(existing_excerpt) = excerpts_cursor
|
||||
.item()
|
||||
.filter(|e| e.buffer_id == buffer_snapshot.remote_id())
|
||||
else {
|
||||
to_remove.push(existing_iter.next().unwrap());
|
||||
to_insert.push((next_excerpt_id(), new_iter.next().unwrap()));
|
||||
continue;
|
||||
};
|
||||
if existing_excerpt.buffer_id != buffer_snapshot.remote_id() {
|
||||
to_remove.push(existing_iter.next().unwrap());
|
||||
to_insert.push((next_excerpt_id(), new_iter.next().unwrap()));
|
||||
continue;
|
||||
}
|
||||
|
||||
let existing_start = existing_excerpt
|
||||
.range
|
||||
|
||||
@@ -1798,6 +1798,88 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) {
|
||||
let buf1 = cx.new(|cx| {
|
||||
Buffer::local(
|
||||
indoc! {
|
||||
"zero
|
||||
one
|
||||
two
|
||||
three
|
||||
four
|
||||
five
|
||||
six
|
||||
seven
|
||||
",
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let path: PathKey = PathKey::namespaced(0, Path::new("/").into());
|
||||
let buf2 = cx.new(|cx| {
|
||||
Buffer::local(
|
||||
indoc! {
|
||||
"000
|
||||
111
|
||||
222
|
||||
333
|
||||
"
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
|
||||
multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.set_excerpts_for_path(
|
||||
path.clone(),
|
||||
buf1.clone(),
|
||||
vec![Point::row_range(1..1), Point::row_range(4..5)],
|
||||
1,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
assert_excerpts_match(
|
||||
&multibuffer,
|
||||
cx,
|
||||
indoc! {
|
||||
"-----
|
||||
zero
|
||||
one
|
||||
two
|
||||
-----
|
||||
three
|
||||
four
|
||||
five
|
||||
six
|
||||
"
|
||||
},
|
||||
);
|
||||
|
||||
multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.set_excerpts_for_path(
|
||||
path.clone(),
|
||||
buf2.clone(),
|
||||
vec![Point::row_range(0..1)],
|
||||
2,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
assert_excerpts_match(
|
||||
&multibuffer,
|
||||
cx,
|
||||
indoc! {"-----
|
||||
000
|
||||
111
|
||||
222
|
||||
333
|
||||
"},
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
let base_text_1 = indoc!(
|
||||
|
||||
@@ -33,8 +33,7 @@ impl From<IconName> for ToastIcon {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(IntoComponent)]
|
||||
#[component(scope = "Notification")]
|
||||
#[derive(RegisterComponent)]
|
||||
pub struct StatusToast {
|
||||
icon: Option<ToastIcon>,
|
||||
text: SharedString,
|
||||
@@ -135,8 +134,12 @@ impl Focusable for StatusToast {
|
||||
|
||||
impl EventEmitter<DismissEvent> for StatusToast {}
|
||||
|
||||
impl ComponentPreview for StatusToast {
|
||||
fn preview(_window: &mut Window, cx: &mut App) -> AnyElement {
|
||||
impl Component for StatusToast {
|
||||
fn scope() -> ComponentScope {
|
||||
ComponentScope::Notification
|
||||
}
|
||||
|
||||
fn preview(_window: &mut Window, cx: &mut App) -> Option<AnyElement> {
|
||||
let text_example = StatusToast::new("Operation completed", cx, |this, _| this);
|
||||
|
||||
let action_example = StatusToast::new("Update ready to install", cx, |this, _cx| {
|
||||
@@ -175,29 +178,40 @@ impl ComponentPreview for StatusToast {
|
||||
})
|
||||
});
|
||||
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.p_4()
|
||||
.children(vec![
|
||||
example_group_with_title(
|
||||
"Basic Toast",
|
||||
vec![
|
||||
single_example("Text", div().child(text_example).into_any_element()),
|
||||
single_example("Action", div().child(action_example).into_any_element()),
|
||||
single_example("Icon", div().child(icon_example).into_any_element()),
|
||||
],
|
||||
),
|
||||
example_group_with_title(
|
||||
"Examples",
|
||||
vec![
|
||||
single_example("Success", div().child(success_example).into_any_element()),
|
||||
single_example("Error", div().child(error_example).into_any_element()),
|
||||
single_example("Warning", div().child(warning_example).into_any_element()),
|
||||
single_example("Create PR", div().child(pr_example).into_any_element()),
|
||||
],
|
||||
)
|
||||
.vertical(),
|
||||
])
|
||||
.into_any_element()
|
||||
Some(
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.p_4()
|
||||
.children(vec![
|
||||
example_group_with_title(
|
||||
"Basic Toast",
|
||||
vec![
|
||||
single_example("Text", div().child(text_example).into_any_element()),
|
||||
single_example(
|
||||
"Action",
|
||||
div().child(action_example).into_any_element(),
|
||||
),
|
||||
single_example("Icon", div().child(icon_example).into_any_element()),
|
||||
],
|
||||
),
|
||||
example_group_with_title(
|
||||
"Examples",
|
||||
vec![
|
||||
single_example(
|
||||
"Success",
|
||||
div().child(success_example).into_any_element(),
|
||||
),
|
||||
single_example("Error", div().child(error_example).into_any_element()),
|
||||
single_example(
|
||||
"Warning",
|
||||
div().child(warning_example).into_any_element(),
|
||||
),
|
||||
single_example("Create PR", div().child(pr_example).into_any_element()),
|
||||
],
|
||||
)
|
||||
.vertical(),
|
||||
])
|
||||
.into_any_element(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -746,8 +746,7 @@ pub struct Session {
_background_tasks: Vec<Task<()>>,
}

trait CacheableCommand: 'static + Send + Sync {
fn as_any(&self) -> &dyn Any;
trait CacheableCommand: Any + Send + Sync {
fn dyn_eq(&self, rhs: &dyn CacheableCommand) -> bool;
fn dyn_hash(&self, hasher: &mut dyn Hasher);
fn as_any_arc(self: Arc<Self>) -> Arc<dyn Any + Send + Sync>;
@@ -757,12 +756,8 @@ impl<T> CacheableCommand for T
where
T: DapCommand + PartialEq + Eq + Hash,
{
fn as_any(&self) -> &dyn Any {
self
}

fn dyn_eq(&self, rhs: &dyn CacheableCommand) -> bool {
rhs.as_any()
(rhs as &dyn Any)
.downcast_ref::<Self>()
.map_or(false, |rhs| self == rhs)
}
@@ -795,7 +790,7 @@ impl Eq for RequestSlot {}
impl Hash for RequestSlot {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.0.dyn_hash(state);
self.0.as_any().type_id().hash(state)
(&*self.0 as &dyn Any).type_id().hash(state)
}
}

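The same `Any`-supertrait change is applied to `CacheableCommand`: trait-object equality goes through `downcast_ref`, and hashing mixes in the concrete `TypeId`. A standalone sketch of that pattern with simplified names (again leaning on trait upcasting; no DAP types involved):

```rust
use std::any::Any;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

trait CacheableKey: Any {
    fn dyn_eq(&self, rhs: &dyn CacheableKey) -> bool;
    fn dyn_hash(&self, hasher: &mut dyn Hasher);
}

impl<T> CacheableKey for T
where
    T: Any + PartialEq + Hash,
{
    fn dyn_eq(&self, rhs: &dyn CacheableKey) -> bool {
        // Upcast to `dyn Any`, then compare only if the concrete types match.
        (rhs as &dyn Any)
            .downcast_ref::<Self>()
            .map_or(false, |rhs| self == rhs)
    }

    fn dyn_hash(&self, mut hasher: &mut dyn Hasher) {
        // Mixing the TypeId in keeps equal payloads of different types apart.
        self.type_id().hash(&mut hasher);
        self.hash(&mut hasher);
    }
}

fn main() {
    let a: Box<dyn CacheableKey> = Box::new(("scopes", 3_u64));
    let b: Box<dyn CacheableKey> = Box::new(("scopes", 3_u64));
    let c: Box<dyn CacheableKey> = Box::new(3_u64);
    assert!(a.dyn_eq(&*b));
    assert!(!a.dyn_eq(&*c));

    let mut hasher = DefaultHasher::new();
    a.dyn_hash(&mut hasher);
    println!("hash of a: {:x}", hasher.finish());
}
```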
@@ -1345,7 +1340,7 @@ impl Session {
|
||||
|
||||
fn invalidate_state(&mut self, key: &RequestSlot) {
|
||||
self.requests
|
||||
.entry(key.0.as_any().type_id())
|
||||
.entry((&*key.0 as &dyn Any).type_id())
|
||||
.and_modify(|request_map| {
|
||||
request_map.remove(&key);
|
||||
});
|
||||
|
||||
@@ -17,9 +17,7 @@ use gpui::{App, AsyncApp, Entity};
|
||||
use language::{
|
||||
Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind, OffsetRangeExt, PointUtf16,
|
||||
ToOffset, ToPointUtf16, Transaction, Unclipped,
|
||||
language_settings::{
|
||||
AllLanguageSettings, InlayHintKind, LanguageSettings, LspInsertMode, language_settings,
|
||||
},
|
||||
language_settings::{InlayHintKind, LanguageSettings, language_settings},
|
||||
point_from_lsp, point_to_lsp,
|
||||
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
|
||||
range_from_lsp, range_to_lsp,
|
||||
@@ -30,7 +28,6 @@ use lsp::{
|
||||
LanguageServer, LanguageServerId, LinkedEditingRangeServerCapabilities, OneOf, RenameOptions,
|
||||
ServerCapabilities,
|
||||
};
|
||||
use settings::Settings as _;
|
||||
use signature_help::{lsp_to_proto_signature, proto_to_lsp_signature};
|
||||
use std::{cmp::Reverse, mem, ops::Range, path::Path, sync::Arc};
|
||||
use text::{BufferId, LineEnding};
|
||||
@@ -977,62 +974,69 @@ async fn location_links_from_proto(
|
||||
let mut links = Vec::new();
|
||||
|
||||
for link in proto_links {
|
||||
let origin = match link.origin {
|
||||
Some(origin) => {
|
||||
let buffer_id = BufferId::new(origin.buffer_id)?;
|
||||
let buffer = lsp_store
|
||||
.update(&mut cx, |lsp_store, cx| {
|
||||
lsp_store.wait_for_remote_buffer(buffer_id, cx)
|
||||
})?
|
||||
.await?;
|
||||
let start = origin
|
||||
.start
|
||||
.and_then(deserialize_anchor)
|
||||
.ok_or_else(|| anyhow!("missing origin start"))?;
|
||||
let end = origin
|
||||
.end
|
||||
.and_then(deserialize_anchor)
|
||||
.ok_or_else(|| anyhow!("missing origin end"))?;
|
||||
buffer
|
||||
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
|
||||
.await?;
|
||||
Some(Location {
|
||||
buffer,
|
||||
range: start..end,
|
||||
})
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
|
||||
let buffer_id = BufferId::new(target.buffer_id)?;
|
||||
let buffer = lsp_store
|
||||
.update(&mut cx, |lsp_store, cx| {
|
||||
lsp_store.wait_for_remote_buffer(buffer_id, cx)
|
||||
})?
|
||||
.await?;
|
||||
let start = target
|
||||
.start
|
||||
.and_then(deserialize_anchor)
|
||||
.ok_or_else(|| anyhow!("missing target start"))?;
|
||||
let end = target
|
||||
.end
|
||||
.and_then(deserialize_anchor)
|
||||
.ok_or_else(|| anyhow!("missing target end"))?;
|
||||
buffer
|
||||
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
|
||||
.await?;
|
||||
let target = Location {
|
||||
buffer,
|
||||
range: start..end,
|
||||
};
|
||||
|
||||
links.push(LocationLink { origin, target })
|
||||
links.push(location_link_from_proto(link, &lsp_store, &mut cx).await?)
|
||||
}
|
||||
|
||||
Ok(links)
|
||||
}
|
||||
|
||||
pub async fn location_link_from_proto(
|
||||
link: proto::LocationLink,
|
||||
lsp_store: &Entity<LspStore>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<LocationLink> {
|
||||
let origin = match link.origin {
|
||||
Some(origin) => {
|
||||
let buffer_id = BufferId::new(origin.buffer_id)?;
|
||||
let buffer = lsp_store
|
||||
.update(cx, |lsp_store, cx| {
|
||||
lsp_store.wait_for_remote_buffer(buffer_id, cx)
|
||||
})?
|
||||
.await?;
|
||||
let start = origin
|
||||
.start
|
||||
.and_then(deserialize_anchor)
|
||||
.ok_or_else(|| anyhow!("missing origin start"))?;
|
||||
let end = origin
|
||||
.end
|
||||
.and_then(deserialize_anchor)
|
||||
.ok_or_else(|| anyhow!("missing origin end"))?;
|
||||
buffer
|
||||
.update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
|
||||
.await?;
|
||||
Some(Location {
|
||||
buffer,
|
||||
range: start..end,
|
||||
})
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
|
||||
let buffer_id = BufferId::new(target.buffer_id)?;
|
||||
let buffer = lsp_store
|
||||
.update(cx, |lsp_store, cx| {
|
||||
lsp_store.wait_for_remote_buffer(buffer_id, cx)
|
||||
})?
|
||||
.await?;
|
||||
let start = target
|
||||
.start
|
||||
.and_then(deserialize_anchor)
|
||||
.ok_or_else(|| anyhow!("missing target start"))?;
|
||||
let end = target
|
||||
.end
|
||||
.and_then(deserialize_anchor)
|
||||
.ok_or_else(|| anyhow!("missing target end"))?;
|
||||
buffer
|
||||
.update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
|
||||
.await?;
|
||||
let target = Location {
|
||||
buffer,
|
||||
range: start..end,
|
||||
};
|
||||
Ok(LocationLink { origin, target })
|
||||
}
|
||||
|
||||
async fn location_links_from_lsp(
|
||||
message: Option<lsp::GotoDefinitionResponse>,
|
||||
lsp_store: Entity<LspStore>,
|
||||
@@ -1115,6 +1119,65 @@ async fn location_links_from_lsp(
|
||||
Ok(definitions)
|
||||
}
|
||||
|
||||
pub async fn location_link_from_lsp(
|
||||
link: lsp::LocationLink,
|
||||
lsp_store: &Entity<LspStore>,
|
||||
buffer: &Entity<Buffer>,
|
||||
server_id: LanguageServerId,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<LocationLink> {
|
||||
let (lsp_adapter, language_server) =
|
||||
language_server_for_buffer(&lsp_store, &buffer, server_id, cx)?;
|
||||
|
||||
let (origin_range, target_uri, target_range) = (
|
||||
link.origin_selection_range,
|
||||
link.target_uri,
|
||||
link.target_selection_range,
|
||||
);
|
||||
|
||||
let target_buffer_handle = lsp_store
|
||||
.update(cx, |lsp_store, cx| {
|
||||
lsp_store.open_local_buffer_via_lsp(
|
||||
target_uri,
|
||||
language_server.server_id(),
|
||||
lsp_adapter.name.clone(),
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
cx.update(|cx| {
|
||||
let origin_location = origin_range.map(|origin_range| {
|
||||
let origin_buffer = buffer.read(cx);
|
||||
let origin_start =
|
||||
origin_buffer.clip_point_utf16(point_from_lsp(origin_range.start), Bias::Left);
|
||||
let origin_end =
|
||||
origin_buffer.clip_point_utf16(point_from_lsp(origin_range.end), Bias::Left);
|
||||
Location {
|
||||
buffer: buffer.clone(),
|
||||
range: origin_buffer.anchor_after(origin_start)
|
||||
..origin_buffer.anchor_before(origin_end),
|
||||
}
|
||||
});
|
||||
|
||||
let target_buffer = target_buffer_handle.read(cx);
|
||||
let target_start =
|
||||
target_buffer.clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
|
||||
let target_end =
|
||||
target_buffer.clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
|
||||
let target_location = Location {
|
||||
buffer: target_buffer_handle,
|
||||
range: target_buffer.anchor_after(target_start)
|
||||
..target_buffer.anchor_before(target_end),
|
||||
};
|
||||
|
||||
LocationLink {
|
||||
origin: origin_location,
|
||||
target: target_location,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn location_links_to_proto(
|
||||
links: Vec<LocationLink>,
|
||||
lsp_store: &mut LspStore,
|
||||
@@ -1123,45 +1186,52 @@ fn location_links_to_proto(
|
||||
) -> Vec<proto::LocationLink> {
|
||||
links
|
||||
.into_iter()
|
||||
.map(|definition| {
|
||||
let origin = definition.origin.map(|origin| {
|
||||
lsp_store
|
||||
.buffer_store()
|
||||
.update(cx, |buffer_store, cx| {
|
||||
buffer_store.create_buffer_for_peer(&origin.buffer, peer_id, cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
let buffer_id = origin.buffer.read(cx).remote_id().into();
|
||||
proto::Location {
|
||||
start: Some(serialize_anchor(&origin.range.start)),
|
||||
end: Some(serialize_anchor(&origin.range.end)),
|
||||
buffer_id,
|
||||
}
|
||||
});
|
||||
|
||||
lsp_store
|
||||
.buffer_store()
|
||||
.update(cx, |buffer_store, cx| {
|
||||
buffer_store.create_buffer_for_peer(&definition.target.buffer, peer_id, cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
let buffer_id = definition.target.buffer.read(cx).remote_id().into();
|
||||
let target = proto::Location {
|
||||
start: Some(serialize_anchor(&definition.target.range.start)),
|
||||
end: Some(serialize_anchor(&definition.target.range.end)),
|
||||
buffer_id,
|
||||
};
|
||||
|
||||
proto::LocationLink {
|
||||
origin,
|
||||
target: Some(target),
|
||||
}
|
||||
})
|
||||
.map(|definition| location_link_to_proto(definition, lsp_store, peer_id, cx))
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn location_link_to_proto(
|
||||
location: LocationLink,
|
||||
lsp_store: &mut LspStore,
|
||||
peer_id: PeerId,
|
||||
cx: &mut App,
|
||||
) -> proto::LocationLink {
|
||||
let origin = location.origin.map(|origin| {
|
||||
lsp_store
|
||||
.buffer_store()
|
||||
.update(cx, |buffer_store, cx| {
|
||||
buffer_store.create_buffer_for_peer(&origin.buffer, peer_id, cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
let buffer_id = origin.buffer.read(cx).remote_id().into();
|
||||
proto::Location {
|
||||
start: Some(serialize_anchor(&origin.range.start)),
|
||||
end: Some(serialize_anchor(&origin.range.end)),
|
||||
buffer_id,
|
||||
}
|
||||
});
|
||||
|
||||
lsp_store
|
||||
.buffer_store()
|
||||
.update(cx, |buffer_store, cx| {
|
||||
buffer_store.create_buffer_for_peer(&location.target.buffer, peer_id, cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
let buffer_id = location.target.buffer.read(cx).remote_id().into();
|
||||
let target = proto::Location {
|
||||
start: Some(serialize_anchor(&location.target.range.start)),
|
||||
end: Some(serialize_anchor(&location.target.range.end)),
|
||||
buffer_id,
|
||||
};
|
||||
|
||||
proto::LocationLink {
|
||||
origin,
|
||||
target: Some(target),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl LspCommand for GetReferences {
|
||||
type Response = Vec<Location>;
|
||||
@@ -2088,7 +2158,7 @@ impl LspCommand for GetCompletions {
|
||||
.map(Arc::new);
|
||||
|
||||
let mut completion_edits = Vec::new();
|
||||
buffer.update(&mut cx, |buffer, cx| {
|
||||
buffer.update(&mut cx, |buffer, _cx| {
|
||||
let snapshot = buffer.snapshot();
|
||||
let clipped_position = buffer.clip_point_utf16(Unclipped(self.position), Bias::Left);
|
||||
|
||||
@@ -2125,21 +2195,11 @@ impl LspCommand for GetCompletions {
|
||||
// If the language server provides a range to overwrite, then
|
||||
// check that the range is valid.
|
||||
Some(completion_text_edit) => {
|
||||
let completion_mode = AllLanguageSettings::get_global(cx)
|
||||
.defaults
|
||||
.completions
|
||||
.lsp_insert_mode;
|
||||
|
||||
match parse_completion_text_edit(
|
||||
&completion_text_edit,
|
||||
&snapshot,
|
||||
completion_mode,
|
||||
) {
|
||||
match parse_completion_text_edit(&completion_text_edit, &snapshot) {
|
||||
Some(edit) => edit,
|
||||
None => return false,
|
||||
}
|
||||
}
|
||||
|
||||
// If the language server does not provide a range, then infer
|
||||
// the range based on the syntax tree.
|
||||
None => {
|
||||
@@ -2191,7 +2251,12 @@ impl LspCommand for GetCompletions {
|
||||
.as_ref()
|
||||
.unwrap_or(&lsp_completion.label)
|
||||
.clone();
|
||||
(range, text)
|
||||
|
||||
ParsedCompletionEdit {
|
||||
replace_range: range,
|
||||
insert_range: None,
|
||||
new_text: text,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2207,8 +2272,8 @@ impl LspCommand for GetCompletions {
|
||||
Ok(completions
|
||||
.into_iter()
|
||||
.zip(completion_edits)
|
||||
.map(|(mut lsp_completion, (old_range, mut new_text))| {
|
||||
LineEnding::normalize(&mut new_text);
|
||||
.map(|(mut lsp_completion, mut edit)| {
|
||||
LineEnding::normalize(&mut edit.new_text);
|
||||
if lsp_completion.data.is_none() {
|
||||
if let Some(default_data) = lsp_defaults
|
||||
.as_ref()
|
||||
@@ -2220,9 +2285,10 @@ impl LspCommand for GetCompletions {
|
||||
}
|
||||
}
|
||||
CoreCompletion {
|
||||
old_range,
|
||||
new_text,
|
||||
replace_range: edit.replace_range,
|
||||
new_text: edit.new_text,
|
||||
source: CompletionSource::Lsp {
|
||||
insert_range: edit.insert_range,
|
||||
server_id,
|
||||
lsp_completion: Box::new(lsp_completion),
|
||||
lsp_defaults: lsp_defaults.clone(),
|
||||
@@ -2312,91 +2378,53 @@ impl LspCommand for GetCompletions {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ParsedCompletionEdit {
|
||||
pub replace_range: Range<Anchor>,
|
||||
pub insert_range: Option<Range<Anchor>>,
|
||||
pub new_text: String,
|
||||
}
|
||||
|
||||
pub(crate) fn parse_completion_text_edit(
|
||||
edit: &lsp::CompletionTextEdit,
|
||||
snapshot: &BufferSnapshot,
|
||||
completion_mode: LspInsertMode,
|
||||
) -> Option<(Range<Anchor>, String)> {
|
||||
match edit {
|
||||
lsp::CompletionTextEdit::Edit(edit) => {
|
||||
let range = range_from_lsp(edit.range);
|
||||
let start = snapshot.clip_point_utf16(range.start, Bias::Left);
|
||||
let end = snapshot.clip_point_utf16(range.end, Bias::Left);
|
||||
if start != range.start.0 || end != range.end.0 {
|
||||
log::info!("completion out of expected range");
|
||||
None
|
||||
} else {
|
||||
Some((
|
||||
snapshot.anchor_before(start)..snapshot.anchor_after(end),
|
||||
edit.new_text.clone(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
) -> Option<ParsedCompletionEdit> {
|
||||
let (replace_range, insert_range, new_text) = match edit {
|
||||
lsp::CompletionTextEdit::Edit(edit) => (edit.range, None, &edit.new_text),
|
||||
lsp::CompletionTextEdit::InsertAndReplace(edit) => {
|
||||
let replace = match completion_mode {
|
||||
LspInsertMode::Insert => false,
|
||||
LspInsertMode::Replace => true,
|
||||
LspInsertMode::ReplaceSubsequence => {
|
||||
let range_to_replace = range_from_lsp(edit.replace);
|
||||
(edit.replace, Some(edit.insert), &edit.new_text)
|
||||
}
|
||||
};
|
||||
|
||||
let start = snapshot.clip_point_utf16(range_to_replace.start, Bias::Left);
|
||||
let end = snapshot.clip_point_utf16(range_to_replace.end, Bias::Left);
|
||||
if start != range_to_replace.start.0 || end != range_to_replace.end.0 {
|
||||
false
|
||||
} else {
|
||||
let mut completion_text = edit.new_text.chars();
|
||||
|
||||
let mut text_to_replace = snapshot.chars_for_range(
|
||||
snapshot.anchor_before(start)..snapshot.anchor_after(end),
|
||||
);
|
||||
|
||||
// is `text_to_replace` a subsequence of `completion_text`
|
||||
text_to_replace.all(|needle_ch| {
|
||||
completion_text.any(|haystack_ch| haystack_ch == needle_ch)
|
||||
})
|
||||
}
|
||||
}
|
||||
LspInsertMode::ReplaceSuffix => {
|
||||
let range_after_cursor = lsp::Range {
|
||||
start: edit.insert.end,
|
||||
end: edit.replace.end,
|
||||
};
|
||||
let range_after_cursor = range_from_lsp(range_after_cursor);
|
||||
|
||||
let start = snapshot.clip_point_utf16(range_after_cursor.start, Bias::Left);
|
||||
let end = snapshot.clip_point_utf16(range_after_cursor.end, Bias::Left);
|
||||
if start != range_after_cursor.start.0 || end != range_after_cursor.end.0 {
|
||||
false
|
||||
} else {
|
||||
let text_after_cursor = snapshot
|
||||
.text_for_range(
|
||||
snapshot.anchor_before(start)..snapshot.anchor_after(end),
|
||||
)
|
||||
.collect::<String>();
|
||||
edit.new_text.ends_with(&text_after_cursor)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let range = range_from_lsp(match replace {
|
||||
true => edit.replace,
|
||||
false => edit.insert,
|
||||
});
|
||||
let replace_range = {
|
||||
let range = range_from_lsp(replace_range);
|
||||
let start = snapshot.clip_point_utf16(range.start, Bias::Left);
|
||||
let end = snapshot.clip_point_utf16(range.end, Bias::Left);
|
||||
if start != range.start.0 || end != range.end.0 {
|
||||
log::info!("completion out of expected range");
|
||||
return None;
|
||||
}
|
||||
snapshot.anchor_before(start)..snapshot.anchor_after(end)
|
||||
};
|
||||
|
||||
let insert_range = match insert_range {
|
||||
None => None,
|
||||
Some(insert_range) => {
|
||||
let range = range_from_lsp(insert_range);
|
||||
let start = snapshot.clip_point_utf16(range.start, Bias::Left);
|
||||
let end = snapshot.clip_point_utf16(range.end, Bias::Left);
|
||||
if start != range.start.0 || end != range.end.0 {
|
||||
log::info!("completion out of expected range");
|
||||
None
|
||||
} else {
|
||||
Some((
|
||||
snapshot.anchor_before(start)..snapshot.anchor_after(end),
|
||||
edit.new_text.clone(),
|
||||
))
|
||||
log::info!("completion (insert) out of expected range");
|
||||
return None;
|
||||
}
|
||||
Some(snapshot.anchor_before(start)..snapshot.anchor_after(end))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Some(ParsedCompletionEdit {
insert_range: insert_range,
replace_range: replace_range,
new_text: new_text.clone(),
})
}

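With both ranges surfaced on `ParsedCompletionEdit`, the caller can decide between inserting and replacing according to the user's `LspInsertMode`. A simplified, string-based sketch of that decision, mirroring the modes handled by the code this hunk removes (`range_to_overwrite` is illustrative, not the editor's actual API):

```rust
use std::ops::Range;

#[allow(dead_code)]
enum LspInsertMode {
    Insert,
    Replace,
    ReplaceSubsequence,
    ReplaceSuffix,
}

/// Simplified stand-in for the parsed LSP completion edit.
struct ParsedEdit {
    insert_range: Range<usize>,
    replace_range: Range<usize>,
    new_text: String,
}

/// Pick which range the completion should overwrite, mirroring the modes the
/// old `parse_completion_text_edit` handled inline.
fn range_to_overwrite(edit: &ParsedEdit, mode: LspInsertMode, buffer: &str) -> Range<usize> {
    let replace = match mode {
        LspInsertMode::Insert => false,
        LspInsertMode::Replace => true,
        // Replace only if the text being replaced is a subsequence of the
        // completion, e.g. completing "ident" over "idt".
        LspInsertMode::ReplaceSubsequence => {
            let mut completion = edit.new_text.chars();
            buffer[edit.replace_range.clone()]
                .chars()
                .all(|needle| completion.any(|hay| hay == needle))
        }
        // Replace only if the completion already ends with whatever sits
        // between the cursor and the end of the replace range.
        LspInsertMode::ReplaceSuffix => {
            let after_cursor = &buffer[edit.insert_range.end..edit.replace_range.end];
            edit.new_text.ends_with(after_cursor)
        }
    };
    if replace {
        edit.replace_range.clone()
    } else {
        edit.insert_range.clone()
    }
}

fn main() {
    let buffer = "use std_old;";
    let edit = ParsedEdit {
        insert_range: 4..7,   // "std", up to the cursor
        replace_range: 4..11, // "std_old", the whole identifier
        new_text: "std_new".into(),
    };
    let range = range_to_overwrite(&edit, LspInsertMode::ReplaceSuffix, buffer);
    // "std_new" does not end with "_old", so only the insert range is used.
    assert_eq!(range, 4..7);
}
```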
#[async_trait(?Send)]
|
||||
|
||||
@@ -39,8 +39,7 @@ use language::{
|
||||
LanguageToolchainStore, LocalFile, LspAdapter, LspAdapterDelegate, Patch, PointUtf16,
|
||||
TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped,
|
||||
language_settings::{
|
||||
AllLanguageSettings, FormatOnSave, Formatter, LanguageSettings, LspInsertMode,
|
||||
SelectedFormatter, language_settings,
|
||||
FormatOnSave, Formatter, LanguageSettings, SelectedFormatter, language_settings,
|
||||
},
|
||||
point_to_lsp,
|
||||
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
|
||||
@@ -280,7 +279,7 @@ impl LocalLspStore {
|
||||
let initialization_params = cx.update(|cx| {
|
||||
let mut params = language_server.default_initialize_params(cx);
|
||||
params.initialization_options = initialization_options;
|
||||
adapter.adapter.prepare_initialize_params(params)
|
||||
adapter.adapter.prepare_initialize_params(params, cx)
|
||||
})??;
|
||||
|
||||
Self::setup_lsp_messages(
|
||||
@@ -2635,7 +2634,8 @@ impl LocalLspStore {
|
||||
.into_iter()
|
||||
.map(|edit| (range_from_lsp(edit.range), edit.new_text))
|
||||
.collect::<Vec<_>>();
|
||||
lsp_edits.sort_by_key(|(range, _)| range.start);
|
||||
|
||||
lsp_edits.sort_by_key(|(range, _)| (range.start, range.end));
|
||||
|
||||
let mut lsp_edits = lsp_edits.into_iter().peekable();
|
||||
let mut edits = Vec::new();
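Keying the sort on `(start, end)` instead of `start` alone makes the edit order deterministic when two LSP edits begin at the same offset; a tiny illustration with plain `usize` ranges:

```rust
fn main() {
    // Two edits starting at the same offset: an insertion (empty range) and a
    // replacement. Keying on (start, end) orders the insertion first.
    let mut lsp_edits = vec![(10..14, "replacement"), (10..10, "insertion"), (2..5, "early")];
    lsp_edits.sort_by_key(|(range, _)| (range.start, range.end));
    let order: Vec<&str> = lsp_edits.iter().map(|(_, label)| *label).collect();
    assert_eq!(order, ["early", "insertion", "replacement"]);
}
```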
|
||||
@@ -3427,6 +3427,9 @@ impl LspStore {
|
||||
|
||||
client.add_entity_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
|
||||
client.add_entity_request_handler(Self::handle_lsp_command::<lsp_ext_command::OpenDocs>);
|
||||
client.add_entity_request_handler(
|
||||
Self::handle_lsp_command::<lsp_ext_command::GetLspRunnables>,
|
||||
);
|
||||
client.add_entity_request_handler(
|
||||
Self::handle_lsp_command::<lsp_ext_command::SwitchSourceHeader>,
|
||||
);
|
||||
@@ -5132,7 +5135,6 @@ impl LspStore {
|
||||
&buffer_snapshot,
|
||||
completions.clone(),
|
||||
completion_index,
|
||||
cx,
|
||||
)
|
||||
.await
|
||||
.log_err()
|
||||
@@ -5166,7 +5168,6 @@ impl LspStore {
|
||||
snapshot: &BufferSnapshot,
|
||||
completions: Rc<RefCell<Box<[Completion]>>>,
|
||||
completion_index: usize,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<()> {
|
||||
let server_id = server.server_id();
|
||||
let can_resolve = server
|
||||
@@ -5204,41 +5205,38 @@ impl LspStore {
|
||||
};
|
||||
let resolved_completion = request.await?;
|
||||
|
||||
let mut updated_insert_range = None;
|
||||
if let Some(text_edit) = resolved_completion.text_edit.as_ref() {
|
||||
// Technically we don't have to parse the whole `text_edit`, since the only
|
||||
// language server we currently use that does update `text_edit` in `completionItem/resolve`
|
||||
// is `typescript-language-server` and they only update `text_edit.new_text`.
|
||||
// But we should not rely on that.
|
||||
let completion_mode = cx
|
||||
.read_global(|_: &SettingsStore, cx| {
|
||||
AllLanguageSettings::get_global(cx)
|
||||
.defaults
|
||||
.completions
|
||||
.lsp_insert_mode
|
||||
})
|
||||
.unwrap_or(LspInsertMode::Insert);
|
||||
let edit = parse_completion_text_edit(text_edit, snapshot, completion_mode);
|
||||
let edit = parse_completion_text_edit(text_edit, snapshot);
|
||||
|
||||
if let Some((old_range, mut new_text)) = edit {
|
||||
LineEnding::normalize(&mut new_text);
|
||||
if let Some(mut parsed_edit) = edit {
|
||||
LineEnding::normalize(&mut parsed_edit.new_text);
|
||||
|
||||
let mut completions = completions.borrow_mut();
|
||||
let completion = &mut completions[completion_index];
|
||||
|
||||
completion.new_text = new_text;
|
||||
completion.old_range = old_range;
|
||||
completion.new_text = parsed_edit.new_text;
|
||||
completion.replace_range = parsed_edit.replace_range;
|
||||
|
||||
updated_insert_range = parsed_edit.insert_range;
|
||||
}
|
||||
}
|
||||
|
||||
let mut completions = completions.borrow_mut();
|
||||
let completion = &mut completions[completion_index];
|
||||
if let CompletionSource::Lsp {
|
||||
insert_range,
|
||||
lsp_completion,
|
||||
resolved,
|
||||
server_id: completion_server_id,
|
||||
..
|
||||
} = &mut completion.source
|
||||
{
|
||||
*insert_range = updated_insert_range;
|
||||
if *resolved {
|
||||
return Ok(());
|
||||
}
|
||||
@@ -5380,12 +5378,19 @@ impl LspStore {
|
||||
let completion = &mut completions[completion_index];
|
||||
completion.documentation = Some(documentation);
|
||||
if let CompletionSource::Lsp {
|
||||
insert_range,
|
||||
lsp_completion,
|
||||
resolved,
|
||||
server_id: completion_server_id,
|
||||
lsp_defaults: _,
|
||||
} = &mut completion.source
|
||||
{
|
||||
let completion_insert_range = response
|
||||
.old_insert_start
|
||||
.and_then(deserialize_anchor)
|
||||
.zip(response.old_insert_end.and_then(deserialize_anchor));
|
||||
*insert_range = completion_insert_range.map(|(start, end)| start..end);
|
||||
|
||||
if *resolved {
|
||||
return Ok(());
|
||||
}
|
||||
@@ -5397,14 +5402,14 @@ impl LspStore {
|
||||
*resolved = true;
|
||||
}
|
||||
|
||||
let old_range = response
|
||||
.old_start
|
||||
let replace_range = response
|
||||
.old_replace_start
|
||||
.and_then(deserialize_anchor)
|
||||
.zip(response.old_end.and_then(deserialize_anchor));
|
||||
if let Some((old_start, old_end)) = old_range {
|
||||
.zip(response.old_replace_end.and_then(deserialize_anchor));
|
||||
if let Some((old_replace_start, old_replace_end)) = replace_range {
|
||||
if !response.new_text.is_empty() {
|
||||
completion.new_text = response.new_text;
|
||||
completion.old_range = old_start..old_end;
|
||||
completion.replace_range = old_replace_start..old_replace_end;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5429,7 +5434,7 @@ impl LspStore {
|
||||
project_id,
|
||||
buffer_id: buffer_id.into(),
|
||||
completion: Some(Self::serialize_completion(&CoreCompletion {
|
||||
old_range: completion.old_range,
|
||||
replace_range: completion.replace_range,
|
||||
new_text: completion.new_text,
|
||||
source: completion.source,
|
||||
})),
|
||||
@@ -5473,7 +5478,6 @@ impl LspStore {
|
||||
&snapshot,
|
||||
completions.clone(),
|
||||
completion_index,
|
||||
cx,
|
||||
)
|
||||
.await
|
||||
.context("resolving completion")?;
|
||||
@@ -5501,7 +5505,7 @@ impl LspStore {
|
||||
buffer.start_transaction();
|
||||
|
||||
for (range, text) in edits {
|
||||
let primary = &completion.old_range;
|
||||
let primary = &completion.replace_range;
|
||||
let start_within = primary.start.cmp(&range.start, buffer).is_le()
|
||||
&& primary.end.cmp(&range.start, buffer).is_ge();
|
||||
let end_within = range.start.cmp(&primary.end, buffer).is_le()
|
||||
@@ -7705,8 +7709,10 @@ impl LspStore {
|
||||
|
||||
// If we have a new buffer_id, that means we're talking to a new client
|
||||
// and want to check for new text_edits in the completion too.
|
||||
let mut old_start = None;
|
||||
let mut old_end = None;
|
||||
let mut old_replace_start = None;
|
||||
let mut old_replace_end = None;
|
||||
let mut old_insert_start = None;
|
||||
let mut old_insert_end = None;
|
||||
let mut new_text = String::default();
|
||||
if let Ok(buffer_id) = BufferId::new(envelope.payload.buffer_id) {
|
||||
let buffer_snapshot = this.update(&mut cx, |this, cx| {
|
||||
@@ -7715,23 +7721,18 @@ impl LspStore {
|
||||
})??;
|
||||
|
||||
if let Some(text_edit) = completion.text_edit.as_ref() {
|
||||
let completion_mode = cx
|
||||
.read_global(|_: &SettingsStore, cx| {
|
||||
AllLanguageSettings::get_global(cx)
|
||||
.defaults
|
||||
.completions
|
||||
.lsp_insert_mode
|
||||
})
|
||||
.unwrap_or(LspInsertMode::Insert);
|
||||
let edit = parse_completion_text_edit(text_edit, &buffer_snapshot);
|
||||
|
||||
let edit = parse_completion_text_edit(text_edit, &buffer_snapshot, completion_mode);
|
||||
if let Some(mut edit) = edit {
|
||||
LineEnding::normalize(&mut edit.new_text);
|
||||
|
||||
if let Some((old_range, mut text_edit_new_text)) = edit {
|
||||
LineEnding::normalize(&mut text_edit_new_text);
|
||||
|
||||
new_text = text_edit_new_text;
|
||||
old_start = Some(serialize_anchor(&old_range.start));
|
||||
old_end = Some(serialize_anchor(&old_range.end));
|
||||
new_text = edit.new_text;
|
||||
old_replace_start = Some(serialize_anchor(&edit.replace_range.start));
|
||||
old_replace_end = Some(serialize_anchor(&edit.replace_range.end));
|
||||
if let Some(insert_range) = edit.insert_range {
|
||||
old_insert_start = Some(serialize_anchor(&insert_range.start));
|
||||
old_insert_end = Some(serialize_anchor(&insert_range.end));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
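The hunk above now derives both a replace range and an optional insert range from the completion's text edit. That mirrors LSP's InsertReplaceEdit, where both ranges start at the same position but the replace range also consumes the text that follows the cursor; the `completions.lsp_insert_mode` setting decides which of the two Zed applies. A minimal sketch of that LSP-side shape, assuming the `lsp` crate re-exports `lsp_types` (as the `lsp::TextEdit` / `lsp::Range` usages elsewhere in this diff suggest) and using made-up positions:

// Illustrative only: shows the shape `parse_completion_text_edit` consumes above.
// The buffer contents and positions are hypothetical.
use lsp::{CompletionTextEdit, InsertReplaceEdit, Position, Range};

fn example_insert_replace() -> CompletionTextEdit {
    // Buffer line: "foo.quxbar" with the cursor after "qux" (character 7).
    // - `insert` covers only the typed prefix "qux" (characters 4..7).
    // - `replace` additionally swallows the trailing "bar" (characters 4..10).
    CompletionTextEdit::InsertAndReplace(InsertReplaceEdit {
        new_text: "quux".to_string(),
        insert: Range::new(Position::new(0, 4), Position::new(0, 7)),
        replace: Range::new(Position::new(0, 4), Position::new(0, 10)),
    })
}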
|
||||
@@ -7739,10 +7740,12 @@ impl LspStore {
|
||||
Ok(proto::ResolveCompletionDocumentationResponse {
|
||||
documentation,
|
||||
documentation_is_markdown,
|
||||
old_start,
|
||||
old_end,
|
||||
old_replace_start,
|
||||
old_replace_end,
|
||||
new_text,
|
||||
lsp_completion,
|
||||
old_insert_start,
|
||||
old_insert_end,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -8044,7 +8047,7 @@ impl LspStore {
|
||||
this.apply_additional_edits_for_completion(
|
||||
buffer,
|
||||
Rc::new(RefCell::new(Box::new([Completion {
|
||||
old_range: completion.old_range,
|
||||
replace_range: completion.replace_range,
|
||||
new_text: completion.new_text,
|
||||
source: completion.source,
|
||||
documentation: None,
|
||||
@@ -8367,7 +8370,6 @@ impl LspStore {
|
||||
self.buffer_store.update(cx, |buffer_store, cx| {
|
||||
for buffer in buffer_store.buffers() {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
// TODO kb clean inlays
|
||||
buffer.update_diagnostics(server_id, DiagnosticSet::new([], buffer), cx);
|
||||
buffer.set_completion_triggers(server_id, Default::default(), cx);
|
||||
});
|
||||
@@ -9100,18 +9102,26 @@ impl LspStore {
|
||||
|
||||
pub(crate) fn serialize_completion(completion: &CoreCompletion) -> proto::Completion {
|
||||
let mut serialized_completion = proto::Completion {
|
||||
old_start: Some(serialize_anchor(&completion.old_range.start)),
|
||||
old_end: Some(serialize_anchor(&completion.old_range.end)),
|
||||
old_replace_start: Some(serialize_anchor(&completion.replace_range.start)),
|
||||
old_replace_end: Some(serialize_anchor(&completion.replace_range.end)),
|
||||
new_text: completion.new_text.clone(),
|
||||
..proto::Completion::default()
|
||||
};
|
||||
match &completion.source {
|
||||
CompletionSource::Lsp {
|
||||
insert_range,
|
||||
server_id,
|
||||
lsp_completion,
|
||||
lsp_defaults,
|
||||
resolved,
|
||||
} => {
|
||||
let (old_insert_start, old_insert_end) = insert_range
|
||||
.as_ref()
|
||||
.map(|range| (serialize_anchor(&range.start), serialize_anchor(&range.end)))
|
||||
.unzip();
|
||||
|
||||
serialized_completion.old_insert_start = old_insert_start;
|
||||
serialized_completion.old_insert_end = old_insert_end;
|
||||
serialized_completion.source = proto::completion::Source::Lsp as i32;
|
||||
serialized_completion.server_id = server_id.0 as u64;
|
||||
serialized_completion.lsp_completion = serde_json::to_vec(lsp_completion).unwrap();
|
||||
@@ -9139,20 +9149,31 @@ impl LspStore {
|
||||
}
|
||||
|
||||
pub(crate) fn deserialize_completion(completion: proto::Completion) -> Result<CoreCompletion> {
|
||||
let old_start = completion
|
||||
.old_start
|
||||
let old_replace_start = completion
|
||||
.old_replace_start
|
||||
.and_then(deserialize_anchor)
|
||||
.context("invalid old start")?;
|
||||
let old_end = completion
|
||||
.old_end
|
||||
let old_replace_end = completion
|
||||
.old_replace_end
|
||||
.and_then(deserialize_anchor)
|
||||
.context("invalid old end")?;
|
||||
let insert_range = {
|
||||
match completion.old_insert_start.zip(completion.old_insert_end) {
|
||||
Some((start, end)) => {
|
||||
let start = deserialize_anchor(start).context("invalid insert old start")?;
|
||||
let end = deserialize_anchor(end).context("invalid insert old end")?;
|
||||
Some(start..end)
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
};
|
||||
Ok(CoreCompletion {
|
||||
old_range: old_start..old_end,
|
||||
replace_range: old_replace_start..old_replace_end,
|
||||
new_text: completion.new_text,
|
||||
source: match proto::completion::Source::from_i32(completion.source) {
|
||||
Some(proto::completion::Source::Custom) => CompletionSource::Custom,
|
||||
Some(proto::completion::Source::Lsp) => CompletionSource::Lsp {
|
||||
insert_range,
|
||||
server_id: LanguageServerId::from_proto(completion.server_id),
|
||||
lsp_completion: serde_json::from_slice(&completion.lsp_completion)?,
|
||||
lsp_defaults: completion
|
||||
@@ -9341,7 +9362,7 @@ async fn populate_labels_for_completions(
|
||||
completions.push(Completion {
|
||||
label,
|
||||
documentation,
|
||||
old_range: completion.old_range,
|
||||
replace_range: completion.replace_range,
|
||||
new_text: completion.new_text,
|
||||
insert_text_mode: lsp_completion.insert_text_mode,
|
||||
source: completion.source,
|
||||
@@ -9355,7 +9376,7 @@ async fn populate_labels_for_completions(
|
||||
completions.push(Completion {
|
||||
label,
|
||||
documentation: None,
|
||||
old_range: completion.old_range,
|
||||
replace_range: completion.replace_range,
|
||||
new_text: completion.new_text,
|
||||
source: completion.source,
|
||||
insert_text_mode: None,
|
||||
|
||||
@@ -1,12 +1,27 @@
|
||||
use crate::{lsp_command::LspCommand, lsp_store::LspStore, make_text_document_identifier};
|
||||
use crate::{
|
||||
LocationLink,
|
||||
lsp_command::{
|
||||
LspCommand, location_link_from_lsp, location_link_from_proto, location_link_to_proto,
|
||||
},
|
||||
lsp_store::LspStore,
|
||||
make_text_document_identifier,
|
||||
};
|
||||
use anyhow::{Context as _, Result};
|
||||
use async_trait::async_trait;
|
||||
use collections::HashMap;
|
||||
use gpui::{App, AsyncApp, Entity};
|
||||
use language::{Buffer, point_to_lsp, proto::deserialize_anchor};
|
||||
use language::{
|
||||
Buffer, point_to_lsp,
|
||||
proto::{deserialize_anchor, serialize_anchor},
|
||||
};
|
||||
use lsp::{LanguageServer, LanguageServerId};
|
||||
use rpc::proto::{self, PeerId};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{path::Path, sync::Arc};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use task::TaskTemplate;
|
||||
use text::{BufferId, PointUtf16, ToPointUtf16};
|
||||
|
||||
pub enum LspExpandMacro {}
|
||||
@@ -363,3 +378,245 @@ impl LspCommand for SwitchSourceHeader {
|
||||
BufferId::new(message.buffer_id)
|
||||
}
|
||||
}
|
||||
|
||||
// https://rust-analyzer.github.io/book/contributing/lsp-extensions.html#runnables
|
||||
// Taken from https://github.com/rust-lang/rust-analyzer/blob/a73a37a757a58b43a796d3eb86a1f7dfd0036659/crates/rust-analyzer/src/lsp/ext.rs#L425-L489
|
||||
pub enum Runnables {}
|
||||
|
||||
impl lsp::request::Request for Runnables {
|
||||
type Params = RunnablesParams;
|
||||
type Result = Vec<Runnable>;
|
||||
const METHOD: &'static str = "experimental/runnables";
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct RunnablesParams {
    pub text_document: lsp::TextDocumentIdentifier,
    pub position: Option<lsp::Position>,
}

#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Runnable {
    pub label: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub location: Option<lsp::LocationLink>,
    pub kind: RunnableKind,
    pub args: RunnableArgs,
}

#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum RunnableArgs {
    Cargo(CargoRunnableArgs),
    Shell(ShellRunnableArgs),
}

#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "lowercase")]
pub enum RunnableKind {
    Cargo,
    Shell,
}

#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct CargoRunnableArgs {
    #[serde(skip_serializing_if = "HashMap::is_empty")]
    pub environment: HashMap<String, String>,
    pub cwd: PathBuf,
    /// Command to be executed instead of cargo
    pub override_cargo: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub workspace_root: Option<PathBuf>,
    // command, --package and --lib stuff
    pub cargo_args: Vec<String>,
    // stuff after --
    pub executable_args: Vec<String>,
}

#[derive(Deserialize, Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ShellRunnableArgs {
    #[serde(skip_serializing_if = "HashMap::is_empty")]
    pub environment: HashMap<String, String>,
    pub cwd: PathBuf,
    pub program: String,
    pub args: Vec<String>,
}
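The structs above mirror rust-analyzer's `experimental/runnables` payload. A sketch of what a typical Cargo runnable might deserialize into; the JSON values are invented for illustration, but the field presence follows the serde attributes above:

// A sketch, not captured from rust-analyzer: the values are made up, the shape
// follows the serde attributes on the types defined above.
fn parse_example_runnable() -> serde_json::Result<Runnable> {
    let raw = serde_json::json!({
        "label": "cargo test -p my_crate my_test",
        "kind": "cargo",
        "args": {
            "environment": {},
            "cwd": "/path/to/my_crate",
            "cargoArgs": ["test", "--package", "my_crate"],
            "executableArgs": ["my_test", "--nocapture"]
        }
    });
    // `location` is omitted (an Option), `overrideCargo`/`workspaceRoot` fall back
    // to None, and the untagged `RunnableArgs` resolves to the Cargo variant
    // because `cargoArgs`/`executableArgs` are present.
    serde_json::from_value(raw)
}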
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct GetLspRunnables {
|
||||
pub buffer_id: BufferId,
|
||||
pub position: Option<text::Anchor>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct LspRunnables {
|
||||
pub runnables: Vec<(Option<LocationLink>, TaskTemplate)>,
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl LspCommand for GetLspRunnables {
|
||||
type Response = LspRunnables;
|
||||
type LspRequest = Runnables;
|
||||
type ProtoRequest = proto::LspExtRunnables;
|
||||
|
||||
fn display_name(&self) -> &str {
|
||||
"LSP Runnables"
|
||||
}
|
||||
|
||||
fn to_lsp(
|
||||
&self,
|
||||
path: &Path,
|
||||
buffer: &Buffer,
|
||||
_: &Arc<LanguageServer>,
|
||||
_: &App,
|
||||
) -> Result<RunnablesParams> {
|
||||
let url = match lsp::Url::from_file_path(path) {
|
||||
Ok(url) => url,
|
||||
Err(()) => anyhow::bail!("Failed to parse path {path:?} as lsp::Url"),
|
||||
};
|
||||
Ok(RunnablesParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(url),
|
||||
position: self
|
||||
.position
|
||||
.map(|anchor| point_to_lsp(anchor.to_point_utf16(&buffer.snapshot()))),
|
||||
})
|
||||
}
|
||||
|
||||
async fn response_from_lsp(
|
||||
self,
|
||||
lsp_runnables: Vec<Runnable>,
|
||||
lsp_store: Entity<LspStore>,
|
||||
buffer: Entity<Buffer>,
|
||||
server_id: LanguageServerId,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<LspRunnables> {
|
||||
let mut runnables = Vec::with_capacity(lsp_runnables.len());
|
||||
|
||||
for runnable in lsp_runnables {
|
||||
let location = match runnable.location {
|
||||
Some(location) => Some(
|
||||
location_link_from_lsp(location, &lsp_store, &buffer, server_id, &mut cx)
|
||||
.await?,
|
||||
),
|
||||
None => None,
|
||||
};
|
||||
let mut task_template = TaskTemplate::default();
|
||||
task_template.label = runnable.label;
|
||||
match runnable.args {
|
||||
RunnableArgs::Cargo(cargo) => {
|
||||
match cargo.override_cargo {
|
||||
Some(override_cargo) => {
|
||||
let mut override_parts =
|
||||
override_cargo.split(" ").map(|s| s.to_string());
|
||||
task_template.command = override_parts
|
||||
.next()
|
||||
.unwrap_or_else(|| override_cargo.clone());
|
||||
task_template.args.extend(override_parts);
|
||||
}
|
||||
None => task_template.command = "cargo".to_string(),
|
||||
};
|
||||
task_template.env = cargo.environment;
|
||||
task_template.cwd = Some(
|
||||
cargo
|
||||
.workspace_root
|
||||
.unwrap_or(cargo.cwd)
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
);
|
||||
task_template.args.extend(cargo.cargo_args);
|
||||
if !cargo.executable_args.is_empty() {
|
||||
task_template.args.push("--".to_string());
|
||||
task_template.args.extend(cargo.executable_args);
|
||||
}
|
||||
}
|
||||
RunnableArgs::Shell(shell) => {
|
||||
task_template.command = shell.program;
|
||||
task_template.args = shell.args;
|
||||
task_template.env = shell.environment;
|
||||
task_template.cwd = Some(shell.cwd.to_string_lossy().to_string());
|
||||
}
|
||||
}
|
||||
|
||||
runnables.push((location, task_template));
|
||||
}
|
||||
|
||||
Ok(LspRunnables { runnables })
|
||||
}
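For a runnable like the one sketched above, the conversion in `response_from_lsp` should yield a task equivalent to running `cargo test --package my_crate -- my_test --nocapture`. A rough sketch of the expected template fields, with invented values:

// Hypothetical input/output pair for the conversion above; not from a real session.
fn expected_template_for_cargo_test() -> TaskTemplate {
    let mut expected = TaskTemplate::default();
    expected.label = "cargo test -p my_crate my_test".to_string();
    expected.command = "cargo".to_string(); // no `override_cargo` supplied
    expected.args = vec![
        "test".to_string(),
        "--package".to_string(),
        "my_crate".to_string(),
        "--".to_string(), // separator added only when executable_args is non-empty
        "my_test".to_string(),
        "--nocapture".to_string(),
    ];
    // The runnable's own `cwd` is used here because no `workspace_root` was given;
    // when rust-analyzer provides one, `workspace_root` wins.
    expected.cwd = Some("/path/to/my_crate".to_string());
    expected
}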
|
||||
|
||||
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::LspExtRunnables {
|
||||
proto::LspExtRunnables {
|
||||
project_id,
|
||||
buffer_id: buffer.remote_id().to_proto(),
|
||||
position: self.position.as_ref().map(serialize_anchor),
|
||||
}
|
||||
}
|
||||
|
||||
async fn from_proto(
|
||||
message: proto::LspExtRunnables,
|
||||
_: Entity<LspStore>,
|
||||
_: Entity<Buffer>,
|
||||
_: AsyncApp,
|
||||
) -> Result<Self> {
|
||||
let buffer_id = Self::buffer_id_from_proto(&message)?;
|
||||
let position = message.position.and_then(deserialize_anchor);
|
||||
Ok(Self {
|
||||
buffer_id,
|
||||
position,
|
||||
})
|
||||
}
|
||||
|
||||
fn response_to_proto(
|
||||
response: LspRunnables,
|
||||
lsp_store: &mut LspStore,
|
||||
peer_id: PeerId,
|
||||
_: &clock::Global,
|
||||
cx: &mut App,
|
||||
) -> proto::LspExtRunnablesResponse {
|
||||
proto::LspExtRunnablesResponse {
|
||||
runnables: response
|
||||
.runnables
|
||||
.into_iter()
|
||||
.map(|(location, task_template)| proto::LspRunnable {
|
||||
location: location
|
||||
.map(|location| location_link_to_proto(location, lsp_store, peer_id, cx)),
|
||||
task_template: serde_json::to_vec(&task_template).unwrap(),
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn response_from_proto(
|
||||
self,
|
||||
message: proto::LspExtRunnablesResponse,
|
||||
lsp_store: Entity<LspStore>,
|
||||
_: Entity<Buffer>,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<LspRunnables> {
|
||||
let mut runnables = LspRunnables {
|
||||
runnables: Vec::new(),
|
||||
};
|
||||
|
||||
for lsp_runnable in message.runnables {
|
||||
let location = match lsp_runnable.location {
|
||||
Some(location) => {
|
||||
Some(location_link_from_proto(location, &lsp_store, &mut cx).await?)
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
let task_template = serde_json::from_slice(&lsp_runnable.task_template)
|
||||
.context("deserializing task template from proto")?;
|
||||
runnables.runnables.push((location, task_template));
|
||||
}
|
||||
|
||||
Ok(runnables)
|
||||
}
|
||||
|
||||
fn buffer_id_from_proto(message: &proto::LspExtRunnables) -> Result<BufferId> {
|
||||
BufferId::new(message.buffer_id)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@ pub const RUST_ANALYZER_NAME: &str = "rust-analyzer";
|
||||
|
||||
/// Experimental: Informs the end user about the state of the server
|
||||
///
|
||||
/// [Rust Analyzer Specification](https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/lsp-extensions.md#server-status)
|
||||
/// [Rust Analyzer Specification](https://rust-analyzer.github.io/book/contributing/lsp-extensions.html#server-status)
|
||||
#[derive(Debug)]
|
||||
enum ServerStatus {}
|
||||
|
||||
@@ -38,13 +38,10 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
|
||||
let name = language_server.name();
|
||||
let server_id = language_server.server_id();
|
||||
|
||||
let this = lsp_store;
|
||||
|
||||
language_server
|
||||
.on_notification::<ServerStatus, _>({
|
||||
let name = name.to_string();
|
||||
move |params, cx| {
|
||||
let this = this.clone();
|
||||
let name = name.to_string();
|
||||
if let Some(ref message) = params.message {
|
||||
let message = message.trim();
|
||||
@@ -53,10 +50,10 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
|
||||
"Language server {name} (id {server_id}) status update: {message}"
|
||||
);
|
||||
match params.health {
|
||||
ServerHealthStatus::Ok => log::info!("{}", formatted_message),
|
||||
ServerHealthStatus::Warning => log::warn!("{}", formatted_message),
|
||||
ServerHealthStatus::Ok => log::info!("{formatted_message}"),
|
||||
ServerHealthStatus::Warning => log::warn!("{formatted_message}"),
|
||||
ServerHealthStatus::Error => {
|
||||
log::error!("{}", formatted_message);
|
||||
log::error!("{formatted_message}");
|
||||
let (tx, _rx) = smol::channel::bounded(1);
|
||||
let request = LanguageServerPromptRequest {
|
||||
level: PromptLevel::Critical,
|
||||
@@ -65,7 +62,7 @@ pub fn register_notifications(lsp_store: WeakEntity<LspStore>, language_server:
|
||||
response_channel: tx,
|
||||
lsp_name: name.clone(),
|
||||
};
|
||||
let _ = this
|
||||
lsp_store
|
||||
.update(cx, |_, cx| {
|
||||
cx.emit(LspStoreEvent::LanguageServerPrompt(request));
|
||||
})
|
||||
|
||||
@@ -359,8 +359,14 @@ pub struct InlayHint {
|
||||
#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy)]
|
||||
pub enum CompletionIntent {
|
||||
/// The user intends to 'commit' this result, if possible
|
||||
/// completion confirmations should run side effects
|
||||
/// completion confirmations should run side effects.
|
||||
///
|
||||
/// For LSP completions, will respect the setting `completions.lsp_insert_mode`.
|
||||
Complete,
|
||||
/// Similar to [Self::Complete], but behaves like `lsp_insert_mode` is set to `insert`.
|
||||
CompleteWithInsert,
|
||||
/// Similar to [Self::Complete], but behaves like `lsp_insert_mode` is set to `replace`.
|
||||
CompleteWithReplace,
|
||||
/// The user intends to continue 'composing' this completion
|
||||
/// completion confirmations should not run side effects and
|
||||
/// let the user continue composing their action
|
||||
@@ -377,11 +383,11 @@ impl CompletionIntent {
|
||||
}
|
||||
}
|
||||
|
||||
/// A completion provided by a language server
|
||||
/// Similar to `CoreCompletion`, but with extra metadata attached.
|
||||
#[derive(Clone)]
|
||||
pub struct Completion {
|
||||
/// The range of the buffer that will be replaced.
|
||||
pub old_range: Range<Anchor>,
|
||||
/// The range of text that will be replaced by this completion.
|
||||
pub replace_range: Range<Anchor>,
|
||||
/// The new text that will be inserted.
|
||||
pub new_text: String,
|
||||
/// A label for this completion that is shown in the menu.
|
||||
@@ -404,6 +410,8 @@ pub struct Completion {
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum CompletionSource {
|
||||
Lsp {
|
||||
/// The alternate `insert` range, if provided by the LSP server.
|
||||
insert_range: Option<Range<Anchor>>,
|
||||
/// The id of the language server that produced this completion.
|
||||
server_id: LanguageServerId,
|
||||
/// The raw completion provided by the language server.
|
||||
@@ -508,7 +516,7 @@ impl CompletionSource {
|
||||
impl std::fmt::Debug for Completion {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("Completion")
|
||||
.field("old_range", &self.old_range)
|
||||
.field("replace_range", &self.replace_range)
|
||||
.field("new_text", &self.new_text)
|
||||
.field("label", &self.label)
|
||||
.field("documentation", &self.documentation)
|
||||
@@ -517,10 +525,10 @@ impl std::fmt::Debug for Completion {
|
||||
}
|
||||
}
|
||||
|
||||
/// A completion provided by a language server
|
||||
/// A generic completion that can come from different sources.
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct CoreCompletion {
|
||||
old_range: Range<Anchor>,
|
||||
replace_range: Range<Anchor>,
|
||||
new_text: String,
|
||||
source: CompletionSource,
|
||||
}
|
||||
|
||||
@@ -25,7 +25,7 @@ use std::{
|
||||
time::Duration,
|
||||
};
|
||||
use task::{TaskTemplates, VsCodeTaskFile};
|
||||
use util::ResultExt;
|
||||
use util::{ResultExt, serde::default_true};
|
||||
use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId};
|
||||
|
||||
use crate::{
|
||||
@@ -278,12 +278,28 @@ pub struct BinarySettings {
|
||||
pub ignore_system_version: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct LspSettings {
    pub binary: Option<BinarySettings>,
    pub initialization_options: Option<serde_json::Value>,
    pub settings: Option<serde_json::Value>,
    /// If the server supports sending tasks over LSP extensions,
    /// this setting can be used to enable or disable them in Zed.
    /// Default: true
    #[serde(default = "default_true")]
    pub enable_lsp_tasks: bool,
}

impl Default for LspSettings {
    fn default() -> Self {
        Self {
            binary: None,
            initialization_options: None,
            settings: None,
            enable_lsp_tasks: true,
        }
    }
}
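Because of `default_true`, a user settings block that omits `enable_lsp_tasks` still opts into LSP tasks. A minimal sketch, assuming only the derives above and an arbitrary `settings` payload:

// Sketch only: the inner settings JSON is arbitrary, not a documented server option.
fn lsp_settings_default_sketch() {
    let parsed: LspSettings = serde_json::from_value(serde_json::json!({
        "settings": { "check": { "command": "clippy" } }
    }))
    .expect("valid settings JSON");
    assert!(parsed.enable_lsp_tasks); // omitted field -> true via `default_true`
    assert!(parsed.binary.is_none()); // untouched Option fields simply stay None
}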
|
||||
|
||||
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema)]
|
||||
|
||||
@@ -459,6 +459,8 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
|
||||
active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
|
||||
active_worktree_context: None,
|
||||
other_worktree_contexts: Vec::new(),
|
||||
lsp_task_sources: HashMap::default(),
|
||||
latest_selection: None,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
@@ -481,6 +483,8 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
|
||||
worktree_context
|
||||
})),
|
||||
other_worktree_contexts: Vec::new(),
|
||||
lsp_task_sources: HashMap::default(),
|
||||
latest_selection: None,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
@@ -797,7 +801,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
|
||||
.receive_notification::<lsp::notification::DidCloseTextDocument>()
|
||||
.await
|
||||
.text_document,
|
||||
lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
|
||||
lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
|
||||
);
|
||||
assert_eq!(
|
||||
fake_json_server
|
||||
@@ -2663,6 +2667,62 @@ async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAp
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) {
|
||||
init_test(cx);
|
||||
|
||||
let text = "Path()";
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"a.rs": text
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
|
||||
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(path!("/dir/a.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Simulate the language server sending us a pair of edits at the same location,
|
||||
// with an insertion following a replacement (which violates the LSP spec).
|
||||
let edits = lsp_store
|
||||
.update(cx, |lsp_store, cx| {
|
||||
lsp_store.as_local_mut().unwrap().edits_from_lsp(
|
||||
&buffer,
|
||||
[
|
||||
lsp::TextEdit {
|
||||
range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
|
||||
new_text: "Path".into(),
|
||||
},
|
||||
lsp::TextEdit {
|
||||
range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
|
||||
new_text: "from path import Path\n\n\n".into(),
|
||||
},
|
||||
],
|
||||
LanguageServerId(0),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(edits, None, cx);
|
||||
assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
@@ -2958,7 +3018,7 @@ async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
|
||||
assert_eq!(completions.len(), 1);
|
||||
assert_eq!(completions[0].new_text, "textEditText");
|
||||
assert_eq!(
|
||||
completions[0].old_range.to_offset(&snapshot),
|
||||
completions[0].replace_range.to_offset(&snapshot),
|
||||
text.len() - 3..text.len()
|
||||
);
|
||||
}
|
||||
@@ -3041,7 +3101,7 @@ async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
|
||||
assert_eq!(completions.len(), 1);
|
||||
assert_eq!(completions[0].new_text, "insertText");
|
||||
assert_eq!(
|
||||
completions[0].old_range.to_offset(&snapshot),
|
||||
completions[0].replace_range.to_offset(&snapshot),
|
||||
text.len() - 3..text.len()
|
||||
);
|
||||
}
|
||||
@@ -3083,7 +3143,7 @@ async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
|
||||
assert_eq!(completions.len(), 1);
|
||||
assert_eq!(completions[0].new_text, "labelText");
|
||||
assert_eq!(
|
||||
completions[0].old_range.to_offset(&snapshot),
|
||||
completions[0].replace_range.to_offset(&snapshot),
|
||||
text.len() - 3..text.len()
|
||||
);
|
||||
}
|
||||
@@ -3153,7 +3213,7 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
|
||||
assert_eq!(completions.len(), 1);
|
||||
assert_eq!(completions[0].new_text, "fullyQualifiedName");
|
||||
assert_eq!(
|
||||
completions[0].old_range.to_offset(&snapshot),
|
||||
completions[0].replace_range.to_offset(&snapshot),
|
||||
text.len() - 3..text.len()
|
||||
);
|
||||
|
||||
@@ -3180,7 +3240,7 @@ async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
|
||||
assert_eq!(completions.len(), 1);
|
||||
assert_eq!(completions[0].new_text, "component");
|
||||
assert_eq!(
|
||||
completions[0].old_range.to_offset(&snapshot),
|
||||
completions[0].replace_range.to_offset(&snapshot),
|
||||
text.len() - 4..text.len() - 1
|
||||
);
|
||||
}
|
||||
|
||||
@@ -12,13 +12,17 @@ use anyhow::Result;
|
||||
use collections::{HashMap, HashSet, VecDeque};
|
||||
use gpui::{App, AppContext as _, Entity, SharedString, Task};
|
||||
use itertools::Itertools;
|
||||
use language::{ContextProvider, File, Language, LanguageToolchainStore, Location};
|
||||
use language::{
|
||||
ContextProvider, File, Language, LanguageToolchainStore, Location,
|
||||
language_settings::language_settings,
|
||||
};
|
||||
use lsp::{LanguageServerId, LanguageServerName};
|
||||
use settings::{InvalidSettingsError, TaskKind, parse_json_with_comments};
|
||||
use task::{
|
||||
DebugTaskDefinition, ResolvedTask, TaskContext, TaskId, TaskTemplate, TaskTemplates,
|
||||
TaskVariables, VariableName,
|
||||
};
|
||||
use text::{Point, ToPoint};
|
||||
use text::{BufferId, Point, ToPoint};
|
||||
use util::{NumericPrefixWithSuffix, ResultExt as _, paths::PathExt as _, post_inc};
|
||||
use worktree::WorktreeId;
|
||||
|
||||
@@ -55,6 +59,8 @@ pub enum TaskSourceKind {
|
||||
},
|
||||
/// Languages-specific tasks coming from extensions.
|
||||
Language { name: SharedString },
|
||||
/// Language-specific tasks coming from LSP servers.
|
||||
Lsp(LanguageServerId),
|
||||
}
|
||||
|
||||
/// A collection of task contexts, derived from the current state of the workspace.
|
||||
@@ -68,6 +74,8 @@ pub struct TaskContexts {
|
||||
pub active_worktree_context: Option<(WorktreeId, TaskContext)>,
|
||||
/// If there are multiple worktrees in the workspace, all non-active ones are included here.
|
||||
pub other_worktree_contexts: Vec<(WorktreeId, TaskContext)>,
|
||||
pub lsp_task_sources: HashMap<LanguageServerName, Vec<BufferId>>,
|
||||
pub latest_selection: Option<text::Anchor>,
|
||||
}
|
||||
|
||||
impl TaskContexts {
|
||||
@@ -104,18 +112,19 @@ impl TaskContexts {
|
||||
impl TaskSourceKind {
|
||||
pub fn to_id_base(&self) -> String {
|
||||
match self {
|
||||
TaskSourceKind::UserInput => "oneshot".to_string(),
|
||||
TaskSourceKind::AbsPath { id_base, abs_path } => {
|
||||
Self::UserInput => "oneshot".to_string(),
|
||||
Self::AbsPath { id_base, abs_path } => {
|
||||
format!("{id_base}_{}", abs_path.display())
|
||||
}
|
||||
TaskSourceKind::Worktree {
|
||||
Self::Worktree {
|
||||
id,
|
||||
id_base,
|
||||
directory_in_worktree,
|
||||
} => {
|
||||
format!("{id_base}_{id}_{}", directory_in_worktree.display())
|
||||
}
|
||||
TaskSourceKind::Language { name } => format!("language_{name}"),
|
||||
Self::Language { name } => format!("language_{name}"),
|
||||
Self::Lsp(server_id) => format!("lsp_{server_id}"),
|
||||
}
|
||||
}
|
||||
}
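The new `Lsp` arm gives LSP-provided tasks their own id namespace alongside the existing sources. A small sketch of the resulting id bases, assuming `LanguageServerId` displays as its numeric id (as the log messages elsewhere in this diff suggest):

// Sketch only; the concrete ids and the language name are invented.
fn id_base_examples() {
    assert_eq!(TaskSourceKind::UserInput.to_id_base(), "oneshot");
    assert_eq!(
        TaskSourceKind::Language { name: "Rust".into() }.to_id_base(),
        "language_Rust"
    );
    assert_eq!(TaskSourceKind::Lsp(LanguageServerId(0)).to_id_base(), "lsp_0");
}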
|
||||
@@ -156,6 +165,11 @@ impl Inventory {
|
||||
});
|
||||
let global_tasks = self.global_templates_from_settings();
|
||||
let language_tasks = language
|
||||
.filter(|language| {
|
||||
language_settings(Some(language.name()), file.as_ref(), cx)
|
||||
.tasks
|
||||
.enabled
|
||||
})
|
||||
.and_then(|language| language.context_provider()?.associated_tasks(file, cx))
|
||||
.into_iter()
|
||||
.flat_map(|tasks| tasks.0.into_iter())
|
||||
@@ -171,10 +185,10 @@ impl Inventory {
|
||||
/// Joins the new resolutions with the resolved tasks that were used (spawned) before,
|
||||
/// orders them so that the most recently used come first, all equally used ones are ordered so that the most specific tasks come first.
|
||||
/// Deduplicates the tasks by their labels and context and splits the ordered list into two: used tasks and the rest, newly resolved tasks.
|
||||
pub fn used_and_current_resolved_tasks(
|
||||
&self,
|
||||
task_contexts: &TaskContexts,
|
||||
cx: &App,
|
||||
pub fn used_and_current_resolved_tasks<'a>(
|
||||
&'a self,
|
||||
task_contexts: &'a TaskContexts,
|
||||
cx: &'a App,
|
||||
) -> (
|
||||
Vec<(TaskSourceKind, ResolvedTask)>,
|
||||
Vec<(TaskSourceKind, ResolvedTask)>,
|
||||
@@ -227,7 +241,13 @@ impl Inventory {
|
||||
|
||||
let not_used_score = post_inc(&mut lru_score);
|
||||
let global_tasks = self.global_templates_from_settings();
|
||||
|
||||
let language_tasks = language
|
||||
.filter(|language| {
|
||||
language_settings(Some(language.name()), file.as_ref(), cx)
|
||||
.tasks
|
||||
.enabled
|
||||
})
|
||||
.and_then(|language| language.context_provider()?.associated_tasks(file, cx))
|
||||
.into_iter()
|
||||
.flat_map(|tasks| tasks.0.into_iter())
|
||||
@@ -475,6 +495,7 @@ fn task_lru_comparator(
|
||||
|
||||
fn task_source_kind_preference(kind: &TaskSourceKind) -> u32 {
|
||||
match kind {
|
||||
TaskSourceKind::Lsp(..) => 0,
|
||||
TaskSourceKind::Language { .. } => 1,
|
||||
TaskSourceKind::UserInput => 2,
|
||||
TaskSourceKind::Worktree { .. } => 3,
|
||||
@@ -698,7 +719,7 @@ mod tests {
|
||||
async fn test_task_list_sorting(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let inventory = cx.update(Inventory::new);
|
||||
let initial_tasks = resolved_task_names(&inventory, None, cx).await;
|
||||
let initial_tasks = resolved_task_names(&inventory, None, cx);
|
||||
assert!(
|
||||
initial_tasks.is_empty(),
|
||||
"No tasks expected for empty inventory, but got {initial_tasks:?}"
|
||||
@@ -732,7 +753,7 @@ mod tests {
|
||||
&expected_initial_state,
|
||||
);
|
||||
assert_eq!(
|
||||
resolved_task_names(&inventory, None, cx).await,
|
||||
resolved_task_names(&inventory, None, cx),
|
||||
&expected_initial_state,
|
||||
"Tasks with equal amount of usages should be sorted alphanumerically"
|
||||
);
|
||||
@@ -743,7 +764,7 @@ mod tests {
|
||||
&expected_initial_state,
|
||||
);
|
||||
assert_eq!(
|
||||
resolved_task_names(&inventory, None, cx).await,
|
||||
resolved_task_names(&inventory, None, cx),
|
||||
vec![
|
||||
"2_task".to_string(),
|
||||
"1_a_task".to_string(),
|
||||
@@ -761,7 +782,7 @@ mod tests {
|
||||
&expected_initial_state,
|
||||
);
|
||||
assert_eq!(
|
||||
resolved_task_names(&inventory, None, cx).await,
|
||||
resolved_task_names(&inventory, None, cx),
|
||||
vec![
|
||||
"3_task".to_string(),
|
||||
"1_task".to_string(),
|
||||
@@ -797,7 +818,7 @@ mod tests {
|
||||
&expected_updated_state,
|
||||
);
|
||||
assert_eq!(
|
||||
resolved_task_names(&inventory, None, cx).await,
|
||||
resolved_task_names(&inventory, None, cx),
|
||||
vec![
|
||||
"3_task".to_string(),
|
||||
"1_task".to_string(),
|
||||
@@ -814,7 +835,7 @@ mod tests {
|
||||
&expected_updated_state,
|
||||
);
|
||||
assert_eq!(
|
||||
resolved_task_names(&inventory, None, cx).await,
|
||||
resolved_task_names(&inventory, None, cx),
|
||||
vec![
|
||||
"11_hello".to_string(),
|
||||
"3_task".to_string(),
|
||||
@@ -987,21 +1008,21 @@ mod tests {
|
||||
TaskStore::init(None);
|
||||
}
|
||||
|
||||
async fn resolved_task_names(
|
||||
fn resolved_task_names(
|
||||
inventory: &Entity<Inventory>,
|
||||
worktree: Option<WorktreeId>,
|
||||
cx: &mut TestAppContext,
|
||||
) -> Vec<String> {
|
||||
let (used, current) = inventory.update(cx, |inventory, cx| {
|
||||
inventory.update(cx, |inventory, cx| {
|
||||
let mut task_contexts = TaskContexts::default();
|
||||
task_contexts.active_worktree_context =
|
||||
worktree.map(|worktree| (worktree, TaskContext::default()));
|
||||
inventory.used_and_current_resolved_tasks(&task_contexts, cx)
|
||||
});
|
||||
used.into_iter()
|
||||
.chain(current)
|
||||
.map(|(_, task)| task.original_task().label.clone())
|
||||
.collect()
|
||||
let (used, current) = inventory.used_and_current_resolved_tasks(&task_contexts, cx);
|
||||
used.into_iter()
|
||||
.chain(current)
|
||||
.map(|(_, task)| task.original_task().label.clone())
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
|
||||
fn mock_tasks_from_names<'a>(task_names: impl Iterator<Item = &'a str> + 'a) -> String {
|
||||
@@ -1024,17 +1045,17 @@ mod tests {
|
||||
worktree: Option<WorktreeId>,
|
||||
cx: &mut TestAppContext,
|
||||
) -> Vec<(TaskSourceKind, String)> {
|
||||
let (used, current) = inventory.update(cx, |inventory, cx| {
|
||||
inventory.update(cx, |inventory, cx| {
|
||||
let mut task_contexts = TaskContexts::default();
|
||||
task_contexts.active_worktree_context =
|
||||
worktree.map(|worktree| (worktree, TaskContext::default()));
|
||||
inventory.used_and_current_resolved_tasks(&task_contexts, cx)
|
||||
});
|
||||
let mut all = used;
|
||||
all.extend(current);
|
||||
all.into_iter()
|
||||
.map(|(source_kind, task)| (source_kind, task.resolved_label))
|
||||
.sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
|
||||
.collect()
|
||||
let (used, current) = inventory.used_and_current_resolved_tasks(&task_contexts, cx);
|
||||
let mut all = used;
|
||||
all.extend(current);
|
||||
all.into_iter()
|
||||
.map(|(source_kind, task)| (source_kind, task.resolved_label))
|
||||
.sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -198,8 +198,8 @@ message ApplyCompletionAdditionalEditsResponse {
|
||||
}
|
||||
|
||||
message Completion {
|
||||
Anchor old_start = 1;
|
||||
Anchor old_end = 2;
|
||||
Anchor old_replace_start = 1;
|
||||
Anchor old_replace_end = 2;
|
||||
string new_text = 3;
|
||||
uint64 server_id = 4;
|
||||
bytes lsp_completion = 5;
|
||||
@@ -208,6 +208,8 @@ message Completion {
|
||||
optional bytes lsp_defaults = 8;
|
||||
optional Anchor buffer_word_start = 9;
|
||||
optional Anchor buffer_word_end = 10;
|
||||
Anchor old_insert_start = 11;
|
||||
Anchor old_insert_end = 12;
|
||||
|
||||
enum Source {
|
||||
Lsp = 0;
|
||||
@@ -428,10 +430,12 @@ message ResolveCompletionDocumentation {
|
||||
message ResolveCompletionDocumentationResponse {
|
||||
string documentation = 1;
|
||||
bool documentation_is_markdown = 2;
|
||||
Anchor old_start = 3;
|
||||
Anchor old_end = 4;
|
||||
Anchor old_replace_start = 3;
|
||||
Anchor old_replace_end = 4;
|
||||
string new_text = 5;
|
||||
bytes lsp_completion = 6;
|
||||
Anchor old_insert_start = 7;
|
||||
Anchor old_insert_end = 8;
|
||||
}
|
||||
|
||||
message ResolveInlayHint {
|
||||
@@ -699,3 +703,18 @@ message LanguageServerIdForName {
|
||||
message LanguageServerIdForNameResponse {
|
||||
optional uint64 server_id = 1;
|
||||
}
|
||||
|
||||
message LspExtRunnables {
    uint64 project_id = 1;
    uint64 buffer_id = 2;
    optional Anchor position = 3;
}

message LspExtRunnablesResponse {
    repeated LspRunnable runnables = 1;
}

message LspRunnable {
    bytes task_template = 1;
    optional LocationLink location = 2;
}
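The `task_template` field is opaque bytes: the Rust side of this diff writes it with `serde_json::to_vec(&task_template)` and reads it back with `serde_json::from_slice` (see `GetLspRunnables` above). A sketch of that round trip in Rust, with invented field values and assuming `task::TaskTemplate` and `serde_json` are in scope:

// Sketch of the round trip behind `bytes task_template = 1`; values are invented.
fn task_template_roundtrip() -> serde_json::Result<()> {
    let mut template = TaskTemplate::default();
    template.label = "cargo check".to_string();
    template.command = "cargo".to_string();
    template.args = vec!["check".to_string()];

    let wire_bytes = serde_json::to_vec(&template)?; // what goes into the proto field
    let decoded: TaskTemplate = serde_json::from_slice(&wire_bytes)?;
    assert_eq!(decoded.label, "cargo check");
    Ok(())
}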
|
||||
|
||||
@@ -372,12 +372,15 @@ message Envelope {
|
||||
GetDocumentSymbolsResponse get_document_symbols_response = 331;
|
||||
|
||||
LanguageServerIdForName language_server_id_for_name = 332;
|
||||
LanguageServerIdForNameResponse language_server_id_for_name_response = 333; // current max
|
||||
LanguageServerIdForNameResponse language_server_id_for_name_response = 333;
|
||||
|
||||
LoadCommitDiff load_commit_diff = 334;
|
||||
LoadCommitDiffResponse load_commit_diff_response = 335;
|
||||
|
||||
StopLanguageServers stop_language_servers = 336; // current max
|
||||
StopLanguageServers stop_language_servers = 336;
|
||||
|
||||
LspExtRunnables lsp_ext_runnables = 337;
|
||||
LspExtRunnablesResponse lsp_ext_runnables_response = 338; // current max
|
||||
}
|
||||
|
||||
reserved 87 to 88;
|
||||
|
||||
@@ -171,6 +171,8 @@ messages!(
|
||||
(LspExtExpandMacroResponse, Background),
|
||||
(LspExtOpenDocs, Background),
|
||||
(LspExtOpenDocsResponse, Background),
|
||||
(LspExtRunnables, Background),
|
||||
(LspExtRunnablesResponse, Background),
|
||||
(LspExtSwitchSourceHeader, Background),
|
||||
(LspExtSwitchSourceHeaderResponse, Background),
|
||||
(MarkNotificationRead, Foreground),
|
||||
@@ -414,6 +416,7 @@ request_messages!(
|
||||
(LanguageServerIdForName, LanguageServerIdForNameResponse),
|
||||
(LspExtExpandMacro, LspExtExpandMacroResponse),
|
||||
(LspExtOpenDocs, LspExtOpenDocsResponse),
|
||||
(LspExtRunnables, LspExtRunnablesResponse),
|
||||
(SetRoomParticipantRole, Ack),
|
||||
(BlameBuffer, BlameBufferResponse),
|
||||
(RejoinRemoteProjects, RejoinRemoteProjectsResponse),
|
||||
@@ -537,6 +540,7 @@ entity_messages!(
|
||||
UpdateWorktreeSettings,
|
||||
LspExtExpandMacro,
|
||||
LspExtOpenDocs,
|
||||
LspExtRunnables,
|
||||
AdvertiseContexts,
|
||||
OpenContext,
|
||||
CreateContext,
|
||||
|
||||
@@ -31,10 +31,9 @@ pub trait RequestMessage: EnvelopedMessage {
    type Response: EnvelopedMessage;
}

pub trait AnyTypedEnvelope: 'static + Send + Sync {
pub trait AnyTypedEnvelope: Any + Send + Sync {
    fn payload_type_id(&self) -> TypeId;
    fn payload_type_name(&self) -> &'static str;
    fn as_any(&self) -> &dyn Any;
    fn into_any(self: Box<Self>) -> Box<dyn Any + Send + Sync>;
    fn is_background(&self) -> bool;
    fn original_sender_id(&self) -> Option<PeerId>;
@@ -56,10 +55,6 @@ impl<T: EnvelopedMessage> AnyTypedEnvelope for TypedEnvelope<T> {
        T::NAME
    }

    fn as_any(&self) -> &dyn Any {
        self
    }

    fn into_any(self: Box<Self>) -> Box<dyn Any + Send + Sync> {
        self
    }
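Dropping `as_any` works because `AnyTypedEnvelope` now has `Any` as a supertrait, so callers can upcast a `&dyn AnyTypedEnvelope` to `&dyn Any` directly (dyn trait upcasting, available in recent stable Rust) and then downcast to the concrete envelope. A minimal standalone sketch of the pattern, not the real envelope types:

// Minimal illustration with stand-in types; mirrors the call sites changed below.
use std::any::Any;

trait Envelope: Any + Send + Sync {
    fn name(&self) -> &'static str;
}

struct Ping;

impl Envelope for Ping {
    fn name(&self) -> &'static str {
        "Ping"
    }
}

fn downcast_ping(envelope: &dyn Envelope) -> Option<&Ping> {
    // Upcast the trait object to `&dyn Any`, then downcast to the concrete type,
    // just as the changed extractors do with `TypedEnvelope<M>`.
    (envelope as &dyn Any).downcast_ref::<Ping>()
}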
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::any::Any;
|
||||
use std::collections::BTreeSet;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
@@ -1288,11 +1289,8 @@ impl RemoteServerProjects {
|
||||
cx.notify();
|
||||
}));
|
||||
|
||||
let Some(scroll_handle) = scroll_state
|
||||
.scroll_handle()
|
||||
.as_any()
|
||||
.downcast_ref::<ScrollHandle>()
|
||||
else {
|
||||
let handle = &**scroll_state.scroll_handle() as &dyn Any;
|
||||
let Some(scroll_handle) = handle.downcast_ref::<ScrollHandle>() else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ use proto::{
|
||||
error::ErrorExt as _,
|
||||
};
|
||||
use std::{
|
||||
any::TypeId,
|
||||
any::{Any, TypeId},
|
||||
sync::{Arc, Weak},
|
||||
};
|
||||
|
||||
@@ -250,8 +250,7 @@ impl AnyProtoClient {
|
||||
let message_type_id = TypeId::of::<M>();
|
||||
let entity_type_id = TypeId::of::<E>();
|
||||
let entity_id_extractor = |envelope: &dyn AnyTypedEnvelope| {
|
||||
envelope
|
||||
.as_any()
|
||||
(envelope as &dyn Any)
|
||||
.downcast_ref::<TypedEnvelope<M>>()
|
||||
.unwrap()
|
||||
.payload
|
||||
@@ -296,8 +295,7 @@ impl AnyProtoClient {
|
||||
let message_type_id = TypeId::of::<M>();
|
||||
let entity_type_id = TypeId::of::<E>();
|
||||
let entity_id_extractor = |envelope: &dyn AnyTypedEnvelope| {
|
||||
envelope
|
||||
.as_any()
|
||||
(envelope as &dyn Any)
|
||||
.downcast_ref::<TypedEnvelope<M>>()
|
||||
.unwrap()
|
||||
.payload
|
||||
|
||||
@@ -18,9 +18,7 @@ pub enum ComponentStory {
|
||||
ContextMenu,
|
||||
Cursor,
|
||||
DefaultColors,
|
||||
Disclosure,
|
||||
Focus,
|
||||
Icon,
|
||||
IconButton,
|
||||
Keybinding,
|
||||
List,
|
||||
@@ -35,7 +33,6 @@ pub enum ComponentStory {
|
||||
ToggleButton,
|
||||
ViewportUnits,
|
||||
WithRemSize,
|
||||
Vector,
|
||||
}
|
||||
|
||||
impl ComponentStory {
|
||||
@@ -51,9 +48,7 @@ impl ComponentStory {
|
||||
Self::ContextMenu => cx.new(|_| ui::ContextMenuStory).into(),
|
||||
Self::Cursor => cx.new(|_| crate::stories::CursorStory).into(),
|
||||
Self::DefaultColors => DefaultColorsStory::model(cx).into(),
|
||||
Self::Disclosure => cx.new(|_| ui::DisclosureStory).into(),
|
||||
Self::Focus => FocusStory::model(window, cx).into(),
|
||||
Self::Icon => cx.new(|_| ui::IconStory).into(),
|
||||
Self::IconButton => cx.new(|_| ui::IconButtonStory).into(),
|
||||
Self::Keybinding => cx.new(|_| ui::KeybindingStory).into(),
|
||||
Self::List => cx.new(|_| ui::ListStory).into(),
|
||||
@@ -68,7 +63,6 @@ impl ComponentStory {
|
||||
Self::ToggleButton => cx.new(|_| ui::ToggleButtonStory).into(),
|
||||
Self::ViewportUnits => cx.new(|_| crate::stories::ViewportUnitsStory).into(),
|
||||
Self::WithRemSize => cx.new(|_| crate::stories::WithRemSizeStory).into(),
|
||||
Self::Vector => cx.new(|_| ui::VectorStory).into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -450,7 +450,7 @@ impl PickerDelegate for TabSwitcherDelegate {
|
||||
IconButton::new("close_tab", IconName::Close)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(indicator_color)
|
||||
.tooltip(Tooltip::text("Close"))
|
||||
.tooltip(Tooltip::for_action_title("Close", &CloseSelectedItem))
|
||||
.on_click(cx.listener(move |picker, _, window, cx| {
|
||||
cx.stop_propagation();
|
||||
picker.delegate.close_item_at(ix, window, cx);
|
||||
|
||||
@@ -13,11 +13,13 @@ path = "src/tasks_ui.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
collections.workspace = true
|
||||
debugger_ui.workspace = true
|
||||
editor.workspace = true
|
||||
file_icons.workspace = true
|
||||
fuzzy.workspace = true
|
||||
feature_flags.workspace = true
|
||||
itertools.workspace = true
|
||||
gpui.workspace = true
|
||||
menu.workspace = true
|
||||
picker.workspace = true
|
||||
|
||||
@@ -7,6 +7,7 @@ use gpui::{
|
||||
Focusable, InteractiveElement, ParentElement, Render, SharedString, Styled, Subscription, Task,
|
||||
WeakEntity, Window, rems,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch};
|
||||
use project::{TaskSourceKind, task_store::TaskStore};
|
||||
use task::{
|
||||
@@ -221,42 +222,66 @@ impl PickerDelegate for TasksModalDelegate {
|
||||
cx: &mut Context<picker::Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let task_type = self.task_modal_type.clone();
|
||||
cx.spawn_in(window, async move |picker, cx| {
|
||||
let Some(candidates) = picker
|
||||
.update(cx, |picker, cx| match &mut picker.delegate.candidates {
|
||||
Some(candidates) => string_match_candidates(candidates.iter(), task_type),
|
||||
None => {
|
||||
let Some(task_inventory) = picker
|
||||
.delegate
|
||||
.task_store
|
||||
.read(cx)
|
||||
.task_inventory()
|
||||
.cloned()
|
||||
else {
|
||||
let candidates = match &self.candidates {
|
||||
Some(candidates) => Task::ready(string_match_candidates(candidates, task_type)),
|
||||
None => {
|
||||
if let Some(task_inventory) = self.task_store.read(cx).task_inventory().cloned() {
|
||||
let (used, current) = task_inventory
|
||||
.read(cx)
|
||||
.used_and_current_resolved_tasks(&self.task_contexts, cx);
|
||||
let workspace = self.workspace.clone();
|
||||
let lsp_task_sources = self.task_contexts.lsp_task_sources.clone();
|
||||
let task_position = self.task_contexts.latest_selection;
|
||||
|
||||
cx.spawn(async move |picker, cx| {
|
||||
let Ok(lsp_tasks) = workspace.update(cx, |workspace, cx| {
|
||||
editor::lsp_tasks(
|
||||
workspace.project().clone(),
|
||||
&lsp_task_sources,
|
||||
task_position,
|
||||
cx,
|
||||
)
|
||||
}) else {
|
||||
return Vec::new();
|
||||
};
|
||||
|
||||
let (used, current) = task_inventory
|
||||
.read(cx)
|
||||
.used_and_current_resolved_tasks(&picker.delegate.task_contexts, cx);
|
||||
picker.delegate.last_used_candidate_index = if used.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(used.len() - 1)
|
||||
};
|
||||
let lsp_tasks = lsp_tasks.await;
|
||||
picker
|
||||
.update(cx, |picker, _| {
|
||||
picker.delegate.last_used_candidate_index = if used.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(used.len() - 1)
|
||||
};
|
||||
|
||||
let mut new_candidates = used;
|
||||
new_candidates.extend(current);
|
||||
let match_candidates =
|
||||
string_match_candidates(new_candidates.iter(), task_type);
|
||||
let _ = picker.delegate.candidates.insert(new_candidates);
|
||||
match_candidates
|
||||
}
|
||||
})
|
||||
.ok()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let mut new_candidates = used;
|
||||
new_candidates.extend(lsp_tasks.into_iter().flat_map(
|
||||
|(kind, tasks_with_locations)| {
|
||||
tasks_with_locations
|
||||
.into_iter()
|
||||
.sorted_by_key(|(location, task)| {
|
||||
(location.is_none(), task.resolved_label.clone())
|
||||
})
|
||||
.map(move |(_, task)| (kind.clone(), task))
|
||||
},
|
||||
));
|
||||
new_candidates.extend(current);
|
||||
let match_candidates =
|
||||
string_match_candidates(&new_candidates, task_type);
|
||||
let _ = picker.delegate.candidates.insert(new_candidates);
|
||||
match_candidates
|
||||
})
|
||||
.ok()
|
||||
.unwrap_or_default()
|
||||
})
|
||||
} else {
|
||||
Task::ready(Vec::new())
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
cx.spawn_in(window, async move |picker, cx| {
|
||||
let candidates = candidates.await;
|
||||
let matches = fuzzy::match_strings(
|
||||
&candidates,
|
||||
&query,
|
||||
@@ -426,6 +451,7 @@ impl PickerDelegate for TasksModalDelegate {
|
||||
color: Color::Default,
|
||||
};
|
||||
let icon = match source_kind {
|
||||
TaskSourceKind::Lsp(..) => Some(Icon::new(IconName::Bolt)),
|
||||
TaskSourceKind::UserInput => Some(Icon::new(IconName::Terminal)),
|
||||
TaskSourceKind::AbsPath { .. } => Some(Icon::new(IconName::Settings)),
|
||||
TaskSourceKind::Worktree { .. } => Some(Icon::new(IconName::FileTree)),
|
||||
@@ -697,10 +723,11 @@ impl PickerDelegate for TasksModalDelegate {
|
||||
}
|
||||
|
||||
fn string_match_candidates<'a>(
|
||||
candidates: impl Iterator<Item = &'a (TaskSourceKind, ResolvedTask)> + 'a,
|
||||
candidates: impl IntoIterator<Item = &'a (TaskSourceKind, ResolvedTask)> + 'a,
|
||||
task_modal_type: TaskModal,
|
||||
) -> Vec<StringMatchCandidate> {
|
||||
candidates
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.filter(|(_, (_, candidate))| match candidate.task_type() {
|
||||
TaskType::Script => task_modal_type == TaskModal::ScriptModal,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
use collections::HashMap;
|
||||
use debugger_ui::Start;
|
||||
use editor::Editor;
|
||||
use feature_flags::{Debugger, FeatureFlagViewExt};
|
||||
@@ -313,6 +313,17 @@ fn task_contexts(workspace: &Workspace, window: &mut Window, cx: &mut App) -> Ta
|
||||
})
|
||||
});
|
||||
|
||||
let lsp_task_sources = active_editor
|
||||
.as_ref()
|
||||
.map(|active_editor| active_editor.update(cx, |editor, cx| editor.lsp_task_sources(cx)))
|
||||
.unwrap_or_default();
|
||||
|
||||
let latest_selection = active_editor.as_ref().map(|active_editor| {
|
||||
active_editor.update(cx, |editor, _| {
|
||||
editor.selections.newest_anchor().head().text_anchor
|
||||
})
|
||||
});
|
||||
|
||||
let mut worktree_abs_paths = workspace
|
||||
.worktrees(cx)
|
||||
.filter(|worktree| is_visible_directory(worktree, cx))
|
||||
@@ -325,6 +336,9 @@ fn task_contexts(workspace: &Workspace, window: &mut Window, cx: &mut App) -> Ta
|
||||
cx.background_spawn(async move {
|
||||
let mut task_contexts = TaskContexts::default();
|
||||
|
||||
task_contexts.lsp_task_sources = lsp_task_sources;
|
||||
task_contexts.latest_selection = latest_selection;
|
||||
|
||||
if let Some(editor_context_task) = editor_context_task {
|
||||
if let Some(editor_context) = editor_context_task.await {
|
||||
task_contexts.active_item_context =
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use std::{
|
||||
any::Any,
|
||||
cell::{Cell, RefCell},
|
||||
rc::Rc,
|
||||
};
|
||||
@@ -85,8 +84,4 @@ impl ScrollableHandle for TerminalScrollHandle {
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2231,6 +2231,7 @@ impl BufferSnapshot {
|
||||
} else if *anchor == Anchor::MAX {
|
||||
self.visible_text.len()
|
||||
} else {
|
||||
debug_assert!(anchor.buffer_id == Some(self.remote_id));
|
||||
let anchor_key = InsertionFragmentKey {
|
||||
timestamp: anchor.timestamp,
|
||||
split_offset: anchor.offset,
|
||||
|
||||
@@ -28,6 +28,7 @@ strum.workspace = true
|
||||
theme.workspace = true
|
||||
ui_macros.workspace = true
|
||||
util.workspace = true
|
||||
documented = "0.9.1"
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
|
||||
5
crates/ui/src/component_prelude.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
pub use component::{
|
||||
Component, ComponentScope, example_group, example_group_with_title, single_example,
|
||||
};
|
||||
pub use documented::Documented;
|
||||
pub use ui_macros::RegisterComponent;
|
||||
@@ -73,7 +73,5 @@ pub use table::*;
|
||||
pub use toggle::*;
|
||||
pub use tooltip::*;
|
||||
|
||||
#[cfg(feature = "stories")]
|
||||
pub use image::story::*;
|
||||
#[cfg(feature = "stories")]
|
||||
pub use stories::*;
|
||||
|
||||