Compare commits

..

2 Commits

Author SHA1 Message Date
Richard Feldman
1ff8521612 wip 2025-03-20 09:36:42 -04:00
Richard Feldman
ca22d5d4a3 Add shell_parser crate 2025-03-19 22:19:16 -04:00
343 changed files with 16733 additions and 26421 deletions

View File

@@ -19,10 +19,6 @@
# https://github.com/zed-industries/zed/pull/2394 # https://github.com/zed-industries/zed/pull/2394
eca93c124a488b4e538946cd2d313bd571aa2b86 eca93c124a488b4e538946cd2d313bd571aa2b86
# 2024-02-15 Format YAML files
# https://github.com/zed-industries/zed/pull/7887
a161a7d0c95ca7505bf9218bfae640ee5444c88b
# 2024-02-25 Format JSON files in assets/ # 2024-02-25 Format JSON files in assets/
# https://github.com/zed-industries/zed/pull/8405 # https://github.com/zed-industries/zed/pull/8405
ffdda588b41f7d9d270ffe76cab116f828ad545e ffdda588b41f7d9d270ffe76cab116f828ad545e

View File

@@ -235,7 +235,7 @@ jobs:
clean: false clean: false
- name: Cache dependencies - name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
with: with:
save-if: ${{ github.ref == 'refs/heads/main' }} save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "buildjet" cache-provider: "buildjet"
@@ -287,7 +287,7 @@ jobs:
clean: false clean: false
- name: Cache dependencies - name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
with: with:
save-if: ${{ github.ref == 'refs/heads/main' }} save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "buildjet" cache-provider: "buildjet"
@@ -334,7 +334,7 @@ jobs:
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse
- name: Cache dependencies - name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
with: with:
save-if: ${{ github.ref == 'refs/heads/main' }} save-if: ${{ github.ref == 'refs/heads/main' }}
workspaces: ${{ env.ZED_WORKSPACE }} workspaces: ${{ env.ZED_WORKSPACE }}
@@ -393,7 +393,7 @@ jobs:
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse
- name: Cache dependencies - name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
with: with:
save-if: ${{ github.ref == 'refs/heads/main' }} save-if: ${{ github.ref == 'refs/heads/main' }}
workspaces: ${{ env.ZED_WORKSPACE }} workspaces: ${{ env.ZED_WORKSPACE }}

View File

@@ -1,7 +1,7 @@
name: "Close Stale Issues" name: "Close Stale Issues"
on: on:
schedule: schedule:
- cron: "0 7,9,11 * * 3" - cron: "0 7,9,11 * * 2"
workflow_dispatch: workflow_dispatch:
jobs: jobs:

View File

@@ -22,7 +22,7 @@ jobs:
clean: false clean: false
- name: Cache dependencies - name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
with: with:
save-if: ${{ github.ref == 'refs/heads/main' }} save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "github" cache-provider: "github"

View File

@@ -182,7 +182,8 @@ jobs:
runner: buildjet-16vcpu-ubuntu-2204 runner: buildjet-16vcpu-ubuntu-2204
install_nix: true install_nix: true
- os: arm Mac - os: arm Mac
runner: [macOS, ARM64, test] # TODO: once other macs are provisioned for nix, remove that constraint from the runner
runner: [macOS, ARM64, nix]
install_nix: false install_nix: false
- os: arm Linux - os: arm Linux
runner: buildjet-16vcpu-ubuntu-2204-arm runner: buildjet-16vcpu-ubuntu-2204-arm

509
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -70,7 +70,6 @@ members = [
"crates/html_to_markdown", "crates/html_to_markdown",
"crates/http_client", "crates/http_client",
"crates/http_client_tls", "crates/http_client_tls",
"crates/icons",
"crates/image_viewer", "crates/image_viewer",
"crates/indexed_docs", "crates/indexed_docs",
"crates/inline_completion", "crates/inline_completion",
@@ -125,12 +124,14 @@ members = [
"crates/rope", "crates/rope",
"crates/rpc", "crates/rpc",
"crates/schema_generator", "crates/schema_generator",
"crates/scripting_tool",
"crates/search", "crates/search",
"crates/semantic_index", "crates/semantic_index",
"crates/semantic_version", "crates/semantic_version",
"crates/session", "crates/session",
"crates/settings", "crates/settings",
"crates/settings_ui", "crates/settings_ui",
"crates/shell_parser",
"crates/snippet", "crates/snippet",
"crates/snippet_provider", "crates/snippet_provider",
"crates/snippets_ui", "crates/snippets_ui",
@@ -171,8 +172,6 @@ members = [
"crates/zed", "crates/zed",
"crates/zed_actions", "crates/zed_actions",
"crates/zeta", "crates/zeta",
"crates/zlog",
"crates/zlog_settings",
# #
# Extensions # Extensions
@@ -275,7 +274,6 @@ gpui_tokio = { path = "crates/gpui_tokio" }
html_to_markdown = { path = "crates/html_to_markdown" } html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" } http_client = { path = "crates/http_client" }
http_client_tls = { path = "crates/http_client_tls" } http_client_tls = { path = "crates/http_client_tls" }
icons = { path = "crates/icons" }
image_viewer = { path = "crates/image_viewer" } image_viewer = { path = "crates/image_viewer" }
indexed_docs = { path = "crates/indexed_docs" } indexed_docs = { path = "crates/indexed_docs" }
inline_completion = { path = "crates/inline_completion" } inline_completion = { path = "crates/inline_completion" }
@@ -330,6 +328,7 @@ reqwest_client = { path = "crates/reqwest_client" }
rich_text = { path = "crates/rich_text" } rich_text = { path = "crates/rich_text" }
rope = { path = "crates/rope" } rope = { path = "crates/rope" }
rpc = { path = "crates/rpc" } rpc = { path = "crates/rpc" }
scripting_tool = { path = "crates/scripting_tool" }
search = { path = "crates/search" } search = { path = "crates/search" }
semantic_index = { path = "crates/semantic_index" } semantic_index = { path = "crates/semantic_index" }
semantic_version = { path = "crates/semantic_version" } semantic_version = { path = "crates/semantic_version" }
@@ -376,8 +375,6 @@ worktree = { path = "crates/worktree" }
zed = { path = "crates/zed" } zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" } zed_actions = { path = "crates/zed_actions" }
zeta = { path = "crates/zeta" } zeta = { path = "crates/zeta" }
zlog = { path = "crates/zlog" }
zlog_settings = { path = "crates/zlog_settings" }
# #
# External crates # External crates
@@ -613,7 +610,7 @@ features = [
] ]
[workspace.dependencies.windows] [workspace.dependencies.windows]
version = "0.61" version = "0.60"
features = [ features = [
"Foundation_Collections", "Foundation_Collections",
"Foundation_Numerics", "Foundation_Numerics",

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-right-left"><path d="m16 3 4 4-4 4"/><path d="M20 7H4"/><path d="m8 21-4-4 4-4"/><path d="M4 17h16"/></svg>

Before

Width:  |  Height:  |  Size: 316 B

View File

@@ -1,3 +0,0 @@
<svg width="8" height="8" viewBox="0 0 8 8" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.4 2.6H5.75C5.75 2.50717 5.71312 2.41815 5.64749 2.35251C5.58185 2.28688 5.49283 2.25 5.4 2.25V2.6ZM2.6 2.25C2.4067 2.25 2.25 2.4067 2.25 2.6C2.25 2.7933 2.4067 2.95 2.6 2.95V2.25ZM5.05 5.4C5.05 5.5933 5.2067 5.75 5.4 5.75C5.5933 5.75 5.75 5.5933 5.75 5.4H5.05ZM2.35252 5.15251C2.21583 5.2892 2.21583 5.5108 2.35252 5.64748C2.4892 5.78417 2.7108 5.78417 2.84749 5.64748L2.35252 5.15251ZM5.4 2.25H2.6V2.95H5.4V2.25ZM5.05 2.6V5.4H5.75V2.6H5.05ZM5.15252 2.35251L2.35252 5.15251L2.84749 5.64748L5.64749 2.84748L5.15252 2.35251Z" fill="black"/>
</svg>

Before

Width:  |  Height:  |  Size: 650 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-brain"><path d="M12 5a3 3 0 1 0-5.997.125 4 4 0 0 0-2.526 5.77 4 4 0 0 0 .556 6.588A4 4 0 1 0 12 18Z"/><path d="M12 5a3 3 0 1 1 5.997.125 4 4 0 0 1 2.526 5.77 4 4 0 0 1-.556 6.588A4 4 0 1 1 12 18Z"/><path d="M15 13a4.5 4.5 0 0 1-3-4 4.5 4.5 0 0 1-3 4"/><path d="M17.599 6.5a3 3 0 0 0 .399-1.375"/><path d="M6.003 5.125A3 3 0 0 0 6.401 6.5"/><path d="M3.477 10.896a4 4 0 0 1 .585-.396"/><path d="M19.938 10.5a4 4 0 0 1 .585.396"/><path d="M6 18a4 4 0 0 1-1.967-.516"/><path d="M19.967 17.484A4 4 0 0 1 18 18"/></svg>

Before

Width:  |  Height:  |  Size: 718 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-clipboard"><rect width="8" height="4" x="8" y="2" rx="1" ry="1"/><path d="M16 4h2a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2H6a2 2 0 0 1-2-2V6a2 2 0 0 1 2-2h2"/></svg>

Before

Width:  |  Height:  |  Size: 358 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-cog"><path d="M12 20a8 8 0 1 0 0-16 8 8 0 0 0 0 16Z"/><path d="M12 14a2 2 0 1 0 0-4 2 2 0 0 0 0 4Z"/><path d="M12 2v2"/><path d="M12 22v-2"/><path d="m17 20.66-1-1.73"/><path d="M11 10.27 7 3.34"/><path d="m20.66 17-1.73-1"/><path d="m3.34 7 1.73 1"/><path d="M14 12h8"/><path d="M2 12h2"/><path d="m20.66 7-1.73 1"/><path d="m3.34 17 1.73-1"/><path d="m17 3.34-1 1.73"/><path d="m11 13.73-4 6.93"/></svg>

Before

Width:  |  Height:  |  Size: 608 B

View File

@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.0001 1.33334H4.00008C3.64646 1.33334 3.30732 1.47382 3.05727 1.72387C2.80722 1.97392 2.66675 2.31305 2.66675 2.66668V13.3333C2.66675 13.687 2.80722 14.0261 3.05727 14.2762C3.30732 14.5262 3.64646 14.6667 4.00008 14.6667H12.0001C12.3537 14.6667 12.6928 14.5262 12.9429 14.2762C13.1929 14.0261 13.3334 13.687 13.3334 13.3333V4.66668L10.0001 1.33334Z" stroke="black" stroke-width="1.33333" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M6 8H10" stroke="black" stroke-width="1.33333" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M8 10V6" stroke="black" stroke-width="1.33333" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before

Width:  |  Height:  |  Size: 762 B

View File

@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.0001 1.33334H4.00008C3.64646 1.33334 3.30732 1.47382 3.05727 1.72387C2.80722 1.97392 2.66675 2.31305 2.66675 2.66668V13.3333C2.66675 13.687 2.80722 14.0261 3.05727 14.2762C3.30732 14.5262 3.64646 14.6667 4.00008 14.6667H12.0001C12.3537 14.6667 12.6928 14.5262 12.9429 14.2762C13.1929 14.0261 13.3334 13.687 13.3334 13.3333V4.66668L10.0001 1.33334Z" stroke="black" stroke-width="1.33333" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M9.66659 6.5L6.33325 9.83333" stroke="black" stroke-width="1.33333" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M6.33325 6.5L9.66659 9.83333" stroke="black" stroke-width="1.33333" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before

Width:  |  Height:  |  Size: 804 B

View File

@@ -195,7 +195,7 @@
"ctrl-shift-g": "search::SelectPreviousMatch", "ctrl-shift-g": "search::SelectPreviousMatch",
"ctrl-alt-/": "assistant::ToggleModelSelector", "ctrl-alt-/": "assistant::ToggleModelSelector",
"ctrl-k h": "assistant::DeployHistory", "ctrl-k h": "assistant::DeployHistory",
"ctrl-k l": "assistant::OpenPromptLibrary", "ctrl-k l": "assistant::DeployPromptLibrary",
"new": "assistant::NewChat", "new": "assistant::NewChat",
"ctrl-t": "assistant::NewChat", "ctrl-t": "assistant::NewChat",
"ctrl-n": "assistant::NewChat" "ctrl-n": "assistant::NewChat"
@@ -754,8 +754,9 @@
"escape": "git_panel::ToggleFocus", "escape": "git_panel::ToggleFocus",
"ctrl-enter": "git::Commit", "ctrl-enter": "git::Commit",
"alt-enter": "menu::SecondaryConfirm", "alt-enter": "menu::SecondaryConfirm",
"delete": "git::RestoreFile",
"shift-delete": "git::RestoreFile", "shift-delete": "git::RestoreFile",
"ctrl-delete": "git::RestoreFile" "backspace": "git::RestoreFile"
} }
}, },
{ {

View File

@@ -241,7 +241,7 @@
"cmd-shift-g": "search::SelectPreviousMatch", "cmd-shift-g": "search::SelectPreviousMatch",
"cmd-alt-/": "assistant::ToggleModelSelector", "cmd-alt-/": "assistant::ToggleModelSelector",
"cmd-k h": "assistant::DeployHistory", "cmd-k h": "assistant::DeployHistory",
"cmd-k l": "assistant::OpenPromptLibrary", "cmd-k l": "assistant::DeployPromptLibrary",
"cmd-t": "assistant::NewChat", "cmd-t": "assistant::NewChat",
"cmd-n": "assistant::NewChat" "cmd-n": "assistant::NewChat"
} }
@@ -287,9 +287,7 @@
"context": "MessageEditor > Editor", "context": "MessageEditor > Editor",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"enter": "assistant2::Chat", "enter": "assistant2::Chat"
"cmd-g d": "git::Diff",
"shift-escape": "git::ExpandCommitEditor"
} }
}, },
{ {
@@ -803,7 +801,9 @@
"shift-tab": "git_panel::FocusEditor", "shift-tab": "git_panel::FocusEditor",
"escape": "git_panel::ToggleFocus", "escape": "git_panel::ToggleFocus",
"cmd-enter": "git::Commit", "cmd-enter": "git::Commit",
"cmd-backspace": "git::RestoreFile" "delete": "git::RestoreFile",
"cmd-backspace": "git::RestoreFile",
"backspace": "git::RestoreFile"
} }
}, },
{ {

View File

@@ -8,27 +8,11 @@ It will be up to you to decide which of these you are doing based on what the us
You should only perform actions that modify the users system if explicitly requested by the user: You should only perform actions that modify the users system if explicitly requested by the user:
- If the user asks a question about how to accomplish a task, provide guidance or information, and use read-only tools (e.g., search) to assist. You may suggest potential actions, but do not directly modify the users system without explicit instruction. - If the user asks a question about how to accomplish a task, provide guidance or information, and use read-only tools (e.g., search) to assist. You may suggest potential actions, but do not directly modify the users system without explicit instruction.
- If the user clearly requests that you perform an action, carry out the action directly without explaining why you are doing so. - If the user clearly requests that you perform an action, carry out the action directly without explaining why you are doing so.
- The editing actions you perform might produce errors or warnings. At the end of your changes, check whether you introduced any problems, and fix them before providing a summary of the changes you made.
- Do not fix errors unrelated to your changes unless the user explicitly asks you to do so.
Be concise and direct in your responses. Be concise and direct in your responses.
The user has opened a project that contains the following root directories/files. Whenever you specify a path in the project, it must be a relative path which begins with one of these root directories/files: The user has opened a project that contains the following root directories/files:
{{#each worktrees}} {{#each worktrees}}
- `{{root_name}}` (absolute path: `{{abs_path}}`) - {{root_name}} (absolute path: {{abs_path}})
{{/each}} {{/each}}
{{#if has_rules}}
There are rules that apply to these root directories:
{{#each worktrees}}
{{#if rules_file}}
`{{root_name}}/{{rules_file.rel_path}}`:
``````
{{{rules_file.text}}}
``````
{{/if}}
{{/each}}
{{/if}}

View File

@@ -25,7 +25,7 @@
// Features that can be globally enabled or disabled // Features that can be globally enabled or disabled
"features": { "features": {
// Which edit prediction provider to use. // Which edit prediction provider to use.
"edit_prediction_provider": "zed" "edit_prediction_provider": "copilot"
}, },
// The name of a font to use for rendering text in the editor // The name of a font to use for rendering text in the editor
"buffer_font_family": "Zed Plex Mono", "buffer_font_family": "Zed Plex Mono",
@@ -184,11 +184,6 @@
// Whether to show the signature help after completion or a bracket pair inserted. // Whether to show the signature help after completion or a bracket pair inserted.
// If `auto_signature_help` is enabled, this setting will be treated as enabled also. // If `auto_signature_help` is enabled, this setting will be treated as enabled also.
"show_signature_help_after_edits": false, "show_signature_help_after_edits": false,
// What to do when go to definition yields no results.
//
// 1. Do nothing: `none`
// 2. Find references for the same symbol: `find_all_references` (default)
"go_to_definition_fallback": "find_all_references",
// Whether to show wrap guides (vertical rulers) in the editor. // Whether to show wrap guides (vertical rulers) in the editor.
// Setting this to true will show a guide at the 'preferred_line_length' value // Setting this to true will show a guide at the 'preferred_line_length' value
// if 'soft_wrap' is set to 'preferred_line_length', and will show any // if 'soft_wrap' is set to 'preferred_line_length', and will show any
@@ -427,8 +422,6 @@
"project_panel": { "project_panel": {
// Whether to show the project panel button in the status bar // Whether to show the project panel button in the status bar
"button": true, "button": true,
// Whether to hide the gitignore entries in the project panel.
"hide_gitignore": false,
// Default width of the project panel. // Default width of the project panel.
"default_width": 240, "default_width": 240,
// Where to dock the project panel. Can be 'left' or 'right'. // Where to dock the project panel. Can be 'left' or 'right'.
@@ -621,43 +614,7 @@
"provider": "zed.dev", "provider": "zed.dev",
// The model to use. // The model to use.
"model": "claude-3-5-sonnet-latest" "model": "claude-3-5-sonnet-latest"
}, }
"profiles": {
"read-only": {
"name": "Read-only",
"tools": {
"diagnostics": true,
"fetch": true,
"list-directory": true,
"now": true,
"path-search": true,
"read-file": true,
"regex-search": true,
"thinking": true
}
},
"code-writer": {
"name": "Code Writer",
"tools": {
"bash": true,
"copy-path": true,
"create-file": true,
"delete-path": true,
"diagnostics": true,
"find-replace-file": true,
"edit-files": false,
"fetch": true,
"list-directory": true,
"move-path": true,
"now": true,
"path-search": true,
"read-file": true,
"regex-search": true,
"thinking": true
}
}
},
"notify_when_agent_waiting": true
}, },
// The settings for slash commands. // The settings for slash commands.
"slash_commands": { "slash_commands": {
@@ -1024,7 +981,7 @@
// "alternate_scroll": "on", // "alternate_scroll": "on",
// 2. Default alternate scroll mode to off // 2. Default alternate scroll mode to off
// "alternate_scroll": "off", // "alternate_scroll": "off",
"alternate_scroll": "on", "alternate_scroll": "off",
// Set whether the option key behaves as the meta key. // Set whether the option key behaves as the meta key.
// May take 2 values: // May take 2 values:
// 1. Rely on default platform handling of option key, on macOS // 1. Rely on default platform handling of option key, on macOS
@@ -1267,19 +1224,11 @@
"allowed": true "allowed": true
} }
}, },
"LaTeX": {
"format_on_save": "on",
"formatter": "language_server",
"language_servers": ["texlab", "..."],
"prettier": {
"allowed": false
}
},
"Markdown": { "Markdown": {
"format_on_save": "off", "format_on_save": "off",
"use_on_type_format": false, "use_on_type_format": false,
"allow_rewrap": "anywhere", "allow_rewrap": "anywhere",
"soft_wrap": "editor_width", "soft_wrap": "bounded",
"prettier": { "prettier": {
"allowed": true "allowed": true
} }

View File

@@ -24,16 +24,6 @@ pub struct AnthropicModelCacheConfiguration {
pub max_cache_anchors: usize, pub max_cache_anchors: usize,
} }
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum AnthropicModelMode {
#[default]
Default,
Thinking {
budget_tokens: Option<u32>,
},
}
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model { pub enum Model {
@@ -42,11 +32,6 @@ pub enum Model {
Claude3_5Sonnet, Claude3_5Sonnet,
#[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")] #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
Claude3_7Sonnet, Claude3_7Sonnet,
#[serde(
rename = "claude-3-7-sonnet-thinking",
alias = "claude-3-7-sonnet-thinking-latest"
)]
Claude3_7SonnetThinking,
#[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")] #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
Claude3_5Haiku, Claude3_5Haiku,
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")] #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
@@ -69,8 +54,6 @@ pub enum Model {
default_temperature: Option<f32>, default_temperature: Option<f32>,
#[serde(default)] #[serde(default)]
extra_beta_headers: Vec<String>, extra_beta_headers: Vec<String>,
#[serde(default)]
mode: AnthropicModelMode,
}, },
} }
@@ -78,8 +61,6 @@ impl Model {
pub fn from_id(id: &str) -> Result<Self> { pub fn from_id(id: &str) -> Result<Self> {
if id.starts_with("claude-3-5-sonnet") { if id.starts_with("claude-3-5-sonnet") {
Ok(Self::Claude3_5Sonnet) Ok(Self::Claude3_5Sonnet)
} else if id.starts_with("claude-3-7-sonnet-thinking") {
Ok(Self::Claude3_7SonnetThinking)
} else if id.starts_with("claude-3-7-sonnet") { } else if id.starts_with("claude-3-7-sonnet") {
Ok(Self::Claude3_7Sonnet) Ok(Self::Claude3_7Sonnet)
} else if id.starts_with("claude-3-5-haiku") { } else if id.starts_with("claude-3-5-haiku") {
@@ -99,20 +80,6 @@ impl Model {
match self { match self {
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest", Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest", Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
Model::Claude3Opus => "claude-3-opus-latest",
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
Model::Claude3Haiku => "claude-3-haiku-20240307",
Self::Custom { name, .. } => name,
}
}
/// The id of the model that should be used for making API requests
pub fn request_id(&self) -> &str {
match self {
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
Model::Claude3_5Haiku => "claude-3-5-haiku-latest", Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
Model::Claude3Opus => "claude-3-opus-latest", Model::Claude3Opus => "claude-3-opus-latest",
Model::Claude3Sonnet => "claude-3-sonnet-20240229", Model::Claude3Sonnet => "claude-3-sonnet-20240229",
@@ -125,7 +92,6 @@ impl Model {
match self { match self {
Self::Claude3_7Sonnet => "Claude 3.7 Sonnet", Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet", Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
Self::Claude3_5Haiku => "Claude 3.5 Haiku", Self::Claude3_5Haiku => "Claude 3.5 Haiku",
Self::Claude3Opus => "Claude 3 Opus", Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet", Self::Claude3Sonnet => "Claude 3 Sonnet",
@@ -141,7 +107,6 @@ impl Model {
Self::Claude3_5Sonnet Self::Claude3_5Sonnet
| Self::Claude3_5Haiku | Self::Claude3_5Haiku
| Self::Claude3_7Sonnet | Self::Claude3_7Sonnet
| Self::Claude3_7SonnetThinking
| Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration { | Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration {
min_total_token: 2_048, min_total_token: 2_048,
should_speculate: true, should_speculate: true,
@@ -160,7 +125,6 @@ impl Model {
Self::Claude3_5Sonnet Self::Claude3_5Sonnet
| Self::Claude3_5Haiku | Self::Claude3_5Haiku
| Self::Claude3_7Sonnet | Self::Claude3_7Sonnet
| Self::Claude3_7SonnetThinking
| Self::Claude3Opus | Self::Claude3Opus
| Self::Claude3Sonnet | Self::Claude3Sonnet
| Self::Claude3Haiku => 200_000, | Self::Claude3Haiku => 200_000,
@@ -171,10 +135,7 @@ impl Model {
pub fn max_output_tokens(&self) -> u32 { pub fn max_output_tokens(&self) -> u32 {
match self { match self {
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096, Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096,
Self::Claude3_5Sonnet Self::Claude3_5Sonnet | Self::Claude3_7Sonnet | Self::Claude3_5Haiku => 8_192,
| Self::Claude3_7Sonnet
| Self::Claude3_7SonnetThinking
| Self::Claude3_5Haiku => 8_192,
Self::Custom { Self::Custom {
max_output_tokens, .. max_output_tokens, ..
} => max_output_tokens.unwrap_or(4_096), } => max_output_tokens.unwrap_or(4_096),
@@ -185,7 +146,6 @@ impl Model {
match self { match self {
Self::Claude3_5Sonnet Self::Claude3_5Sonnet
| Self::Claude3_7Sonnet | Self::Claude3_7Sonnet
| Self::Claude3_7SonnetThinking
| Self::Claude3_5Haiku | Self::Claude3_5Haiku
| Self::Claude3Opus | Self::Claude3Opus
| Self::Claude3Sonnet | Self::Claude3Sonnet
@@ -197,21 +157,6 @@ impl Model {
} }
} }
pub fn mode(&self) -> AnthropicModelMode {
match self {
Self::Claude3_5Sonnet
| Self::Claude3_7Sonnet
| Self::Claude3_5Haiku
| Self::Claude3Opus
| Self::Claude3Sonnet
| Self::Claude3Haiku => AnthropicModelMode::Default,
Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
budget_tokens: Some(4_096),
},
Self::Custom { mode, .. } => mode.clone(),
}
}
pub const DEFAULT_BETA_HEADERS: &[&str] = &["prompt-caching-2024-07-31"]; pub const DEFAULT_BETA_HEADERS: &[&str] = &["prompt-caching-2024-07-31"];
pub fn beta_headers(&self) -> String { pub fn beta_headers(&self) -> String {
@@ -243,7 +188,7 @@ impl Model {
{ {
tool_override tool_override
} else { } else {
self.request_id() self.id()
} }
} }
} }
@@ -464,8 +409,6 @@ pub async fn extract_tool_args_from_events(
Err(error) => Some(Err(error)), Err(error) => Some(Err(error)),
Ok(Event::ContentBlockDelta { index, delta }) => match delta { Ok(Event::ContentBlockDelta { index, delta }) => match delta {
ContentDelta::TextDelta { .. } => None, ContentDelta::TextDelta { .. } => None,
ContentDelta::ThinkingDelta { .. } => None,
ContentDelta::SignatureDelta { .. } => None,
ContentDelta::InputJsonDelta { partial_json } => { ContentDelta::InputJsonDelta { partial_json } => {
if index == tool_use_index { if index == tool_use_index {
Some(Ok(partial_json)) Some(Ok(partial_json))
@@ -544,10 +487,6 @@ pub enum RequestContent {
pub enum ResponseContent { pub enum ResponseContent {
#[serde(rename = "text")] #[serde(rename = "text")]
Text { text: String }, Text { text: String },
#[serde(rename = "thinking")]
Thinking { thinking: String },
#[serde(rename = "redacted_thinking")]
RedactedThinking { data: String },
#[serde(rename = "tool_use")] #[serde(rename = "tool_use")]
ToolUse { ToolUse {
id: String, id: String,
@@ -579,12 +518,6 @@ pub enum ToolChoice {
Tool { name: String }, Tool { name: String },
} }
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum Thinking {
Enabled { budget_tokens: Option<u32> },
}
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct Request { pub struct Request {
pub model: String, pub model: String,
@@ -593,8 +526,6 @@ pub struct Request {
#[serde(default, skip_serializing_if = "Vec::is_empty")] #[serde(default, skip_serializing_if = "Vec::is_empty")]
pub tools: Vec<Tool>, pub tools: Vec<Tool>,
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(default, skip_serializing_if = "Option::is_none")]
pub thinking: Option<Thinking>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tool_choice: Option<ToolChoice>, pub tool_choice: Option<ToolChoice>,
#[serde(default, skip_serializing_if = "Option::is_none")] #[serde(default, skip_serializing_if = "Option::is_none")]
pub system: Option<String>, pub system: Option<String>,
@@ -678,10 +609,6 @@ pub enum Event {
pub enum ContentDelta { pub enum ContentDelta {
#[serde(rename = "text_delta")] #[serde(rename = "text_delta")]
TextDelta { text: String }, TextDelta { text: String },
#[serde(rename = "thinking_delta")]
ThinkingDelta { thinking: String },
#[serde(rename = "signature_delta")]
SignatureDelta { signature: String },
#[serde(rename = "input_json_delta")] #[serde(rename = "input_json_delta")]
InputJsonDelta { partial_json: String }, InputJsonDelta { partial_json: String },
} }

View File

@@ -188,7 +188,7 @@ impl AskPassSession {
} }
pub async fn run(&mut self) -> AskPassResult { pub async fn run(&mut self) -> AskPassResult {
futures::FutureExt::fuse(smol::Timer::after(Duration::from_secs(20))).await; futures::FutureExt::fuse(smol::Timer::after(Duration::from_secs(10))).await;
AskPassResult::Timedout AskPassResult::Timedout
} }
} }

View File

@@ -38,7 +38,7 @@ use workspace::{
dock::{DockPosition, Panel, PanelEvent}, dock::{DockPosition, Panel, PanelEvent},
pane, DraggedSelection, Pane, ShowConfiguration, ToggleZoom, Workspace, pane, DraggedSelection, Pane, ShowConfiguration, ToggleZoom, Workspace,
}; };
use zed_actions::assistant::{InlineAssist, OpenPromptLibrary, ToggleFocus}; use zed_actions::assistant::{DeployPromptLibrary, InlineAssist, ToggleFocus};
pub fn init(cx: &mut App) { pub fn init(cx: &mut App) {
workspace::FollowableViewRegistry::register::<ContextEditor>(cx); workspace::FollowableViewRegistry::register::<ContextEditor>(cx);
@@ -259,7 +259,7 @@ impl AssistantPanel {
menu.context(focus_handle.clone()) menu.context(focus_handle.clone())
.action("New Chat", Box::new(NewChat)) .action("New Chat", Box::new(NewChat))
.action("History", Box::new(DeployHistory)) .action("History", Box::new(DeployHistory))
.action("Prompt Library", Box::new(OpenPromptLibrary)) .action("Prompt Library", Box::new(DeployPromptLibrary))
.action("Configure", Box::new(ShowConfiguration)) .action("Configure", Box::new(ShowConfiguration))
.action(zoom_label, Box::new(ToggleZoom)) .action(zoom_label, Box::new(ToggleZoom))
})) }))
@@ -1028,7 +1028,7 @@ impl AssistantPanel {
fn deploy_prompt_library( fn deploy_prompt_library(
&mut self, &mut self,
_: &OpenPromptLibrary, _: &DeployPromptLibrary,
_window: &mut Window, _window: &mut Window,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) { ) {

View File

@@ -232,7 +232,7 @@ impl InlineAssistant {
) { ) {
let (snapshot, initial_selections) = editor.update(cx, |editor, cx| { let (snapshot, initial_selections) = editor.update(cx, |editor, cx| {
( (
editor.snapshot(window, cx), editor.buffer().read(cx).snapshot(cx),
editor.selections.all::<Point>(cx), editor.selections.all::<Point>(cx),
) )
}); });
@@ -246,37 +246,7 @@ impl InlineAssistant {
if selection.end.column == 0 { if selection.end.column == 0 {
selection.end.row -= 1; selection.end.row -= 1;
} }
selection.end.column = snapshot selection.end.column = snapshot.line_len(MultiBufferRow(selection.end.row));
.buffer_snapshot
.line_len(MultiBufferRow(selection.end.row));
} else if let Some(fold) =
snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row))
{
selection.start = fold.range().start;
selection.end = fold.range().end;
if MultiBufferRow(selection.end.row) < snapshot.buffer_snapshot.max_row() {
let chars = snapshot
.buffer_snapshot
.chars_at(Point::new(selection.end.row + 1, 0));
for c in chars {
if c == '\n' {
break;
}
if c.is_whitespace() {
continue;
}
if snapshot
.language_at(selection.end)
.is_some_and(|language| language.config().brackets.is_closing_brace(c))
{
selection.end.row += 1;
selection.end.column = snapshot
.buffer_snapshot
.line_len(MultiBufferRow(selection.end.row));
}
}
}
} }
if let Some(prev_selection) = selections.last_mut() { if let Some(prev_selection) = selections.last_mut() {
@@ -292,7 +262,6 @@ impl InlineAssistant {
} }
selections.push(selection); selections.push(selection);
} }
let snapshot = &snapshot.buffer_snapshot;
let newest_selection = newest_selection.unwrap(); let newest_selection = newest_selection.unwrap();
let mut codegen_ranges = Vec::new(); let mut codegen_ranges = Vec::new();

View File

@@ -44,7 +44,6 @@ gpui.workspace = true
heed.workspace = true heed.workspace = true
html_to_markdown.workspace = true html_to_markdown.workspace = true
http_client.workspace = true http_client.workspace = true
indexmap.workspace = true
itertools.workspace = true itertools.workspace = true
language.workspace = true language.workspace = true
language_model.workspace = true language_model.workspace = true
@@ -61,12 +60,11 @@ project.workspace = true
prompt_library.workspace = true prompt_library.workspace = true
prompt_store.workspace = true prompt_store.workspace = true
proto.workspace = true proto.workspace = true
release_channel.workspace = true
rope.workspace = true rope.workspace = true
scripting_tool.workspace = true
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true
settings.workspace = true settings.workspace = true
smallvec.workspace = true
smol.workspace = true smol.workspace = true
streaming_diff.workspace = true streaming_diff.workspace = true
telemetry.workspace = true telemetry.workspace = true

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,59 @@
use std::sync::Arc;
use collections::HashMap;
use gpui::SharedString;
/// A profile for the Zed Agent that controls its behavior.
#[derive(Debug, Clone)]
pub struct AgentProfile {
/// The name of the profile.
pub name: SharedString,
pub tools: HashMap<Arc<str>, bool>,
#[allow(dead_code)]
pub context_servers: HashMap<Arc<str>, ContextServerPreset>,
}
#[derive(Debug, Clone)]
pub struct ContextServerPreset {
#[allow(dead_code)]
pub tools: HashMap<Arc<str>, bool>,
}
impl AgentProfile {
pub fn read_only() -> Self {
Self {
name: "Read-only".into(),
tools: HashMap::from_iter([
("diagnostics".into(), true),
("fetch".into(), true),
("list-directory".into(), true),
("now".into(), true),
("path-search".into(), true),
("read-file".into(), true),
("regex-search".into(), true),
("thinking".into(), true),
]),
context_servers: HashMap::default(),
}
}
pub fn code_writer() -> Self {
Self {
name: "Code Writer".into(),
tools: HashMap::from_iter([
("bash".into(), true),
("delete-path".into(), true),
("diagnostics".into(), true),
("edit-files".into(), true),
("fetch".into(), true),
("list-directory".into(), true),
("now".into(), true),
("path-search".into(), true),
("read-file".into(), true),
("regex-search".into(), true),
("thinking".into(), true),
]),
context_servers: HashMap::default(),
}
}
}

View File

@@ -1,4 +1,5 @@
mod active_thread; mod active_thread;
mod agent_profile;
mod assistant_configuration; mod assistant_configuration;
mod assistant_model_selector; mod assistant_model_selector;
mod assistant_panel; mod assistant_panel;
@@ -32,7 +33,6 @@ use prompt_store::PromptBuilder;
use settings::Settings as _; use settings::Settings as _;
pub use crate::active_thread::ActiveThread; pub use crate::active_thread::ActiveThread;
use crate::assistant_configuration::AddContextServerModal;
pub use crate::assistant_panel::{AssistantPanel, ConcreteAssistantPanelDelegate}; pub use crate::assistant_panel::{AssistantPanel, ConcreteAssistantPanelDelegate};
pub use crate::inline_assistant::InlineAssistant; pub use crate::inline_assistant::InlineAssistant;
pub use crate::thread::{Message, RequestKind, Thread, ThreadEvent}; pub use crate::thread::{Message, RequestKind, Thread, ThreadEvent};
@@ -47,7 +47,6 @@ actions!(
RemoveAllContext, RemoveAllContext,
OpenHistory, OpenHistory,
OpenConfiguration, OpenConfiguration,
AddContextServer,
RemoveSelectedThread, RemoveSelectedThread,
Chat, Chat,
ChatMode, ChatMode,
@@ -88,7 +87,6 @@ pub fn init(
client.telemetry().clone(), client.telemetry().clone(),
cx, cx,
); );
cx.observe_new(AddContextServerModal::register).detach();
feature_gate_assistant2_actions(cx); feature_gate_assistant2_actions(cx);
} }

View File

@@ -1,5 +1,3 @@
mod add_context_server_modal;
use std::sync::Arc; use std::sync::Arc;
use assistant_tool::{ToolSource, ToolWorkingSet}; use assistant_tool::{ToolSource, ToolWorkingSet};
@@ -7,14 +5,13 @@ use collections::HashMap;
use context_server::manager::ContextServerManager; use context_server::manager::ContextServerManager;
use gpui::{Action, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, Subscription}; use gpui::{Action, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry}; use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
use ui::{prelude::*, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Switch}; use ui::{
prelude::*, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Switch, Tooltip,
};
use util::ResultExt as _; use util::ResultExt as _;
use zed_actions::assistant::DeployPromptLibrary;
use zed_actions::ExtensionCategoryFilter; use zed_actions::ExtensionCategoryFilter;
pub(crate) use add_context_server_modal::AddContextServerModal;
use crate::AddContextServer;
pub struct AssistantConfiguration { pub struct AssistantConfiguration {
focus_handle: FocusHandle, focus_handle: FocusHandle,
configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>, configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>,
@@ -173,6 +170,7 @@ impl AssistantConfiguration {
v_flex() v_flex()
.p(DynamicSpacing::Base16.rems(cx)) .p(DynamicSpacing::Base16.rems(cx))
.mt_1()
.gap_2() .gap_2()
.flex_1() .flex_1()
.child( .child(
@@ -197,7 +195,6 @@ impl AssistantConfiguration {
let tool_count = tools.len(); let tool_count = tools.len();
v_flex() v_flex()
.id(SharedString::from(context_server.id()))
.border_1() .border_1()
.rounded_sm() .rounded_sm()
.border_color(cx.theme().colors().border) .border_color(cx.theme().colors().border)
@@ -311,9 +308,8 @@ impl AssistantConfiguration {
.icon(IconName::Plus) .icon(IconName::Plus)
.icon_size(IconSize::Small) .icon_size(IconSize::Small)
.icon_position(IconPosition::Start) .icon_position(IconPosition::Start)
.on_click(|_event, window, cx| { .disabled(true)
window.dispatch_action(AddContextServer.boxed_clone(), cx) .tooltip(Tooltip::text("Not yet implemented")),
}),
), ),
) )
.child( .child(
@@ -355,6 +351,33 @@ impl Render for AssistantConfiguration {
.bg(cx.theme().colors().panel_background) .bg(cx.theme().colors().panel_background)
.size_full() .size_full()
.overflow_y_scroll() .overflow_y_scroll()
.child(
v_flex()
.p(DynamicSpacing::Base16.rems(cx))
.gap_2()
.child(
v_flex()
.gap_0p5()
.child(Headline::new("Prompt Library").size(HeadlineSize::Small))
.child(
Label::new("Create reusable prompts and tag which ones you want sent in every LLM interaction.")
.color(Color::Muted),
),
)
.child(
Button::new("open-prompt-library", "Open Prompt Library")
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.full_width()
.icon(IconName::Book)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.on_click(|_event, window, cx| {
window.dispatch_action(DeployPromptLibrary.boxed_clone(), cx)
}),
),
)
.child(Divider::horizontal().color(DividerColor::Border))
.child(self.render_context_servers_section(cx)) .child(self.render_context_servers_section(cx))
.child(Divider::horizontal().color(DividerColor::Border)) .child(Divider::horizontal().color(DividerColor::Border))
.child( .child(

View File

@@ -1,164 +0,0 @@
use context_server::{ContextServerSettings, ServerCommand, ServerConfig};
use editor::Editor;
use gpui::{prelude::*, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, WeakEntity};
use serde_json::json;
use settings::update_settings_file;
use ui::{prelude::*, Modal, ModalFooter, ModalHeader, Section, Tooltip};
use workspace::{ModalView, Workspace};
use crate::AddContextServer;
pub struct AddContextServerModal {
workspace: WeakEntity<Workspace>,
name_editor: Entity<Editor>,
command_editor: Entity<Editor>,
}
impl AddContextServerModal {
pub fn register(
workspace: &mut Workspace,
_window: Option<&mut Window>,
_cx: &mut Context<Workspace>,
) {
workspace.register_action(|workspace, _: &AddContextServer, window, cx| {
let workspace_handle = cx.entity().downgrade();
workspace.toggle_modal(window, cx, |window, cx| {
Self::new(workspace_handle, window, cx)
})
});
}
pub fn new(
workspace: WeakEntity<Workspace>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let name_editor = cx.new(|cx| Editor::single_line(window, cx));
let command_editor = cx.new(|cx| Editor::single_line(window, cx));
name_editor.update(cx, |editor, cx| {
editor.set_placeholder_text("Context server name", cx);
});
command_editor.update(cx, |editor, cx| {
editor.set_placeholder_text("Command to run the context server", cx);
});
Self {
name_editor,
command_editor,
workspace,
}
}
fn confirm(&mut self, cx: &mut Context<Self>) {
let name = self.name_editor.read(cx).text(cx).trim().to_string();
let command = self.command_editor.read(cx).text(cx).trim().to_string();
if name.is_empty() || command.is_empty() {
return;
}
let mut command_parts = command.split(' ').map(|part| part.trim().to_string());
let Some(path) = command_parts.next() else {
return;
};
let args = command_parts.collect::<Vec<_>>();
if let Some(workspace) = self.workspace.upgrade() {
workspace.update(cx, |workspace, cx| {
let fs = workspace.app_state().fs.clone();
update_settings_file::<ContextServerSettings>(fs.clone(), cx, |settings, _| {
settings.context_servers.insert(
name.into(),
ServerConfig {
command: Some(ServerCommand {
path,
args,
env: None,
}),
settings: Some(json!({})),
},
);
});
});
}
cx.emit(DismissEvent);
}
fn cancel(&mut self, cx: &mut Context<Self>) {
cx.emit(DismissEvent);
}
}
impl ModalView for AddContextServerModal {}
impl Focusable for AddContextServerModal {
fn focus_handle(&self, cx: &App) -> FocusHandle {
self.name_editor.focus_handle(cx).clone()
}
}
impl EventEmitter<DismissEvent> for AddContextServerModal {}
impl Render for AddContextServerModal {
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let is_name_empty = self.name_editor.read(cx).text(cx).trim().is_empty();
let is_command_empty = self.command_editor.read(cx).text(cx).trim().is_empty();
div()
.elevation_3(cx)
.w(rems(34.))
.key_context("AddContextServerModal")
.on_action(cx.listener(|this, _: &menu::Cancel, _window, cx| this.cancel(cx)))
.on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| this.confirm(cx)))
.capture_any_mouse_down(cx.listener(|this, _, window, cx| {
this.focus_handle(cx).focus(window);
}))
.on_mouse_down_out(cx.listener(|_this, _, _, cx| cx.emit(DismissEvent)))
.child(
Modal::new("add-context-server", None)
.header(ModalHeader::new().headline("Add Context Server"))
.section(
Section::new()
.child(
v_flex()
.gap_1()
.child(Label::new("Name"))
.child(self.name_editor.clone()),
)
.child(
v_flex()
.gap_1()
.child(Label::new("Command"))
.child(self.command_editor.clone()),
),
)
.footer(
ModalFooter::new()
.start_slot(
Button::new("cancel", "Cancel").on_click(
cx.listener(|this, _event, _window, cx| this.cancel(cx)),
),
)
.end_slot(
Button::new("add-server", "Add Server")
.disabled(is_name_empty || is_command_empty)
.map(|button| {
if is_name_empty {
button.tooltip(Tooltip::text("Name is required"))
} else if is_command_empty {
button.tooltip(Tooltip::text("Command is required"))
} else {
button
}
})
.on_click(
cx.listener(|this, _event, _window, cx| this.confirm(cx)),
),
),
),
)
}
}

View File

@@ -14,9 +14,9 @@ use client::zed_urls;
use editor::{Editor, MultiBuffer}; use editor::{Editor, MultiBuffer};
use fs::Fs; use fs::Fs;
use gpui::{ use gpui::{
action_with_deprecated_aliases, prelude::*, Action, AnyElement, App, AsyncWindowContext, prelude::*, Action, AnyElement, App, AsyncWindowContext, Corner, Entity, EventEmitter,
Corner, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, KeyContext, Pixels, FocusHandle, Focusable, FontWeight, KeyContext, Pixels, Subscription, Task, UpdateGlobal,
Subscription, Task, UpdateGlobal, WeakEntity, WeakEntity,
}; };
use language::LanguageRegistry; use language::LanguageRegistry;
use language_model::{LanguageModelProviderTosView, LanguageModelRegistry}; use language_model::{LanguageModelProviderTosView, LanguageModelRegistry};
@@ -29,7 +29,7 @@ use ui::{prelude::*, ContextMenu, KeyBinding, PopoverMenu, PopoverMenuHandle, Ta
use util::ResultExt as _; use util::ResultExt as _;
use workspace::dock::{DockPosition, Panel, PanelEvent}; use workspace::dock::{DockPosition, Panel, PanelEvent};
use workspace::Workspace; use workspace::Workspace;
use zed_actions::assistant::ToggleFocus; use zed_actions::assistant::{DeployPromptLibrary, ToggleFocus};
use crate::active_thread::ActiveThread; use crate::active_thread::ActiveThread;
use crate::assistant_configuration::{AssistantConfiguration, AssistantConfigurationEvent}; use crate::assistant_configuration::{AssistantConfiguration, AssistantConfigurationEvent};
@@ -43,12 +43,6 @@ use crate::{
OpenHistory, OpenHistory,
}; };
action_with_deprecated_aliases!(
assistant,
OpenPromptLibrary,
["assistant::DeployPromptLibrary"]
);
pub fn init(cx: &mut App) { pub fn init(cx: &mut App) {
cx.observe_new( cx.observe_new(
|workspace: &mut Workspace, _window, _cx: &mut Context<Workspace>| { |workspace: &mut Workspace, _window, _cx: &mut Context<Workspace>| {
@@ -71,14 +65,6 @@ pub fn init(cx: &mut App) {
panel.update(cx, |panel, cx| panel.new_prompt_editor(window, cx)); panel.update(cx, |panel, cx| panel.new_prompt_editor(window, cx));
} }
}) })
.register_action(|workspace, _: &OpenPromptLibrary, window, cx| {
if let Some(panel) = workspace.panel::<AssistantPanel>(cx) {
workspace.focus_panel::<AssistantPanel>(window, cx);
panel.update(cx, |panel, cx| {
panel.deploy_prompt_library(&OpenPromptLibrary, window, cx)
});
}
})
.register_action(|workspace, _: &OpenConfiguration, window, cx| { .register_action(|workspace, _: &OpenConfiguration, window, cx| {
if let Some(panel) = workspace.panel::<AssistantPanel>(cx) { if let Some(panel) = workspace.panel::<AssistantPanel>(cx) {
workspace.focus_panel::<AssistantPanel>(window, cx); workspace.focus_panel::<AssistantPanel>(window, cx);
@@ -188,7 +174,6 @@ impl AssistantPanel {
thread_store.clone(), thread_store.clone(),
language_registry.clone(), language_registry.clone(),
message_editor_context_store.clone(), message_editor_context_store.clone(),
workspace.clone(),
window, window,
cx, cx,
) )
@@ -267,7 +252,6 @@ impl AssistantPanel {
self.thread_store.clone(), self.thread_store.clone(),
self.language_registry.clone(), self.language_registry.clone(),
message_editor_context_store.clone(), message_editor_context_store.clone(),
self.workspace.clone(),
window, window,
cx, cx,
) )
@@ -317,7 +301,7 @@ impl AssistantPanel {
fn deploy_prompt_library( fn deploy_prompt_library(
&mut self, &mut self,
_: &OpenPromptLibrary, _: &DeployPromptLibrary,
_window: &mut Window, _window: &mut Window,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) { ) {
@@ -405,7 +389,6 @@ impl AssistantPanel {
this.thread_store.clone(), this.thread_store.clone(),
this.language_registry.clone(), this.language_registry.clone(),
message_editor_context_store.clone(), message_editor_context_store.clone(),
this.workspace.clone(),
window, window,
cx, cx,
) )
@@ -472,7 +455,7 @@ impl AssistantPanel {
workspace.update_in(cx, |workspace, window, cx| { workspace.update_in(cx, |workspace, window, cx| {
let thread = thread.read(cx); let thread = thread.read(cx);
let markdown = thread.to_markdown(cx)?; let markdown = thread.to_markdown()?;
let thread_summary = thread let thread_summary = thread
.summary() .summary()
.map(|summary| summary.to_string()) .map(|summary| summary.to_string())
@@ -939,8 +922,8 @@ impl AssistantPanel {
ThreadError::MaxMonthlySpendReached => { ThreadError::MaxMonthlySpendReached => {
self.render_max_monthly_spend_reached_error(cx) self.render_max_monthly_spend_reached_error(cx)
} }
ThreadError::Message { header, message } => { ThreadError::Message(error_message) => {
self.render_error_message(header, message, cx) self.render_error_message(&error_message, cx)
} }
}) })
.into_any(), .into_any(),
@@ -1043,8 +1026,7 @@ impl AssistantPanel {
fn render_error_message( fn render_error_message(
&self, &self,
header: SharedString, error_message: &SharedString,
message: SharedString,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> AnyElement { ) -> AnyElement {
v_flex() v_flex()
@@ -1054,14 +1036,17 @@ impl AssistantPanel {
.gap_1p5() .gap_1p5()
.items_center() .items_center()
.child(Icon::new(IconName::XCircle).color(Color::Error)) .child(Icon::new(IconName::XCircle).color(Color::Error))
.child(Label::new(header).weight(FontWeight::MEDIUM)), .child(
Label::new("Error interacting with language model")
.weight(FontWeight::MEDIUM),
),
) )
.child( .child(
div() div()
.id("error-message") .id("error-message")
.max_h_32() .max_h_32()
.overflow_y_scroll() .overflow_y_scroll()
.child(Label::new(message)), .child(Label::new(error_message.clone())),
) )
.child( .child(
h_flex() h_flex()

View File

@@ -410,7 +410,7 @@ impl CodegenAlternative {
let mut request_message = LanguageModelRequestMessage { let mut request_message = LanguageModelRequestMessage {
role: Role::User, role: Role::User,
content: Vec::new(), content: Vec::new(),
cache: true, cache: false,
}; };
if let Some(context_store) = &self.context_store { if let Some(context_store) = &self.context_store {

View File

@@ -1,28 +1,19 @@
mod completion_provider;
mod fetch_context_picker; mod fetch_context_picker;
mod file_context_picker; mod file_context_picker;
mod thread_context_picker; mod thread_context_picker;
use std::ops::Range;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use editor::display_map::{Crease, FoldId}; use editor::Editor;
use editor::{Anchor, AnchorRangeExt as _, Editor, ExcerptId, FoldPlaceholder, ToOffset};
use file_context_picker::render_file_context_entry; use file_context_picker::render_file_context_entry;
use gpui::{ use gpui::{App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity};
App, DismissEvent, Empty, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity,
};
use multi_buffer::MultiBufferRow;
use project::ProjectPath; use project::ProjectPath;
use thread_context_picker::{render_thread_context_entry, ThreadContextEntry}; use thread_context_picker::{render_thread_context_entry, ThreadContextEntry};
use ui::{ use ui::{prelude::*, ContextMenu, ContextMenuEntry, ContextMenuItem};
prelude::*, ButtonLike, ContextMenu, ContextMenuEntry, ContextMenuItem, Disclosure, TintColor,
};
use workspace::{notifications::NotifyResultExt, Workspace}; use workspace::{notifications::NotifyResultExt, Workspace};
pub use crate::context_picker::completion_provider::ContextPickerCompletionProvider;
use crate::context_picker::fetch_context_picker::FetchContextPicker; use crate::context_picker::fetch_context_picker::FetchContextPicker;
use crate::context_picker::file_context_picker::FileContextPicker; use crate::context_picker::file_context_picker::FileContextPicker;
use crate::context_picker::thread_context_picker::ThreadContextPicker; use crate::context_picker::thread_context_picker::ThreadContextPicker;
@@ -43,31 +34,10 @@ enum ContextPickerMode {
Thread, Thread,
} }
impl TryFrom<&str> for ContextPickerMode {
type Error = String;
fn try_from(value: &str) -> Result<Self, Self::Error> {
match value {
"file" => Ok(Self::File),
"fetch" => Ok(Self::Fetch),
"thread" => Ok(Self::Thread),
_ => Err(format!("Invalid context picker mode: {}", value)),
}
}
}
impl ContextPickerMode { impl ContextPickerMode {
pub fn mention_prefix(&self) -> &'static str {
match self {
Self::File => "file",
Self::Fetch => "fetch",
Self::Thread => "thread",
}
}
pub fn label(&self) -> &'static str { pub fn label(&self) -> &'static str {
match self { match self {
Self::File => "Files & Directories", Self::File => "File/Directory",
Self::Fetch => "Fetch", Self::Fetch => "Fetch",
Self::Thread => "Thread", Self::Thread => "Thread",
} }
@@ -93,6 +63,7 @@ enum ContextPickerState {
pub(super) struct ContextPicker { pub(super) struct ContextPicker {
mode: ContextPickerState, mode: ContextPickerState,
workspace: WeakEntity<Workspace>, workspace: WeakEntity<Workspace>,
editor: WeakEntity<Editor>,
context_store: WeakEntity<ContextStore>, context_store: WeakEntity<ContextStore>,
thread_store: Option<WeakEntity<ThreadStore>>, thread_store: Option<WeakEntity<ThreadStore>>,
confirm_behavior: ConfirmBehavior, confirm_behavior: ConfirmBehavior,
@@ -103,6 +74,7 @@ impl ContextPicker {
workspace: WeakEntity<Workspace>, workspace: WeakEntity<Workspace>,
thread_store: Option<WeakEntity<ThreadStore>>, thread_store: Option<WeakEntity<ThreadStore>>,
context_store: WeakEntity<ContextStore>, context_store: WeakEntity<ContextStore>,
editor: WeakEntity<Editor>,
confirm_behavior: ConfirmBehavior, confirm_behavior: ConfirmBehavior,
window: &mut Window, window: &mut Window,
cx: &mut Context<Self>, cx: &mut Context<Self>,
@@ -116,6 +88,7 @@ impl ContextPicker {
workspace, workspace,
context_store, context_store,
thread_store, thread_store,
editor,
confirm_behavior, confirm_behavior,
} }
} }
@@ -136,7 +109,10 @@ impl ContextPicker {
.enumerate() .enumerate()
.map(|(ix, entry)| self.recent_menu_item(context_picker.clone(), ix, entry)); .map(|(ix, entry)| self.recent_menu_item(context_picker.clone(), ix, entry));
let modes = supported_context_picker_modes(&self.thread_store); let mut modes = vec![ContextPickerMode::File, ContextPickerMode::Fetch];
if self.allow_threads() {
modes.push(ContextPickerMode::Thread);
}
let menu = menu let menu = menu
.when(has_recent, |menu| { .when(has_recent, |menu| {
@@ -198,6 +174,7 @@ impl ContextPicker {
FileContextPicker::new( FileContextPicker::new(
context_picker.clone(), context_picker.clone(),
self.workspace.clone(), self.workspace.clone(),
self.editor.clone(),
self.context_store.clone(), self.context_store.clone(),
self.confirm_behavior, self.confirm_behavior,
window, window,
@@ -301,7 +278,7 @@ impl ContextPicker {
}; };
let task = context_store.update(cx, |context_store, cx| { let task = context_store.update(cx, |context_store, cx| {
context_store.add_file_from_path(project_path.clone(), true, cx) context_store.add_file_from_path(project_path.clone(), cx)
}); });
cx.spawn_in(window, async move |_, cx| task.await.notify_async_err(cx)) cx.spawn_in(window, async move |_, cx| task.await.notify_async_err(cx))
@@ -331,7 +308,7 @@ impl ContextPicker {
cx.spawn(async move |this, cx| { cx.spawn(async move |this, cx| {
let thread = open_thread_task.await?; let thread = open_thread_task.await?;
context_store.update(cx, |context_store, cx| { context_store.update(cx, |context_store, cx| {
context_store.add_thread(thread, true, cx); context_store.add_thread(thread, cx);
})?; })?;
this.update(cx, |_this, cx| cx.notify()) this.update(cx, |_this, cx| cx.notify())
@@ -351,7 +328,7 @@ impl ContextPicker {
let mut current_files = context_store.file_paths(cx); let mut current_files = context_store.file_paths(cx);
if let Some(active_path) = active_singleton_buffer_path(&workspace, cx) { if let Some(active_path) = Self::active_singleton_buffer_path(&workspace, cx) {
current_files.insert(active_path); current_files.insert(active_path);
} }
@@ -407,6 +384,16 @@ impl ContextPicker {
recent recent
} }
fn active_singleton_buffer_path(workspace: &Workspace, cx: &App) -> Option<PathBuf> {
let active_item = workspace.active_item(cx)?;
let editor = active_item.to_any().downcast::<Editor>().ok()?.read(cx);
let buffer = editor.buffer().read(cx).as_singleton()?;
let path = buffer.read(cx).file()?.path().to_path_buf();
Some(path)
}
} }
impl EventEmitter<DismissEvent> for ContextPicker {} impl EventEmitter<DismissEvent> for ContextPicker {}
@@ -442,212 +429,3 @@ enum RecentEntry {
}, },
Thread(ThreadContextEntry), Thread(ThreadContextEntry),
} }
fn supported_context_picker_modes(
thread_store: &Option<WeakEntity<ThreadStore>>,
) -> Vec<ContextPickerMode> {
let mut modes = vec![ContextPickerMode::File, ContextPickerMode::Fetch];
if thread_store.is_some() {
modes.push(ContextPickerMode::Thread);
}
modes
}
fn active_singleton_buffer_path(workspace: &Workspace, cx: &App) -> Option<PathBuf> {
let active_item = workspace.active_item(cx)?;
let editor = active_item.to_any().downcast::<Editor>().ok()?.read(cx);
let buffer = editor.buffer().read(cx).as_singleton()?;
let path = buffer.read(cx).file()?.path().to_path_buf();
Some(path)
}
fn recent_context_picker_entries(
context_store: Entity<ContextStore>,
thread_store: Option<WeakEntity<ThreadStore>>,
workspace: Entity<Workspace>,
cx: &App,
) -> Vec<RecentEntry> {
let mut recent = Vec::with_capacity(6);
let mut current_files = context_store.read(cx).file_paths(cx);
let workspace = workspace.read(cx);
if let Some(active_path) = active_singleton_buffer_path(workspace, cx) {
current_files.insert(active_path);
}
let project = workspace.project().read(cx);
recent.extend(
workspace
.recent_navigation_history_iter(cx)
.filter(|(path, _)| !current_files.contains(&path.path.to_path_buf()))
.take(4)
.filter_map(|(project_path, _)| {
project
.worktree_for_id(project_path.worktree_id, cx)
.map(|worktree| RecentEntry::File {
project_path,
path_prefix: worktree.read(cx).root_name().into(),
})
}),
);
let mut current_threads = context_store.read(cx).thread_ids();
if let Some(active_thread) = workspace
.panel::<AssistantPanel>(cx)
.map(|panel| panel.read(cx).active_thread(cx))
{
current_threads.insert(active_thread.read(cx).id().clone());
}
if let Some(thread_store) = thread_store.and_then(|thread_store| thread_store.upgrade()) {
recent.extend(
thread_store
.read(cx)
.threads()
.into_iter()
.filter(|thread| !current_threads.contains(&thread.id))
.take(2)
.map(|thread| {
RecentEntry::Thread(ThreadContextEntry {
id: thread.id,
summary: thread.summary,
})
}),
);
}
recent
}
pub(crate) fn insert_crease_for_mention(
excerpt_id: ExcerptId,
crease_start: text::Anchor,
content_len: usize,
crease_label: SharedString,
crease_icon_path: SharedString,
editor_entity: Entity<Editor>,
window: &mut Window,
cx: &mut App,
) {
editor_entity.update(cx, |editor, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
let Some(start) = snapshot.anchor_in_excerpt(excerpt_id, crease_start) else {
return;
};
let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len);
let placeholder = FoldPlaceholder {
render: render_fold_icon_button(
crease_icon_path,
crease_label,
editor_entity.downgrade(),
),
..Default::default()
};
let render_trailer =
move |_row, _unfold, _window: &mut Window, _cx: &mut App| Empty.into_any();
let crease = Crease::inline(
start..end,
placeholder.clone(),
fold_toggle("mention"),
render_trailer,
);
editor.insert_creases(vec![crease.clone()], cx);
editor.fold_creases(vec![crease], false, window, cx);
});
}
fn render_fold_icon_button(
icon_path: SharedString,
label: SharedString,
editor: WeakEntity<Editor>,
) -> Arc<dyn Send + Sync + Fn(FoldId, Range<Anchor>, &mut App) -> AnyElement> {
Arc::new({
move |fold_id, fold_range, cx| {
let is_in_text_selection = editor.upgrade().is_some_and(|editor| {
editor.update(cx, |editor, cx| {
let snapshot = editor
.buffer()
.update(cx, |multi_buffer, cx| multi_buffer.snapshot(cx));
let is_in_pending_selection = || {
editor
.selections
.pending
.as_ref()
.is_some_and(|pending_selection| {
pending_selection
.selection
.range()
.includes(&fold_range, &snapshot)
})
};
let mut is_in_complete_selection = || {
editor
.selections
.disjoint_in_range::<usize>(fold_range.clone(), cx)
.into_iter()
.any(|selection| {
// This is needed to cover a corner case, if we just check for an existing
// selection in the fold range, having a cursor at the start of the fold
// marks it as selected. Non-empty selections don't cause this.
let length = selection.end - selection.start;
length > 0
})
};
is_in_pending_selection() || is_in_complete_selection()
})
});
ButtonLike::new(fold_id)
.style(ButtonStyle::Filled)
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
.toggle_state(is_in_text_selection)
.child(
h_flex()
.gap_1()
.child(
Icon::from_path(icon_path.clone())
.size(IconSize::Small)
.color(Color::Muted),
)
.child(
Label::new(label.clone())
.size(LabelSize::Small)
.single_line(),
),
)
.into_any_element()
}
})
}
fn fold_toggle(
name: &'static str,
) -> impl Fn(
MultiBufferRow,
bool,
Arc<dyn Fn(bool, &mut Window, &mut App) + Send + Sync>,
&mut Window,
&mut App,
) -> AnyElement {
move |row, is_folded, fold, _window, _cx| {
Disclosure::new((name, row.0 as u64), !is_folded)
.toggle_state(is_folded)
.on_click(move |_e, window, cx| fold(!is_folded, window, cx))
.into_any_element()
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -81,80 +81,77 @@ impl FetchContextPickerDelegate {
url: String::new(), url: String::new(),
} }
} }
}
pub(crate) async fn fetch_url_content( async fn build_message(http_client: Arc<HttpClientWithUrl>, url: String) -> Result<String> {
http_client: Arc<HttpClientWithUrl>, let url = if !url.starts_with("https://") && !url.starts_with("http://") {
url: String, format!("https://{url}")
) -> Result<String> { } else {
let url = if !url.starts_with("https://") && !url.starts_with("http://") { url
format!("https://{url}") };
} else {
url
};
let mut response = http_client.get(&url, AsyncBody::default(), true).await?; let mut response = http_client.get(&url, AsyncBody::default(), true).await?;
let mut body = Vec::new(); let mut body = Vec::new();
response response
.body_mut() .body_mut()
.read_to_end(&mut body) .read_to_end(&mut body)
.await .await
.context("error reading response body")?; .context("error reading response body")?;
if response.status().is_client_error() { if response.status().is_client_error() {
let text = String::from_utf8_lossy(body.as_slice()); let text = String::from_utf8_lossy(body.as_slice());
bail!( bail!(
"status error {}, response: {text:?}", "status error {}, response: {text:?}",
response.status().as_u16() response.status().as_u16()
); );
}
let Some(content_type) = response.headers().get("content-type") else {
bail!("missing Content-Type header");
};
let content_type = content_type
.to_str()
.context("invalid Content-Type header")?;
let content_type = match content_type {
"text/html" => ContentType::Html,
"text/plain" => ContentType::Plaintext,
"application/json" => ContentType::Json,
_ => ContentType::Html,
};
match content_type {
ContentType::Html => {
let mut handlers: Vec<TagHandler> = vec![
Rc::new(RefCell::new(markdown::WebpageChromeRemover)),
Rc::new(RefCell::new(markdown::ParagraphHandler)),
Rc::new(RefCell::new(markdown::HeadingHandler)),
Rc::new(RefCell::new(markdown::ListHandler)),
Rc::new(RefCell::new(markdown::TableHandler::new())),
Rc::new(RefCell::new(markdown::StyledTextHandler)),
];
if url.contains("wikipedia.org") {
use html_to_markdown::structure::wikipedia;
handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaChromeRemover)));
handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaInfoboxHandler)));
handlers.push(Rc::new(
RefCell::new(wikipedia::WikipediaCodeHandler::new()),
));
} else {
handlers.push(Rc::new(RefCell::new(markdown::CodeHandler)));
}
convert_html_to_markdown(&body[..], &mut handlers)
} }
ContentType::Plaintext => Ok(std::str::from_utf8(&body)?.to_owned()),
ContentType::Json => {
let json: serde_json::Value = serde_json::from_slice(&body)?;
Ok(format!( let Some(content_type) = response.headers().get("content-type") else {
"```json\n{}\n```", bail!("missing Content-Type header");
serde_json::to_string_pretty(&json)? };
)) let content_type = content_type
.to_str()
.context("invalid Content-Type header")?;
let content_type = match content_type {
"text/html" => ContentType::Html,
"text/plain" => ContentType::Plaintext,
"application/json" => ContentType::Json,
_ => ContentType::Html,
};
match content_type {
ContentType::Html => {
let mut handlers: Vec<TagHandler> = vec![
Rc::new(RefCell::new(markdown::WebpageChromeRemover)),
Rc::new(RefCell::new(markdown::ParagraphHandler)),
Rc::new(RefCell::new(markdown::HeadingHandler)),
Rc::new(RefCell::new(markdown::ListHandler)),
Rc::new(RefCell::new(markdown::TableHandler::new())),
Rc::new(RefCell::new(markdown::StyledTextHandler)),
];
if url.contains("wikipedia.org") {
use html_to_markdown::structure::wikipedia;
handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaChromeRemover)));
handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaInfoboxHandler)));
handlers.push(Rc::new(
RefCell::new(wikipedia::WikipediaCodeHandler::new()),
));
} else {
handlers.push(Rc::new(RefCell::new(markdown::CodeHandler)));
}
convert_html_to_markdown(&body[..], &mut handlers)
}
ContentType::Plaintext => Ok(std::str::from_utf8(&body)?.to_owned()),
ContentType::Json => {
let json: serde_json::Value = serde_json::from_slice(&body)?;
Ok(format!(
"```json\n{}\n```",
serde_json::to_string_pretty(&json)?
))
}
} }
} }
} }
@@ -211,7 +208,7 @@ impl PickerDelegate for FetchContextPickerDelegate {
let confirm_behavior = self.confirm_behavior; let confirm_behavior = self.confirm_behavior;
cx.spawn_in(window, async move |this, cx| { cx.spawn_in(window, async move |this, cx| {
let text = cx let text = cx
.background_spawn(fetch_url_content(http_client, url.clone())) .background_spawn(Self::build_message(http_client, url.clone()))
.await?; .await?;
this.update_in(cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {

View File

@@ -1,15 +1,25 @@
use std::collections::BTreeSet;
use std::ops::Range;
use std::path::Path; use std::path::Path;
use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicBool;
use std::sync::Arc; use std::sync::Arc;
use editor::actions::FoldAt;
use editor::display_map::{Crease, FoldId};
use editor::scroll::Autoscroll;
use editor::{Anchor, AnchorRangeExt, Editor, FoldPlaceholder, ToPoint};
use file_icons::FileIcons; use file_icons::FileIcons;
use fuzzy::PathMatch; use fuzzy::PathMatch;
use gpui::{ use gpui::{
App, AppContext, DismissEvent, Entity, FocusHandle, Focusable, Stateful, Task, WeakEntity, AnyElement, App, AppContext, DismissEvent, Empty, Entity, FocusHandle, Focusable, Stateful,
Task, WeakEntity,
}; };
use multi_buffer::{MultiBufferPoint, MultiBufferRow};
use picker::{Picker, PickerDelegate}; use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, ProjectPath, WorktreeId}; use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
use ui::{prelude::*, ListItem, Tooltip}; use rope::Point;
use text::SelectionGoal;
use ui::{prelude::*, ButtonLike, Disclosure, ListItem, TintColor, Tooltip};
use util::ResultExt as _; use util::ResultExt as _;
use workspace::{notifications::NotifyResultExt, Workspace}; use workspace::{notifications::NotifyResultExt, Workspace};
@@ -24,6 +34,7 @@ impl FileContextPicker {
pub fn new( pub fn new(
context_picker: WeakEntity<ContextPicker>, context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>, workspace: WeakEntity<Workspace>,
editor: WeakEntity<Editor>,
context_store: WeakEntity<ContextStore>, context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior, confirm_behavior: ConfirmBehavior,
window: &mut Window, window: &mut Window,
@@ -32,6 +43,7 @@ impl FileContextPicker {
let delegate = FileContextPickerDelegate::new( let delegate = FileContextPickerDelegate::new(
context_picker, context_picker,
workspace, workspace,
editor,
context_store, context_store,
confirm_behavior, confirm_behavior,
); );
@@ -56,6 +68,7 @@ impl Render for FileContextPicker {
pub struct FileContextPickerDelegate { pub struct FileContextPickerDelegate {
context_picker: WeakEntity<ContextPicker>, context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>, workspace: WeakEntity<Workspace>,
editor: WeakEntity<Editor>,
context_store: WeakEntity<ContextStore>, context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior, confirm_behavior: ConfirmBehavior,
matches: Vec<PathMatch>, matches: Vec<PathMatch>,
@@ -66,18 +79,95 @@ impl FileContextPickerDelegate {
pub fn new( pub fn new(
context_picker: WeakEntity<ContextPicker>, context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>, workspace: WeakEntity<Workspace>,
editor: WeakEntity<Editor>,
context_store: WeakEntity<ContextStore>, context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior, confirm_behavior: ConfirmBehavior,
) -> Self { ) -> Self {
Self { Self {
context_picker, context_picker,
workspace, workspace,
editor,
context_store, context_store,
confirm_behavior, confirm_behavior,
matches: Vec::new(), matches: Vec::new(),
selected_index: 0, selected_index: 0,
} }
} }
fn search(
&mut self,
query: String,
cancellation_flag: Arc<AtomicBool>,
workspace: &Entity<Workspace>,
cx: &mut Context<Picker<Self>>,
) -> Task<Vec<PathMatch>> {
if query.is_empty() {
let workspace = workspace.read(cx);
let project = workspace.project().read(cx);
let recent_matches = workspace
.recent_navigation_history(Some(10), cx)
.into_iter()
.filter_map(|(project_path, _)| {
let worktree = project.worktree_for_id(project_path.worktree_id, cx)?;
Some(PathMatch {
score: 0.,
positions: Vec::new(),
worktree_id: project_path.worktree_id.to_usize(),
path: project_path.path,
path_prefix: worktree.read(cx).root_name().into(),
distance_to_relative_ancestor: 0,
is_dir: false,
})
});
let file_matches = project.worktrees(cx).flat_map(|worktree| {
let worktree = worktree.read(cx);
let path_prefix: Arc<str> = worktree.root_name().into();
worktree.entries(false, 0).map(move |entry| PathMatch {
score: 0.,
positions: Vec::new(),
worktree_id: worktree.id().to_usize(),
path: entry.path.clone(),
path_prefix: path_prefix.clone(),
distance_to_relative_ancestor: 0,
is_dir: entry.is_dir(),
})
});
Task::ready(recent_matches.chain(file_matches).collect())
} else {
let worktrees = workspace.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
let candidate_sets = worktrees
.into_iter()
.map(|worktree| {
let worktree = worktree.read(cx);
PathMatchCandidateSet {
snapshot: worktree.snapshot(),
include_ignored: worktree
.root_entry()
.map_or(false, |entry| entry.is_ignored),
include_root_name: true,
candidates: project::Candidates::Entries,
}
})
.collect::<Vec<_>>();
let executor = cx.background_executor().clone();
cx.foreground_executor().spawn(async move {
fuzzy::match_path_sets(
candidate_sets.as_slice(),
query.as_str(),
None,
false,
100,
&cancellation_flag,
executor,
)
.await
})
}
}
} }
impl PickerDelegate for FileContextPickerDelegate { impl PickerDelegate for FileContextPickerDelegate {
@@ -114,7 +204,7 @@ impl PickerDelegate for FileContextPickerDelegate {
return Task::ready(()); return Task::ready(());
}; };
let search_task = search_paths(query, Arc::<AtomicBool>::default(), &workspace, cx); let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx);
cx.spawn_in(window, async move |this, cx| { cx.spawn_in(window, async move |this, cx| {
// TODO: This should be probably be run in the background. // TODO: This should be probably be run in the background.
@@ -132,6 +222,14 @@ impl PickerDelegate for FileContextPickerDelegate {
return; return;
}; };
let file_name = mat
.path
.file_name()
.map(|os_str| os_str.to_string_lossy().into_owned())
.unwrap_or(mat.path_prefix.to_string());
let full_path = mat.path.display().to_string();
let project_path = ProjectPath { let project_path = ProjectPath {
worktree_id: WorktreeId::from_usize(mat.worktree_id), worktree_id: WorktreeId::from_usize(mat.worktree_id),
path: mat.path.clone(), path: mat.path.clone(),
@@ -139,13 +237,106 @@ impl PickerDelegate for FileContextPickerDelegate {
let is_directory = mat.is_dir; let is_directory = mat.is_dir;
let Some(editor_entity) = self.editor.upgrade() else {
return;
};
editor_entity.update(cx, |editor, cx| {
editor.transact(window, cx, |editor, window, cx| {
// Move empty selections left by 1 column to select the `@`s, so they get overwritten when we insert.
{
let mut selections = editor.selections.all::<MultiBufferPoint>(cx);
for selection in selections.iter_mut() {
if selection.is_empty() {
let old_head = selection.head();
let new_head = MultiBufferPoint::new(
old_head.row,
old_head.column.saturating_sub(1),
);
selection.set_head(new_head, SelectionGoal::None);
}
}
editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
s.select(selections)
});
}
let start_anchors = {
let snapshot = editor.buffer().read(cx).snapshot(cx);
editor
.selections
.all::<Point>(cx)
.into_iter()
.map(|selection| snapshot.anchor_before(selection.start))
.collect::<Vec<_>>()
};
editor.insert(&full_path, window, cx);
let end_anchors = {
let snapshot = editor.buffer().read(cx).snapshot(cx);
editor
.selections
.all::<Point>(cx)
.into_iter()
.map(|selection| snapshot.anchor_after(selection.end))
.collect::<Vec<_>>()
};
editor.insert("\n", window, cx); // Needed to end the fold
let file_icon = if is_directory {
FileIcons::get_folder_icon(false, cx)
} else {
FileIcons::get_icon(&Path::new(&full_path), cx)
}
.unwrap_or_else(|| SharedString::new(""));
let placeholder = FoldPlaceholder {
render: render_fold_icon_button(
file_icon,
file_name.into(),
editor_entity.downgrade(),
),
..Default::default()
};
let render_trailer =
move |_row, _unfold, _window: &mut Window, _cx: &mut App| Empty.into_any();
let buffer = editor.buffer().read(cx).snapshot(cx);
let mut rows_to_fold = BTreeSet::new();
let crease_iter = start_anchors
.into_iter()
.zip(end_anchors)
.map(|(start, end)| {
rows_to_fold.insert(MultiBufferRow(start.to_point(&buffer).row));
Crease::inline(
start..end,
placeholder.clone(),
fold_toggle("tool-use"),
render_trailer,
)
});
editor.insert_creases(crease_iter, cx);
for buffer_row in rows_to_fold {
editor.fold_at(&FoldAt { buffer_row }, window, cx);
}
});
});
let Some(task) = self let Some(task) = self
.context_store .context_store
.update(cx, |context_store, cx| { .update(cx, |context_store, cx| {
if is_directory { if is_directory {
context_store.add_directory(project_path, true, cx) context_store.add_directory(project_path, cx)
} else { } else {
context_store.add_file_from_path(project_path, true, cx) context_store.add_file_from_path(project_path, cx)
} }
}) })
.ok() .ok()
@@ -199,80 +390,6 @@ impl PickerDelegate for FileContextPickerDelegate {
} }
} }
pub(crate) fn search_paths(
query: String,
cancellation_flag: Arc<AtomicBool>,
workspace: &Entity<Workspace>,
cx: &App,
) -> Task<Vec<PathMatch>> {
if query.is_empty() {
let workspace = workspace.read(cx);
let project = workspace.project().read(cx);
let recent_matches = workspace
.recent_navigation_history(Some(10), cx)
.into_iter()
.filter_map(|(project_path, _)| {
let worktree = project.worktree_for_id(project_path.worktree_id, cx)?;
Some(PathMatch {
score: 0.,
positions: Vec::new(),
worktree_id: project_path.worktree_id.to_usize(),
path: project_path.path,
path_prefix: worktree.read(cx).root_name().into(),
distance_to_relative_ancestor: 0,
is_dir: false,
})
});
let file_matches = project.worktrees(cx).flat_map(|worktree| {
let worktree = worktree.read(cx);
let path_prefix: Arc<str> = worktree.root_name().into();
worktree.entries(false, 0).map(move |entry| PathMatch {
score: 0.,
positions: Vec::new(),
worktree_id: worktree.id().to_usize(),
path: entry.path.clone(),
path_prefix: path_prefix.clone(),
distance_to_relative_ancestor: 0,
is_dir: entry.is_dir(),
})
});
Task::ready(recent_matches.chain(file_matches).collect())
} else {
let worktrees = workspace.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
let candidate_sets = worktrees
.into_iter()
.map(|worktree| {
let worktree = worktree.read(cx);
PathMatchCandidateSet {
snapshot: worktree.snapshot(),
include_ignored: worktree
.root_entry()
.map_or(false, |entry| entry.is_ignored),
include_root_name: true,
candidates: project::Candidates::Entries,
}
})
.collect::<Vec<_>>();
let executor = cx.background_executor().clone();
cx.foreground_executor().spawn(async move {
fuzzy::match_path_sets(
candidate_sets.as_slice(),
query.as_str(),
None,
false,
100,
&cancellation_flag,
executor,
)
.await
})
}
}
pub fn render_file_context_entry( pub fn render_file_context_entry(
id: ElementId, id: ElementId,
path: &Path, path: &Path,
@@ -367,3 +484,85 @@ pub fn render_file_context_entry(
} }
}) })
} }
fn render_fold_icon_button(
icon: SharedString,
label: SharedString,
editor: WeakEntity<Editor>,
) -> Arc<dyn Send + Sync + Fn(FoldId, Range<Anchor>, &mut App) -> AnyElement> {
Arc::new(move |fold_id, fold_range, cx| {
let is_in_text_selection = editor.upgrade().is_some_and(|editor| {
editor.update(cx, |editor, cx| {
let snapshot = editor
.buffer()
.update(cx, |multi_buffer, cx| multi_buffer.snapshot(cx));
let is_in_pending_selection = || {
editor
.selections
.pending
.as_ref()
.is_some_and(|pending_selection| {
pending_selection
.selection
.range()
.includes(&fold_range, &snapshot)
})
};
let mut is_in_complete_selection = || {
editor
.selections
.disjoint_in_range::<usize>(fold_range.clone(), cx)
.into_iter()
.any(|selection| {
// This is needed to cover a corner case, if we just check for an existing
// selection in the fold range, having a cursor at the start of the fold
// marks it as selected. Non-empty selections don't cause this.
let length = selection.end - selection.start;
length > 0
})
};
is_in_pending_selection() || is_in_complete_selection()
})
});
ButtonLike::new(fold_id)
.style(ButtonStyle::Filled)
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
.toggle_state(is_in_text_selection)
.child(
h_flex()
.gap_1()
.child(
Icon::from_path(icon.clone())
.size(IconSize::Small)
.color(Color::Muted),
)
.child(
Label::new(label.clone())
.size(LabelSize::Small)
.single_line(),
),
)
.into_any_element()
})
}
fn fold_toggle(
name: &'static str,
) -> impl Fn(
MultiBufferRow,
bool,
Arc<dyn Fn(bool, &mut Window, &mut App) + Send + Sync>,
&mut Window,
&mut App,
) -> AnyElement {
move |row, is_folded, fold, _window, _cx| {
Disclosure::new((name, row.0 as u64), !is_folded)
.toggle_state(is_folded)
.on_click(move |_e, window, cx| fold(!is_folded, window, cx))
.into_any_element()
}
}

View File

@@ -110,11 +110,45 @@ impl PickerDelegate for ThreadContextPickerDelegate {
window: &mut Window, window: &mut Window,
cx: &mut Context<Picker<Self>>, cx: &mut Context<Picker<Self>>,
) -> Task<()> { ) -> Task<()> {
let Some(threads) = self.thread_store.upgrade() else { let Ok(threads) = self.thread_store.update(cx, |this, _cx| {
this.threads()
.into_iter()
.map(|thread| ThreadContextEntry {
id: thread.id,
summary: thread.summary,
})
.collect::<Vec<_>>()
}) else {
return Task::ready(()); return Task::ready(());
}; };
let search_task = search_threads(query, threads, cx); let executor = cx.background_executor().clone();
let search_task = cx.background_spawn(async move {
if query.is_empty() {
threads
} else {
let candidates = threads
.iter()
.enumerate()
.map(|(id, thread)| StringMatchCandidate::new(id, &thread.summary))
.collect::<Vec<_>>();
let matches = fuzzy::match_strings(
&candidates,
&query,
false,
100,
&Default::default(),
executor,
)
.await;
matches
.into_iter()
.map(|mat| threads[mat.candidate_id].clone())
.collect()
}
});
cx.spawn_in(window, async move |this, cx| { cx.spawn_in(window, async move |this, cx| {
let matches = search_task.await; let matches = search_task.await;
this.update(cx, |this, cx| { this.update(cx, |this, cx| {
@@ -142,9 +176,7 @@ impl PickerDelegate for ThreadContextPickerDelegate {
this.update_in(cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
this.delegate this.delegate
.context_store .context_store
.update(cx, |context_store, cx| { .update(cx, |context_store, cx| context_store.add_thread(thread, cx))
context_store.add_thread(thread, true, cx)
})
.ok(); .ok();
match this.delegate.confirm_behavior { match this.delegate.confirm_behavior {
@@ -216,46 +248,3 @@ pub fn render_thread_context_entry(
) )
}) })
} }
pub(crate) fn search_threads(
query: String,
thread_store: Entity<ThreadStore>,
cx: &mut App,
) -> Task<Vec<ThreadContextEntry>> {
let threads = thread_store.update(cx, |this, _cx| {
this.threads()
.into_iter()
.map(|thread| ThreadContextEntry {
id: thread.id,
summary: thread.summary,
})
.collect::<Vec<_>>()
});
let executor = cx.background_executor().clone();
cx.background_spawn(async move {
if query.is_empty() {
threads
} else {
let candidates = threads
.iter()
.enumerate()
.map(|(id, thread)| StringMatchCandidate::new(id, &thread.summary))
.collect::<Vec<_>>();
let matches = fuzzy::match_strings(
&candidates,
&query,
false,
100,
&Default::default(),
executor,
)
.await;
matches
.into_iter()
.map(|mat| threads[mat.candidate_id].clone())
.collect()
}
})
}

View File

@@ -64,7 +64,6 @@ impl ContextStore {
pub fn add_file_from_path( pub fn add_file_from_path(
&mut self, &mut self,
project_path: ProjectPath, project_path: ProjectPath,
remove_if_exists: bool,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Task<Result<()>> { ) -> Task<Result<()>> {
let workspace = self.workspace.clone(); let workspace = self.workspace.clone();
@@ -87,9 +86,7 @@ impl ContextStore {
let already_included = this.update(cx, |this, _cx| { let already_included = this.update(cx, |this, _cx| {
match this.will_include_buffer(buffer_id, &project_path.path) { match this.will_include_buffer(buffer_id, &project_path.path) {
Some(FileInclusion::Direct(context_id)) => { Some(FileInclusion::Direct(context_id)) => {
if remove_if_exists { this.remove_context(context_id);
this.remove_context(context_id);
}
true true
} }
Some(FileInclusion::InDirectory(_)) => true, Some(FileInclusion::InDirectory(_)) => true,
@@ -160,7 +157,6 @@ impl ContextStore {
pub fn add_directory( pub fn add_directory(
&mut self, &mut self,
project_path: ProjectPath, project_path: ProjectPath,
remove_if_exists: bool,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Task<Result<()>> { ) -> Task<Result<()>> {
let workspace = self.workspace.clone(); let workspace = self.workspace.clone();
@@ -173,9 +169,7 @@ impl ContextStore {
let already_included = if let Some(context_id) = self.includes_directory(&project_path.path) let already_included = if let Some(context_id) = self.includes_directory(&project_path.path)
{ {
if remove_if_exists { self.remove_context(context_id);
self.remove_context(context_id);
}
true true
} else { } else {
false false
@@ -262,16 +256,9 @@ impl ContextStore {
))); )));
} }
pub fn add_thread( pub fn add_thread(&mut self, thread: Entity<Thread>, cx: &mut Context<Self>) {
&mut self,
thread: Entity<Thread>,
remove_if_exists: bool,
cx: &mut Context<Self>,
) {
if let Some(context_id) = self.includes_thread(&thread.read(cx).id()) { if let Some(context_id) = self.includes_thread(&thread.read(cx).id()) {
if remove_if_exists { self.remove_context(context_id);
self.remove_context(context_id);
}
} else { } else {
self.insert_thread(thread, cx); self.insert_thread(thread, cx);
} }

View File

@@ -39,6 +39,7 @@ impl ContextStrip {
pub fn new( pub fn new(
context_store: Entity<ContextStore>, context_store: Entity<ContextStore>,
workspace: WeakEntity<Workspace>, workspace: WeakEntity<Workspace>,
editor: WeakEntity<Editor>,
thread_store: Option<WeakEntity<ThreadStore>>, thread_store: Option<WeakEntity<ThreadStore>>,
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>, context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
suggest_context_kind: SuggestContextKind, suggest_context_kind: SuggestContextKind,
@@ -50,6 +51,7 @@ impl ContextStrip {
workspace.clone(), workspace.clone(),
thread_store.clone(), thread_store.clone(),
context_store.downgrade(), context_store.downgrade(),
editor.clone(),
ConfirmBehavior::KeepOpen, ConfirmBehavior::KeepOpen,
window, window,
cx, cx,

View File

@@ -324,7 +324,7 @@ impl InlineAssistant {
) { ) {
let (snapshot, initial_selections) = editor.update(cx, |editor, cx| { let (snapshot, initial_selections) = editor.update(cx, |editor, cx| {
( (
editor.snapshot(window, cx), editor.buffer().read(cx).snapshot(cx),
editor.selections.all::<Point>(cx), editor.selections.all::<Point>(cx),
) )
}); });
@@ -338,37 +338,7 @@ impl InlineAssistant {
if selection.end.column == 0 { if selection.end.column == 0 {
selection.end.row -= 1; selection.end.row -= 1;
} }
selection.end.column = snapshot selection.end.column = snapshot.line_len(MultiBufferRow(selection.end.row));
.buffer_snapshot
.line_len(MultiBufferRow(selection.end.row));
} else if let Some(fold) =
snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row))
{
selection.start = fold.range().start;
selection.end = fold.range().end;
if MultiBufferRow(selection.end.row) < snapshot.buffer_snapshot.max_row() {
let chars = snapshot
.buffer_snapshot
.chars_at(Point::new(selection.end.row + 1, 0));
for c in chars {
if c == '\n' {
break;
}
if c.is_whitespace() {
continue;
}
if snapshot
.language_at(selection.end)
.is_some_and(|language| language.config().brackets.is_closing_brace(c))
{
selection.end.row += 1;
selection.end.column = snapshot
.buffer_snapshot
.line_len(MultiBufferRow(selection.end.row));
}
}
}
} }
if let Some(prev_selection) = selections.last_mut() { if let Some(prev_selection) = selections.last_mut() {
@@ -384,7 +354,6 @@ impl InlineAssistant {
} }
selections.push(selection); selections.push(selection);
} }
let snapshot = &snapshot.buffer_snapshot;
let newest_selection = newest_selection.unwrap(); let newest_selection = newest_selection.unwrap();
let mut codegen_ranges = Vec::new(); let mut codegen_ranges = Vec::new();

View File

@@ -861,6 +861,7 @@ impl PromptEditor<BufferCodegen> {
ContextStrip::new( ContextStrip::new(
context_store.clone(), context_store.clone(),
workspace.clone(), workspace.clone(),
prompt_editor.downgrade(),
thread_store.clone(), thread_store.clone(),
context_picker_menu_handle.clone(), context_picker_menu_handle.clone(),
SuggestContextKind::Thread, SuggestContextKind::Thread,
@@ -1013,6 +1014,7 @@ impl PromptEditor<TerminalCodegen> {
ContextStrip::new( ContextStrip::new(
context_store.clone(), context_store.clone(),
workspace.clone(), workspace.clone(),
prompt_editor.downgrade(),
thread_store.clone(), thread_store.clone(),
context_picker_menu_handle.clone(), context_picker_menu_handle.clone(),
SuggestContextKind::Thread, SuggestContextKind::Thread,

View File

@@ -2,39 +2,42 @@ use std::sync::Arc;
use collections::HashSet; use collections::HashSet;
use editor::actions::MoveUp; use editor::actions::MoveUp;
use editor::{ContextMenuOptions, ContextMenuPlacement, Editor, EditorElement, EditorStyle}; use editor::{Editor, EditorElement, EditorEvent, EditorStyle};
use fs::Fs; use fs::Fs;
use git::ExpandCommitEditor; use git::ExpandCommitEditor;
use git_ui::git_panel; use git_ui::git_panel;
use gpui::{ use gpui::{
point, Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle, Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle,
WeakEntity, WeakEntity,
}; };
use language_model::LanguageModelRegistry; use language_model::LanguageModelRegistry;
use language_model_selector::ToggleModelSelector; use language_model_selector::ToggleModelSelector;
use project::Project; use project::Project;
use rope::Point;
use settings::Settings; use settings::Settings;
use std::time::Duration; use std::time::Duration;
use text::Bias;
use theme::ThemeSettings; use theme::ThemeSettings;
use ui::{ use ui::{
prelude::*, ButtonLike, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*, ButtonLike, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, Tooltip,
}; };
use util::ResultExt;
use vim_mode_setting::VimModeSetting; use vim_mode_setting::VimModeSetting;
use workspace::Workspace; use workspace::notifications::{NotificationId, NotifyTaskExt};
use workspace::{Toast, Workspace};
use crate::assistant_model_selector::AssistantModelSelector; use crate::assistant_model_selector::AssistantModelSelector;
use crate::context_picker::{ConfirmBehavior, ContextPicker, ContextPickerCompletionProvider}; use crate::context_picker::{ConfirmBehavior, ContextPicker};
use crate::context_store::{refresh_context_store_text, ContextStore}; use crate::context_store::{refresh_context_store_text, ContextStore};
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind}; use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
use crate::thread::{RequestKind, Thread}; use crate::thread::{RequestKind, Thread};
use crate::thread_store::ThreadStore; use crate::thread_store::ThreadStore;
use crate::tool_selector::ToolSelector; use crate::tool_selector::ToolSelector;
use crate::{Chat, ChatMode, RemoveAllContext, ThreadEvent, ToggleContextPicker}; use crate::{Chat, ChatMode, RemoveAllContext, ToggleContextPicker};
pub struct MessageEditor { pub struct MessageEditor {
thread: Entity<Thread>, thread: Entity<Thread>,
editor: Entity<Editor>, editor: Entity<Editor>,
#[allow(dead_code)]
workspace: WeakEntity<Workspace>, workspace: WeakEntity<Workspace>,
project: Entity<Project>, project: Entity<Project>,
context_store: Entity<ContextStore>, context_store: Entity<ContextStore>,
@@ -66,30 +69,16 @@ impl MessageEditor {
let mut editor = Editor::auto_height(10, window, cx); let mut editor = Editor::auto_height(10, window, cx);
editor.set_placeholder_text("Ask anything, @ to mention, ↑ to select", cx); editor.set_placeholder_text("Ask anything, @ to mention, ↑ to select", cx);
editor.set_show_indent_guides(false, cx); editor.set_show_indent_guides(false, cx);
editor.set_context_menu_options(ContextMenuOptions {
min_entries_visible: 12,
max_entries_visible: 12,
placement: Some(ContextMenuPlacement::Above),
});
editor editor
}); });
let editor_entity = editor.downgrade();
editor.update(cx, |editor, _| {
editor.set_completion_provider(Some(Box::new(ContextPickerCompletionProvider::new(
workspace.clone(),
context_store.downgrade(),
Some(thread_store.clone()),
editor_entity,
))));
});
let inline_context_picker = cx.new(|cx| { let inline_context_picker = cx.new(|cx| {
ContextPicker::new( ContextPicker::new(
workspace.clone(), workspace.clone(),
Some(thread_store.clone()), Some(thread_store.clone()),
context_store.downgrade(), context_store.downgrade(),
editor.downgrade(),
ConfirmBehavior::Close, ConfirmBehavior::Close,
window, window,
cx, cx,
@@ -100,6 +89,7 @@ impl MessageEditor {
ContextStrip::new( ContextStrip::new(
context_store.clone(), context_store.clone(),
workspace.clone(), workspace.clone(),
editor.downgrade(),
Some(thread_store.clone()), Some(thread_store.clone()),
context_picker_menu_handle.clone(), context_picker_menu_handle.clone(),
SuggestContextKind::File, SuggestContextKind::File,
@@ -109,6 +99,7 @@ impl MessageEditor {
}); });
let subscriptions = vec![ let subscriptions = vec![
cx.subscribe_in(&editor, window, Self::handle_editor_event),
cx.subscribe_in( cx.subscribe_in(
&inline_context_picker, &inline_context_picker,
window, window,
@@ -153,6 +144,7 @@ impl MessageEditor {
) { ) {
self.context_picker_menu_handle.toggle(window, cx); self.context_picker_menu_handle.toggle(window, cx);
} }
pub fn remove_all_context( pub fn remove_all_context(
&mut self, &mut self,
_: &RemoveAllContext, _: &RemoveAllContext,
@@ -214,23 +206,13 @@ impl MessageEditor {
let refresh_task = let refresh_task =
refresh_context_store_text(self.context_store.clone(), &HashSet::default(), cx); refresh_context_store_text(self.context_store.clone(), &HashSet::default(), cx);
let system_prompt_context_task = self.thread.read(cx).load_system_prompt_context(cx);
let thread = self.thread.clone(); let thread = self.thread.clone();
let context_store = self.context_store.clone(); let context_store = self.context_store.clone();
let checkpoint = self.project.read(cx).git_store().read(cx).checkpoint(cx); let git_store = self.project.read(cx).git_store();
let checkpoint = git_store.read(cx).checkpoint(cx);
cx.spawn(async move |_, cx| { cx.spawn(async move |_, cx| {
let checkpoint = checkpoint.await.ok();
refresh_task.await; refresh_task.await;
let (system_prompt_context, load_error) = system_prompt_context_task.await; let checkpoint = checkpoint.await.log_err();
thread
.update(cx, |thread, cx| {
thread.set_system_prompt_context(system_prompt_context);
if let Some(load_error) = load_error {
cx.emit(ThreadEvent::ShowError(load_error));
}
})
.ok();
thread thread
.update(cx, |thread, cx| { .update(cx, |thread, cx| {
let context = context_store.read(cx).snapshot(cx).collect::<Vec<_>>(); let context = context_store.read(cx).snapshot(cx).collect::<Vec<_>>();
@@ -242,6 +224,34 @@ impl MessageEditor {
.detach(); .detach();
} }
fn handle_editor_event(
&mut self,
editor: &Entity<Editor>,
event: &EditorEvent,
window: &mut Window,
cx: &mut Context<Self>,
) {
match event {
EditorEvent::SelectionsChanged { .. } => {
editor.update(cx, |editor, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
let newest_cursor = editor.selections.newest::<Point>(cx).head();
if newest_cursor.column > 0 {
let behind_cursor = snapshot.clip_point(
Point::new(newest_cursor.row, newest_cursor.column - 1),
Bias::Left,
);
let char_behind_cursor = snapshot.chars_at(behind_cursor).next();
if char_behind_cursor == Some('@') {
self.inline_context_picker_menu_handle.show(window, cx);
}
}
});
}
_ => {}
}
}
fn handle_inline_context_picker_event( fn handle_inline_context_picker_event(
&mut self, &mut self,
_inline_context_picker: &Entity<ContextPicker>, _inline_context_picker: &Entity<ContextPicker>,
@@ -280,6 +290,34 @@ impl MessageEditor {
self.context_strip.focus_handle(cx).focus(window); self.context_strip.focus_handle(cx).focus(window);
} }
} }
fn handle_feedback_click(
&mut self,
is_positive: bool,
window: &mut Window,
cx: &mut Context<Self>,
) {
let workspace = self.workspace.clone();
let report = self
.thread
.update(cx, |thread, cx| thread.report_feedback(is_positive, cx));
cx.spawn(async move |_, cx| {
report.await?;
workspace.update(cx, |workspace, cx| {
let message = if is_positive {
"Positive feedback recorded. Thank you!"
} else {
"Negative feedback recorded. Thank you for helping us improve!"
};
struct ThreadFeedback;
let id = NotificationId::unique::<ThreadFeedback>();
workspace.show_toast(Toast::new(id, message).autohide(), cx)
})
})
.detach_and_notify_err(window, cx);
}
} }
impl Focusable for MessageEditor { impl Focusable for MessageEditor {
@@ -292,11 +330,9 @@ impl Render for MessageEditor {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement { fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let font_size = TextSize::Default.rems(cx); let font_size = TextSize::Default.rems(cx);
let line_height = font_size.to_pixels(window.rem_size()) * 1.5; let line_height = font_size.to_pixels(window.rem_size()) * 1.5;
let focus_handle = self.editor.focus_handle(cx); let focus_handle = self.editor.focus_handle(cx);
let inline_context_picker = self.inline_context_picker.clone(); let inline_context_picker = self.inline_context_picker.clone();
let bg_color = cx.theme().colors().editor_background;
let empty_thread = self.thread.read(cx).is_empty();
let is_generating = self.thread.read(cx).is_generating(); let is_generating = self.thread.read(cx).is_generating();
let is_model_selected = self.is_model_selected(cx); let is_model_selected = self.is_model_selected(cx);
let is_editor_empty = self.is_editor_empty(cx); let is_editor_empty = self.is_editor_empty(cx);
@@ -323,24 +359,6 @@ impl Render for MessageEditor {
0 0
}; };
let border_color = cx.theme().colors().border;
let active_color = cx.theme().colors().element_selected;
let editor_bg_color = cx.theme().colors().editor_background;
let bg_edit_files_disclosure = editor_bg_color.blend(active_color.opacity(0.3));
let edit_files_container = || {
h_flex()
.mx_2()
.py_1()
.pl_2p5()
.pr_1()
.bg(bg_edit_files_disclosure)
.border_1()
.border_color(border_color)
.justify_between()
.flex_wrap()
};
v_flex() v_flex()
.size_full() .size_full()
.when(is_generating, |parent| { .when(is_generating, |parent| {
@@ -352,7 +370,7 @@ impl Render for MessageEditor {
.pl_2() .pl_2()
.pr_1() .pr_1()
.py_1() .py_1()
.bg(editor_bg_color) .bg(cx.theme().colors().editor_background)
.border_1() .border_1()
.border_color(cx.theme().colors().border_variant) .border_color(cx.theme().colors().border_variant)
.rounded_lg() .rounded_lg()
@@ -401,163 +419,73 @@ impl Render for MessageEditor {
), ),
) )
}) })
.when( .when(changed_files > 0, |parent| {
changed_files > 0 && !is_generating && !empty_thread, parent.child(
|parent| { v_flex()
parent.child( .mx_2()
edit_files_container() .bg(cx.theme().colors().element_background)
.border_b_0() .border_1()
.rounded_t_md() .border_b_0()
.shadow(smallvec::smallvec![gpui::BoxShadow { .border_color(cx.theme().colors().border)
color: gpui::black().opacity(0.15), .rounded_t_md()
offset: point(px(1.), px(-1.)), .child(
blur_radius: px(3.), h_flex()
spread_radius: px(0.), .justify_between()
}]) .p_2()
.child( .child(
h_flex() h_flex()
.gap_2() .gap_2()
.child(Label::new("Edits").size(LabelSize::XSmall)) .child(
.child(div().size_1().rounded_full().bg(border_color)) IconButton::new(
.child( "edits-disclosure",
Label::new(format!( IconName::GitBranchSmall,
"{} {}", )
changed_files, .icon_size(IconSize::Small)
if changed_files == 1 { "file" } else { "files" } .on_click(
)) |_ev, _window, cx| {
.size(LabelSize::XSmall), cx.defer(|cx| {
), cx.dispatch_action(&git_panel::ToggleFocus)
) });
.child( },
h_flex() ),
.gap_1() )
.child( .child(
Button::new("panel", "Open Git Panel") Label::new(format!(
.label_size(LabelSize::XSmall) "{} {} changed",
.key_binding({ changed_files,
let focus_handle = focus_handle.clone(); if changed_files == 1 { "file" } else { "files" }
KeyBinding::for_action_in( ))
&git_panel::ToggleFocus, .size(LabelSize::XSmall)
&focus_handle, .color(Color::Muted),
window, ),
cx, )
) .child(
.map(|kb| kb.size(rems_from_px(10.))) h_flex()
}) .gap_2()
.on_click(|_ev, _window, cx| { .child(
cx.defer(|cx| { Button::new("review", "Review")
cx.dispatch_action(&git_panel::ToggleFocus) .label_size(LabelSize::XSmall)
}); .on_click(|_event, _window, cx| {
}), cx.defer(|cx| {
) cx.dispatch_action(
.child( &git_ui::project_diff::Diff,
Button::new("review", "Review Diff") );
.label_size(LabelSize::XSmall) });
.key_binding({ }),
let focus_handle = focus_handle.clone(); )
KeyBinding::for_action_in( .child(
&git_ui::project_diff::Diff, Button::new("commit", "Commit")
&focus_handle, .label_size(LabelSize::XSmall)
window, .on_click(|_event, _window, cx| {
cx, cx.defer(|cx| {
) cx.dispatch_action(&ExpandCommitEditor)
.map(|kb| kb.size(rems_from_px(10.))) });
}) }),
.on_click(|_event, _window, cx| { ),
cx.defer(|cx| { ),
cx.dispatch_action(&git_ui::project_diff::Diff) ),
}); )
}), })
)
.child(
Button::new("commit", "Commit Changes")
.label_size(LabelSize::XSmall)
.key_binding({
let focus_handle = focus_handle.clone();
KeyBinding::for_action_in(
&ExpandCommitEditor,
&focus_handle,
window,
cx,
)
.map(|kb| kb.size(rems_from_px(10.)))
})
.on_click(|_event, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(&ExpandCommitEditor)
});
}),
),
),
)
},
)
.when(
changed_files > 0 && !is_generating && empty_thread,
|parent| {
parent.child(
edit_files_container()
.mb_2()
.rounded_md()
.child(
h_flex()
.gap_2()
.child(Label::new("Consider committing your changes before starting a fresh thread").size(LabelSize::XSmall))
.child(div().size_1().rounded_full().bg(border_color))
.child(
Label::new(format!(
"{} {}",
changed_files,
if changed_files == 1 { "file" } else { "files" }
))
.size(LabelSize::XSmall),
),
)
.child(
h_flex()
.gap_1()
.child(
Button::new("review", "Review Diff")
.label_size(LabelSize::XSmall)
.key_binding({
let focus_handle = focus_handle.clone();
KeyBinding::for_action_in(
&git_ui::project_diff::Diff,
&focus_handle,
window,
cx,
)
.map(|kb| kb.size(rems_from_px(10.)))
})
.on_click(|_event, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(&git_ui::project_diff::Diff)
});
}),
)
.child(
Button::new("commit", "Commit Changes")
.label_size(LabelSize::XSmall)
.key_binding({
let focus_handle = focus_handle.clone();
KeyBinding::for_action_in(
&ExpandCommitEditor,
&focus_handle,
window,
cx,
)
.map(|kb| kb.size(rems_from_px(10.)))
})
.on_click(|_event, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(&ExpandCommitEditor)
});
}),
),
),
)
},
)
.child( .child(
v_flex() v_flex()
.key_context("MessageEditor") .key_context("MessageEditor")
@@ -572,10 +500,48 @@ impl Render for MessageEditor {
.on_action(cx.listener(Self::toggle_chat_mode)) .on_action(cx.listener(Self::toggle_chat_mode))
.gap_2() .gap_2()
.p_2() .p_2()
.bg(editor_bg_color) .bg(bg_color)
.border_t_1() .border_t_1()
.border_color(cx.theme().colors().border) .border_color(cx.theme().colors().border)
.child(h_flex().justify_between().child(self.context_strip.clone())) .child(
h_flex()
.justify_between()
.child(self.context_strip.clone())
.when(!self.thread.read(cx).is_empty(), |this| {
this.child(
h_flex()
.gap_2()
.child(
IconButton::new(
"feedback-thumbs-up",
IconName::ThumbsUp,
)
.style(ButtonStyle::Subtle)
.icon_size(IconSize::Small)
.tooltip(Tooltip::text("Helpful"))
.on_click(
cx.listener(|this, _, window, cx| {
this.handle_feedback_click(true, window, cx);
}),
),
)
.child(
IconButton::new(
"feedback-thumbs-down",
IconName::ThumbsDown,
)
.style(ButtonStyle::Subtle)
.icon_size(IconSize::Small)
.tooltip(Tooltip::text("Not Helpful"))
.on_click(
cx.listener(|this, _, window, cx| {
this.handle_feedback_click(false, window, cx);
}),
),
),
)
}),
)
.child( .child(
v_flex() v_flex()
.gap_5() .gap_5()
@@ -595,10 +561,9 @@ impl Render for MessageEditor {
EditorElement::new( EditorElement::new(
&self.editor, &self.editor,
EditorStyle { EditorStyle {
background: editor_bg_color, background: bg_color,
local_player: cx.theme().players().local(), local_player: cx.theme().players().local(),
text: text_style, text: text_style,
syntax: cx.theme().syntax().clone(),
..Default::default() ..Default::default()
}, },
) )

View File

@@ -247,7 +247,7 @@ impl TerminalInlineAssistant {
let mut request_message = LanguageModelRequestMessage { let mut request_message = LanguageModelRequestMessage {
role: Role::User, role: Role::User,
content: vec![], content: vec![],
cache: true, cache: false,
}; };
attach_context_to_message( attach_context_to_message(

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,3 @@
use std::borrow::Cow;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
@@ -13,7 +12,7 @@ use futures::FutureExt as _;
use gpui::{ use gpui::{
prelude::*, App, BackgroundExecutor, Context, Entity, Global, ReadGlobal, SharedString, Task, prelude::*, App, BackgroundExecutor, Context, Entity, Global, ReadGlobal, SharedString, Task,
}; };
use heed::types::SerdeBincode; use heed::types::{SerdeBincode, SerdeJson};
use heed::Database; use heed::Database;
use language_model::{LanguageModelToolUseId, Role}; use language_model::{LanguageModelToolUseId, Role};
use project::Project; use project::Project;
@@ -21,7 +20,7 @@ use prompt_store::PromptBuilder;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use util::ResultExt as _; use util::ResultExt as _;
use crate::thread::{MessageId, ProjectSnapshot, Thread, ThreadEvent, ThreadId}; use crate::thread::{MessageId, ProjectSnapshot, Thread, ThreadId};
pub fn init(cx: &mut App) { pub fn init(cx: &mut App) {
ThreadsDatabase::init(cx); ThreadsDatabase::init(cx);
@@ -114,7 +113,7 @@ impl ThreadStore {
.await? .await?
.ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?; .ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
let thread = this.update(cx, |this, cx| { this.update(cx, |this, cx| {
cx.new(|cx| { cx.new(|cx| {
Thread::deserialize( Thread::deserialize(
id.clone(), id.clone(),
@@ -125,19 +124,7 @@ impl ThreadStore {
cx, cx,
) )
}) })
})?; })
let (system_prompt_context, load_error) = thread
.update(cx, |thread, cx| thread.load_system_prompt_context(cx))?
.await;
thread.update(cx, |thread, cx| {
thread.set_system_prompt_context(system_prompt_context);
if let Some(load_error) = load_error {
cx.emit(ThreadEvent::ShowError(load_error));
}
})?;
Ok(thread)
}) })
} }
@@ -260,7 +247,6 @@ pub struct SerializedThreadMetadata {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct SerializedThread { pub struct SerializedThread {
pub version: String,
pub summary: SharedString, pub summary: SharedString,
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
pub messages: Vec<SerializedMessage>, pub messages: Vec<SerializedMessage>,
@@ -268,55 +254,17 @@ pub struct SerializedThread {
pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>, pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
} }
impl SerializedThread {
pub const VERSION: &'static str = "0.1.0";
pub fn from_json(json: &[u8]) -> Result<Self> {
let saved_thread_json = serde_json::from_slice::<serde_json::Value>(json)?;
match saved_thread_json.get("version") {
Some(serde_json::Value::String(version)) => match version.as_str() {
SerializedThread::VERSION => Ok(serde_json::from_value::<SerializedThread>(
saved_thread_json,
)?),
_ => Err(anyhow!(
"unrecognized serialized thread version: {}",
version
)),
},
None => {
let saved_thread =
serde_json::from_value::<LegacySerializedThread>(saved_thread_json)?;
Ok(saved_thread.upgrade())
}
version => Err(anyhow!(
"unrecognized serialized thread version: {:?}",
version
)),
}
}
}
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct SerializedMessage { pub struct SerializedMessage {
pub id: MessageId, pub id: MessageId,
pub role: Role, pub role: Role,
#[serde(default)] pub text: String,
pub segments: Vec<SerializedMessageSegment>,
#[serde(default)] #[serde(default)]
pub tool_uses: Vec<SerializedToolUse>, pub tool_uses: Vec<SerializedToolUse>,
#[serde(default)] #[serde(default)]
pub tool_results: Vec<SerializedToolResult>, pub tool_results: Vec<SerializedToolResult>,
} }
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum SerializedMessageSegment {
#[serde(rename = "text")]
Text { text: String },
#[serde(rename = "thinking")]
Thinking { text: String },
}
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub struct SerializedToolUse { pub struct SerializedToolUse {
pub id: LanguageModelToolUseId, pub id: LanguageModelToolUseId,
@@ -331,50 +279,6 @@ pub struct SerializedToolResult {
pub content: Arc<str>, pub content: Arc<str>,
} }
#[derive(Serialize, Deserialize)]
struct LegacySerializedThread {
pub summary: SharedString,
pub updated_at: DateTime<Utc>,
pub messages: Vec<LegacySerializedMessage>,
#[serde(default)]
pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
}
impl LegacySerializedThread {
pub fn upgrade(self) -> SerializedThread {
SerializedThread {
version: SerializedThread::VERSION.to_string(),
summary: self.summary,
updated_at: self.updated_at,
messages: self.messages.into_iter().map(|msg| msg.upgrade()).collect(),
initial_project_snapshot: self.initial_project_snapshot,
}
}
}
#[derive(Debug, Serialize, Deserialize)]
struct LegacySerializedMessage {
pub id: MessageId,
pub role: Role,
pub text: String,
#[serde(default)]
pub tool_uses: Vec<SerializedToolUse>,
#[serde(default)]
pub tool_results: Vec<SerializedToolResult>,
}
impl LegacySerializedMessage {
fn upgrade(self) -> SerializedMessage {
SerializedMessage {
id: self.id,
role: self.role,
segments: vec![SerializedMessageSegment::Text { text: self.text }],
tool_uses: self.tool_uses,
tool_results: self.tool_results,
}
}
}
struct GlobalThreadsDatabase( struct GlobalThreadsDatabase(
Shared<BoxFuture<'static, Result<Arc<ThreadsDatabase>, Arc<anyhow::Error>>>>, Shared<BoxFuture<'static, Result<Arc<ThreadsDatabase>, Arc<anyhow::Error>>>>,
); );
@@ -384,25 +288,7 @@ impl Global for GlobalThreadsDatabase {}
pub(crate) struct ThreadsDatabase { pub(crate) struct ThreadsDatabase {
executor: BackgroundExecutor, executor: BackgroundExecutor,
env: heed::Env, env: heed::Env,
threads: Database<SerdeBincode<ThreadId>, SerializedThread>, threads: Database<SerdeBincode<ThreadId>, SerdeJson<SerializedThread>>,
}
impl heed::BytesEncode<'_> for SerializedThread {
type EItem = SerializedThread;
fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, heed::BoxedError> {
serde_json::to_vec(item).map(Cow::Owned).map_err(Into::into)
}
}
impl<'a> heed::BytesDecode<'a> for SerializedThread {
type DItem = SerializedThread;
fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, heed::BoxedError> {
// We implement this type manually because we want to call `SerializedThread::from_json`,
// instead of the Deserialize trait implementation for `SerializedThread`.
SerializedThread::from_json(bytes).map_err(Into::into)
}
} }
impl ThreadsDatabase { impl ThreadsDatabase {

View File

@@ -1,38 +1,23 @@
use std::sync::Arc; use std::sync::Arc;
use assistant_settings::{AgentProfile, AssistantSettings};
use assistant_tool::{ToolSource, ToolWorkingSet}; use assistant_tool::{ToolSource, ToolWorkingSet};
use gpui::{Entity, Subscription}; use gpui::Entity;
use indexmap::IndexMap; use scripting_tool::ScriptingTool;
use settings::{Settings as _, SettingsStore};
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip}; use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
use crate::agent_profile::AgentProfile;
pub struct ToolSelector { pub struct ToolSelector {
profiles: IndexMap<Arc<str>, AgentProfile>, profiles: Vec<AgentProfile>,
tools: Arc<ToolWorkingSet>, tools: Arc<ToolWorkingSet>,
_subscriptions: Vec<Subscription>,
} }
impl ToolSelector { impl ToolSelector {
pub fn new(tools: Arc<ToolWorkingSet>, cx: &mut Context<Self>) -> Self { pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self {
let settings_subscription = cx.observe_global::<SettingsStore>(move |this, cx| { Self {
this.refresh_profiles(cx); profiles: vec![AgentProfile::read_only(), AgentProfile::code_writer()],
});
let mut this = Self {
profiles: IndexMap::default(),
tools, tools,
_subscriptions: vec![settings_subscription], }
};
this.refresh_profiles(cx);
this
}
fn refresh_profiles(&mut self, cx: &mut Context<Self>) {
let settings = AssistantSettings::get_global(cx);
self.profiles = settings.profiles.clone();
} }
fn build_context_menu( fn build_context_menu(
@@ -46,12 +31,11 @@ impl ToolSelector {
let icon_position = IconPosition::End; let icon_position = IconPosition::End;
menu = menu.header("Profiles"); menu = menu.header("Profiles");
for (_id, profile) in profiles.clone() { for profile in profiles.clone() {
menu = menu.toggleable_entry(profile.name.clone(), false, icon_position, None, { menu = menu.toggleable_entry(profile.name.clone(), false, icon_position, None, {
let tools = tool_set.clone(); let tools = tool_set.clone();
move |_window, cx| { move |_window, cx| {
tools.disable_all_tools(cx); tools.disable_source(ToolSource::Native, cx);
tools.enable( tools.enable(
ToolSource::Native, ToolSource::Native,
&profile &profile
@@ -60,19 +44,6 @@ impl ToolSelector {
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone())) .filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
); );
for (context_server_id, preset) in &profile.context_servers {
tools.enable(
ToolSource::ContextServer {
id: context_server_id.clone().into(),
},
&preset
.tools
.iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
.collect::<Vec<_>>(),
)
}
} }
}); });
} }
@@ -106,6 +77,11 @@ impl ToolSelector {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
if ToolSource::Native == source { if ToolSource::Native == source {
tools.push((
ToolSource::Native,
ScriptingTool::NAME.into(),
tool_set.is_scripting_tool_enabled(),
));
tools.sort_by(|(_, name_a, _), (_, name_b, _)| name_a.cmp(name_b)); tools.sort_by(|(_, name_a, _), (_, name_b, _)| name_a.cmp(name_b));
} }
@@ -139,10 +115,18 @@ impl ToolSelector {
menu = menu.toggleable_entry(name.clone(), is_enabled, icon_position, None, { menu = menu.toggleable_entry(name.clone(), is_enabled, icon_position, None, {
let tools = tool_set.clone(); let tools = tool_set.clone();
move |_window, _cx| { move |_window, _cx| {
if is_enabled { if name.as_ref() == ScriptingTool::NAME {
tools.disable(source.clone(), &[name.clone()]); if is_enabled {
tools.disable_scripting_tool();
} else {
tools.enable_scripting_tool();
}
} else { } else {
tools.enable(source.clone(), &[name.clone()]); if is_enabled {
tools.disable(source.clone(), &[name.clone()]);
} else {
tools.enable(source.clone(), &[name.clone()]);
}
} }
} }
}); });

View File

@@ -1,16 +1,14 @@
use std::sync::Arc; use std::sync::Arc;
use anyhow::Result; use anyhow::Result;
use assistant_tool::{Tool, ToolWorkingSet};
use collections::HashMap; use collections::HashMap;
use futures::future::Shared; use futures::future::Shared;
use futures::FutureExt as _; use futures::FutureExt as _;
use gpui::{App, SharedString, Task}; use gpui::{SharedString, Task};
use language_model::{ use language_model::{
LanguageModelRequestMessage, LanguageModelToolResult, LanguageModelToolUse, LanguageModelRequestMessage, LanguageModelToolResult, LanguageModelToolUse,
LanguageModelToolUseId, MessageContent, Role, LanguageModelToolUseId, MessageContent, Role,
}; };
use ui::IconName;
use crate::thread::MessageId; use crate::thread::MessageId;
use crate::thread_store::SerializedMessage; use crate::thread_store::SerializedMessage;
@@ -19,15 +17,12 @@ use crate::thread_store::SerializedMessage;
pub struct ToolUse { pub struct ToolUse {
pub id: LanguageModelToolUseId, pub id: LanguageModelToolUseId,
pub name: SharedString, pub name: SharedString,
pub ui_text: SharedString,
pub status: ToolUseStatus, pub status: ToolUseStatus,
pub input: serde_json::Value, pub input: serde_json::Value,
pub icon: ui::IconName,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum ToolUseStatus { pub enum ToolUseStatus {
NeedsConfirmation,
Pending, Pending,
Running, Running,
Finished(SharedString), Finished(SharedString),
@@ -35,7 +30,6 @@ pub enum ToolUseStatus {
} }
pub struct ToolUseState { pub struct ToolUseState {
tools: Arc<ToolWorkingSet>,
tool_uses_by_assistant_message: HashMap<MessageId, Vec<LanguageModelToolUse>>, tool_uses_by_assistant_message: HashMap<MessageId, Vec<LanguageModelToolUse>>,
tool_uses_by_user_message: HashMap<MessageId, Vec<LanguageModelToolUseId>>, tool_uses_by_user_message: HashMap<MessageId, Vec<LanguageModelToolUseId>>,
tool_results: HashMap<LanguageModelToolUseId, LanguageModelToolResult>, tool_results: HashMap<LanguageModelToolUseId, LanguageModelToolResult>,
@@ -43,9 +37,8 @@ pub struct ToolUseState {
} }
impl ToolUseState { impl ToolUseState {
pub fn new(tools: Arc<ToolWorkingSet>) -> Self { pub fn new() -> Self {
Self { Self {
tools,
tool_uses_by_assistant_message: HashMap::default(), tool_uses_by_assistant_message: HashMap::default(),
tool_uses_by_user_message: HashMap::default(), tool_uses_by_user_message: HashMap::default(),
tool_results: HashMap::default(), tool_results: HashMap::default(),
@@ -57,11 +50,10 @@ impl ToolUseState {
/// ///
/// Accepts a function to filter the tools that should be used to populate the state. /// Accepts a function to filter the tools that should be used to populate the state.
pub fn from_serialized_messages( pub fn from_serialized_messages(
tools: Arc<ToolWorkingSet>,
messages: &[SerializedMessage], messages: &[SerializedMessage],
mut filter_by_tool_name: impl FnMut(&str) -> bool, mut filter_by_tool_name: impl FnMut(&str) -> bool,
) -> Self { ) -> Self {
let mut this = Self::new(tools); let mut this = Self::new();
let mut tool_names_by_id = HashMap::default(); let mut tool_names_by_id = HashMap::default();
for message in messages { for message in messages {
@@ -146,7 +138,7 @@ impl ToolUseState {
self.pending_tool_uses_by_id.values().collect() self.pending_tool_uses_by_id.values().collect()
} }
pub fn tool_uses_for_message(&self, id: MessageId, cx: &App) -> Vec<ToolUse> { pub fn tool_uses_for_message(&self, id: MessageId) -> Vec<ToolUse> {
let Some(tool_uses_for_message) = &self.tool_uses_by_assistant_message.get(&id) else { let Some(tool_uses_for_message) = &self.tool_uses_by_assistant_message.get(&id) else {
return Vec::new(); return Vec::new();
}; };
@@ -166,53 +158,29 @@ impl ToolUseState {
} }
if let Some(pending_tool_use) = self.pending_tool_uses_by_id.get(&tool_use.id) { if let Some(pending_tool_use) = self.pending_tool_uses_by_id.get(&tool_use.id) {
match pending_tool_use.status { return match pending_tool_use.status {
PendingToolUseStatus::Idle => ToolUseStatus::Pending, PendingToolUseStatus::Idle => ToolUseStatus::Pending,
PendingToolUseStatus::NeedsConfirmation { .. } => {
ToolUseStatus::NeedsConfirmation
}
PendingToolUseStatus::Running { .. } => ToolUseStatus::Running, PendingToolUseStatus::Running { .. } => ToolUseStatus::Running,
PendingToolUseStatus::Error(ref err) => { PendingToolUseStatus::Error(ref err) => {
ToolUseStatus::Error(err.clone().into()) ToolUseStatus::Error(err.clone().into())
} }
} };
} else {
ToolUseStatus::Pending
} }
})();
let icon = if let Some(tool) = self.tools.tool(&tool_use.name, cx) { ToolUseStatus::Pending
tool.icon() })();
} else {
IconName::Cog
};
tool_uses.push(ToolUse { tool_uses.push(ToolUse {
id: tool_use.id.clone(), id: tool_use.id.clone(),
name: tool_use.name.clone().into(), name: tool_use.name.clone().into(),
ui_text: self.tool_ui_label(&tool_use.name, &tool_use.input, cx),
input: tool_use.input.clone(), input: tool_use.input.clone(),
status, status,
icon,
}) })
} }
tool_uses tool_uses
} }
pub fn tool_ui_label(
&self,
tool_name: &str,
input: &serde_json::Value,
cx: &App,
) -> SharedString {
if let Some(tool) = self.tools.tool(tool_name, cx) {
tool.ui_text(input).into()
} else {
"Unknown tool".into()
}
}
pub fn tool_results_for_message(&self, message_id: MessageId) -> Vec<&LanguageModelToolResult> { pub fn tool_results_for_message(&self, message_id: MessageId) -> Vec<&LanguageModelToolResult> {
let empty = Vec::new(); let empty = Vec::new();
@@ -241,7 +209,6 @@ impl ToolUseState {
&mut self, &mut self,
assistant_message_id: MessageId, assistant_message_id: MessageId,
tool_use: LanguageModelToolUse, tool_use: LanguageModelToolUse,
cx: &App,
) { ) {
self.tool_uses_by_assistant_message self.tool_uses_by_assistant_message
.entry(assistant_message_id) .entry(assistant_message_id)
@@ -261,52 +228,21 @@ impl ToolUseState {
PendingToolUse { PendingToolUse {
assistant_message_id, assistant_message_id,
id: tool_use.id, id: tool_use.id,
name: tool_use.name.clone(), name: tool_use.name,
ui_text: self
.tool_ui_label(&tool_use.name, &tool_use.input, cx)
.into(),
input: tool_use.input, input: tool_use.input,
status: PendingToolUseStatus::Idle, status: PendingToolUseStatus::Idle,
}, },
); );
} }
pub fn run_pending_tool( pub fn run_pending_tool(&mut self, tool_use_id: LanguageModelToolUseId, task: Task<()>) {
&mut self,
tool_use_id: LanguageModelToolUseId,
ui_text: SharedString,
task: Task<()>,
) {
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) { if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
tool_use.ui_text = ui_text.into();
tool_use.status = PendingToolUseStatus::Running { tool_use.status = PendingToolUseStatus::Running {
_task: task.shared(), _task: task.shared(),
}; };
} }
} }
pub fn confirm_tool_use(
&mut self,
tool_use_id: LanguageModelToolUseId,
ui_text: impl Into<Arc<str>>,
input: serde_json::Value,
messages: Arc<Vec<LanguageModelRequestMessage>>,
tool: Arc<dyn Tool>,
) {
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
let ui_text = ui_text.into();
tool_use.ui_text = ui_text.clone();
let confirmation = Confirmation {
tool_use_id,
input,
messages,
tool,
ui_text,
};
tool_use.status = PendingToolUseStatus::NeedsConfirmation(Arc::new(confirmation));
}
}
pub fn insert_tool_output( pub fn insert_tool_output(
&mut self, &mut self,
tool_use_id: LanguageModelToolUseId, tool_use_id: LanguageModelToolUseId,
@@ -399,24 +335,13 @@ pub struct PendingToolUse {
#[allow(unused)] #[allow(unused)]
pub assistant_message_id: MessageId, pub assistant_message_id: MessageId,
pub name: Arc<str>, pub name: Arc<str>,
pub ui_text: Arc<str>,
pub input: serde_json::Value, pub input: serde_json::Value,
pub status: PendingToolUseStatus, pub status: PendingToolUseStatus,
} }
#[derive(Debug, Clone)]
pub struct Confirmation {
pub tool_use_id: LanguageModelToolUseId,
pub input: serde_json::Value,
pub ui_text: Arc<str>,
pub messages: Arc<Vec<LanguageModelRequestMessage>>,
pub tool: Arc<dyn Tool>,
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum PendingToolUseStatus { pub enum PendingToolUseStatus {
Idle, Idle,
NeedsConfirmation(Arc<Confirmation>),
Running { _task: Shared<Task<()>> }, Running { _task: Shared<Task<()>> },
Error(#[allow(unused)] Arc<str>), Error(#[allow(unused)] Arc<str>),
} }
@@ -429,8 +354,4 @@ impl PendingToolUseStatus {
pub fn is_error(&self) -> bool { pub fn is_error(&self) -> bool {
matches!(self, PendingToolUseStatus::Error(_)) matches!(self, PendingToolUseStatus::Error(_))
} }
pub fn needs_confirmation(&self) -> bool {
matches!(self, PendingToolUseStatus::NeedsConfirmation { .. })
}
} }

View File

@@ -1,5 +1,3 @@
mod context_pill; mod context_pill;
mod tool_ready_pop_up;
pub use context_pill::*; pub use context_pill::*;
pub use tool_ready_pop_up::*;

View File

@@ -1,119 +0,0 @@
use gpui::{
point, App, Context, EventEmitter, IntoElement, PlatformDisplay, Size, Window,
WindowBackgroundAppearance, WindowBounds, WindowDecorations, WindowKind, WindowOptions,
};
use release_channel::ReleaseChannel;
use std::rc::Rc;
use theme;
use ui::{prelude::*, Render};
pub struct ToolReadyPopUp {
caption: SharedString,
icon: IconName,
icon_color: Color,
}
impl ToolReadyPopUp {
pub fn new(caption: impl Into<SharedString>, icon: IconName, icon_color: Color) -> Self {
Self {
caption: caption.into(),
icon,
icon_color,
}
}
pub fn window_options(screen: Rc<dyn PlatformDisplay>, cx: &App) -> WindowOptions {
let size = Size {
width: px(440.),
height: px(72.),
};
let notification_margin_width = px(16.);
let notification_margin_height = px(-48.);
let bounds = gpui::Bounds::<Pixels> {
origin: screen.bounds().top_right()
- point(
size.width + notification_margin_width,
notification_margin_height,
),
size,
};
let app_id = ReleaseChannel::global(cx).app_id();
WindowOptions {
window_bounds: Some(WindowBounds::Windowed(bounds)),
titlebar: None,
focus: false,
show: true,
kind: WindowKind::PopUp,
is_movable: false,
display_id: Some(screen.id()),
window_background: WindowBackgroundAppearance::Transparent,
app_id: Some(app_id.to_owned()),
window_min_size: None,
window_decorations: Some(WindowDecorations::Client),
}
}
}
pub enum ToolReadyPopupEvent {
Accepted,
Dismissed,
}
impl EventEmitter<ToolReadyPopupEvent> for ToolReadyPopUp {}
impl Render for ToolReadyPopUp {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let ui_font = theme::setup_ui_font(window, cx);
let line_height = window.line_height();
h_flex()
.size_full()
.p_3()
.gap_4()
.justify_between()
.elevation_3(cx)
.text_ui(cx)
.font(ui_font)
.border_color(cx.theme().colors().border)
.rounded_xl()
.child(
h_flex()
.items_start()
.gap_2()
.child(
h_flex().h(line_height).justify_center().child(
Icon::new(self.icon)
.color(self.icon_color)
.size(IconSize::Small),
),
)
.child(
v_flex()
.child(Headline::new("Agent Panel").size(HeadlineSize::XSmall))
.child(Label::new(self.caption.clone()).color(Color::Muted)),
),
)
.child(
h_flex()
.gap_0p5()
.child(
Button::new("open", "View Panel")
.style(ButtonStyle::Tinted(ui::TintColor::Accent))
.on_click({
cx.listener(move |_this, _event, _, cx| {
cx.emit(ToolReadyPopupEvent::Accepted);
})
}),
)
.child(Button::new("dismiss", "Dismiss").on_click({
cx.listener(move |_, _event, _, cx| {
cx.emit(ToolReadyPopupEvent::Dismissed);
})
})),
)
}
}

View File

@@ -162,11 +162,6 @@ pub enum ContextOperation {
section: SlashCommandOutputSection<language::Anchor>, section: SlashCommandOutputSection<language::Anchor>,
version: clock::Global, version: clock::Global,
}, },
ThoughtProcessOutputSectionAdded {
timestamp: clock::Lamport,
section: ThoughtProcessOutputSection<language::Anchor>,
version: clock::Global,
},
BufferOperation(language::Operation), BufferOperation(language::Operation),
} }
@@ -264,20 +259,6 @@ impl ContextOperation {
version: language::proto::deserialize_version(&message.version), version: language::proto::deserialize_version(&message.version),
}) })
} }
proto::context_operation::Variant::ThoughtProcessOutputSectionAdded(message) => {
let section = message.section.context("missing section")?;
Ok(Self::ThoughtProcessOutputSectionAdded {
timestamp: language::proto::deserialize_timestamp(
message.timestamp.context("missing timestamp")?,
),
section: ThoughtProcessOutputSection {
range: language::proto::deserialize_anchor_range(
section.range.context("invalid range")?,
)?,
},
version: language::proto::deserialize_version(&message.version),
})
}
proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation( proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
language::proto::deserialize_operation( language::proto::deserialize_operation(
op.operation.context("invalid buffer operation")?, op.operation.context("invalid buffer operation")?,
@@ -389,27 +370,6 @@ impl ContextOperation {
}, },
)), )),
}, },
Self::ThoughtProcessOutputSectionAdded {
timestamp,
section,
version,
} => proto::ContextOperation {
variant: Some(
proto::context_operation::Variant::ThoughtProcessOutputSectionAdded(
proto::context_operation::ThoughtProcessOutputSectionAdded {
timestamp: Some(language::proto::serialize_timestamp(*timestamp)),
section: Some({
proto::ThoughtProcessOutputSection {
range: Some(language::proto::serialize_anchor_range(
section.range.clone(),
)),
}
}),
version: language::proto::serialize_version(version),
},
),
),
},
Self::BufferOperation(operation) => proto::ContextOperation { Self::BufferOperation(operation) => proto::ContextOperation {
variant: Some(proto::context_operation::Variant::BufferOperation( variant: Some(proto::context_operation::Variant::BufferOperation(
proto::context_operation::BufferOperation { proto::context_operation::BufferOperation {
@@ -427,8 +387,7 @@ impl ContextOperation {
Self::UpdateSummary { summary, .. } => summary.timestamp, Self::UpdateSummary { summary, .. } => summary.timestamp,
Self::SlashCommandStarted { id, .. } => id.0, Self::SlashCommandStarted { id, .. } => id.0,
Self::SlashCommandOutputSectionAdded { timestamp, .. } Self::SlashCommandOutputSectionAdded { timestamp, .. }
| Self::SlashCommandFinished { timestamp, .. } | Self::SlashCommandFinished { timestamp, .. } => *timestamp,
| Self::ThoughtProcessOutputSectionAdded { timestamp, .. } => *timestamp,
Self::BufferOperation(_) => { Self::BufferOperation(_) => {
panic!("reading the timestamp of a buffer operation is not supported") panic!("reading the timestamp of a buffer operation is not supported")
} }
@@ -443,8 +402,7 @@ impl ContextOperation {
| Self::UpdateSummary { version, .. } | Self::UpdateSummary { version, .. }
| Self::SlashCommandStarted { version, .. } | Self::SlashCommandStarted { version, .. }
| Self::SlashCommandOutputSectionAdded { version, .. } | Self::SlashCommandOutputSectionAdded { version, .. }
| Self::SlashCommandFinished { version, .. } | Self::SlashCommandFinished { version, .. } => version,
| Self::ThoughtProcessOutputSectionAdded { version, .. } => version,
Self::BufferOperation(_) => { Self::BufferOperation(_) => {
panic!("reading the version of a buffer operation is not supported") panic!("reading the version of a buffer operation is not supported")
} }
@@ -460,8 +418,6 @@ pub enum ContextEvent {
MessagesEdited, MessagesEdited,
SummaryChanged, SummaryChanged,
StreamedCompletion, StreamedCompletion,
StartedThoughtProcess(Range<language::Anchor>),
EndedThoughtProcess(language::Anchor),
PatchesUpdated { PatchesUpdated {
removed: Vec<Range<language::Anchor>>, removed: Vec<Range<language::Anchor>>,
updated: Vec<Range<language::Anchor>>, updated: Vec<Range<language::Anchor>>,
@@ -542,17 +498,6 @@ impl MessageMetadata {
} }
} }
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct ThoughtProcessOutputSection<T> {
pub range: Range<T>,
}
impl ThoughtProcessOutputSection<language::Anchor> {
pub fn is_valid(&self, buffer: &language::TextBuffer) -> bool {
self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty()
}
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct Message { pub struct Message {
pub offset_range: Range<usize>, pub offset_range: Range<usize>,
@@ -635,7 +580,6 @@ pub struct AssistantContext {
edits_since_last_parse: language::Subscription, edits_since_last_parse: language::Subscription,
slash_commands: Arc<SlashCommandWorkingSet>, slash_commands: Arc<SlashCommandWorkingSet>,
slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>, slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
thought_process_output_sections: Vec<ThoughtProcessOutputSection<language::Anchor>>,
message_anchors: Vec<MessageAnchor>, message_anchors: Vec<MessageAnchor>,
contents: Vec<Content>, contents: Vec<Content>,
messages_metadata: HashMap<MessageId, MessageMetadata>, messages_metadata: HashMap<MessageId, MessageMetadata>,
@@ -738,7 +682,6 @@ impl AssistantContext {
parsed_slash_commands: Vec::new(), parsed_slash_commands: Vec::new(),
invoked_slash_commands: HashMap::default(), invoked_slash_commands: HashMap::default(),
slash_command_output_sections: Vec::new(), slash_command_output_sections: Vec::new(),
thought_process_output_sections: Vec::new(),
edits_since_last_parse: edits_since_last_slash_command_parse, edits_since_last_parse: edits_since_last_slash_command_parse,
summary: None, summary: None,
pending_summary: Task::ready(None), pending_summary: Task::ready(None),
@@ -821,18 +764,6 @@ impl AssistantContext {
} }
}) })
.collect(), .collect(),
thought_process_output_sections: self
.thought_process_output_sections
.iter()
.filter_map(|section| {
if section.is_valid(buffer) {
let range = section.range.to_offset(buffer);
Some(ThoughtProcessOutputSection { range })
} else {
None
}
})
.collect(),
} }
} }
@@ -1026,16 +957,6 @@ impl AssistantContext {
cx.emit(ContextEvent::SlashCommandOutputSectionAdded { section }); cx.emit(ContextEvent::SlashCommandOutputSectionAdded { section });
} }
} }
ContextOperation::ThoughtProcessOutputSectionAdded { section, .. } => {
let buffer = self.buffer.read(cx);
if let Err(ix) = self
.thought_process_output_sections
.binary_search_by(|probe| probe.range.cmp(&section.range, buffer))
{
self.thought_process_output_sections
.insert(ix, section.clone());
}
}
ContextOperation::SlashCommandFinished { ContextOperation::SlashCommandFinished {
id, id,
error_message, error_message,
@@ -1099,9 +1020,6 @@ impl AssistantContext {
ContextOperation::SlashCommandOutputSectionAdded { section, .. } => { ContextOperation::SlashCommandOutputSectionAdded { section, .. } => {
self.has_received_operations_for_anchor_range(section.range.clone(), cx) self.has_received_operations_for_anchor_range(section.range.clone(), cx)
} }
ContextOperation::ThoughtProcessOutputSectionAdded { section, .. } => {
self.has_received_operations_for_anchor_range(section.range.clone(), cx)
}
ContextOperation::SlashCommandFinished { .. } => true, ContextOperation::SlashCommandFinished { .. } => true,
ContextOperation::BufferOperation(_) => { ContextOperation::BufferOperation(_) => {
panic!("buffer operations should always be applied") panic!("buffer operations should always be applied")
@@ -1210,12 +1128,6 @@ impl AssistantContext {
&self.slash_command_output_sections &self.slash_command_output_sections
} }
pub fn thought_process_output_sections(
&self,
) -> &[ThoughtProcessOutputSection<language::Anchor>] {
&self.thought_process_output_sections
}
pub fn contains_files(&self, cx: &App) -> bool { pub fn contains_files(&self, cx: &App) -> bool {
let buffer = self.buffer.read(cx); let buffer = self.buffer.read(cx);
self.slash_command_output_sections.iter().any(|section| { self.slash_command_output_sections.iter().any(|section| {
@@ -2256,35 +2168,6 @@ impl AssistantContext {
); );
} }
fn insert_thought_process_output_section(
&mut self,
section: ThoughtProcessOutputSection<language::Anchor>,
cx: &mut Context<Self>,
) {
let buffer = self.buffer.read(cx);
let insertion_ix = match self
.thought_process_output_sections
.binary_search_by(|probe| probe.range.cmp(&section.range, buffer))
{
Ok(ix) | Err(ix) => ix,
};
self.thought_process_output_sections
.insert(insertion_ix, section.clone());
// cx.emit(ContextEvent::ThoughtProcessOutputSectionAdded {
// section: section.clone(),
// });
let version = self.version.clone();
let timestamp = self.next_timestamp();
self.push_op(
ContextOperation::ThoughtProcessOutputSectionAdded {
timestamp,
section,
version,
},
cx,
);
}
pub fn completion_provider_changed(&mut self, cx: &mut Context<Self>) { pub fn completion_provider_changed(&mut self, cx: &mut Context<Self>) {
self.count_remaining_tokens(cx); self.count_remaining_tokens(cx);
} }
@@ -2337,10 +2220,6 @@ impl AssistantContext {
let request_start = Instant::now(); let request_start = Instant::now();
let mut events = stream.await?; let mut events = stream.await?;
let mut stop_reason = StopReason::EndTurn; let mut stop_reason = StopReason::EndTurn;
let mut thought_process_stack = Vec::new();
const THOUGHT_PROCESS_START_MARKER: &str = "<think>\n";
const THOUGHT_PROCESS_END_MARKER: &str = "\n</think>";
while let Some(event) = events.next().await { while let Some(event) = events.next().await {
if response_latency.is_none() { if response_latency.is_none() {
@@ -2348,9 +2227,6 @@ impl AssistantContext {
} }
let event = event?; let event = event?;
let mut context_event = None;
let mut thought_process_output_section = None;
this.update(cx, |this, cx| { this.update(cx, |this, cx| {
let message_ix = this let message_ix = this
.message_anchors .message_anchors
@@ -2369,50 +2245,7 @@ impl AssistantContext {
LanguageModelCompletionEvent::Stop(reason) => { LanguageModelCompletionEvent::Stop(reason) => {
stop_reason = reason; stop_reason = reason;
} }
LanguageModelCompletionEvent::Thinking(chunk) => { LanguageModelCompletionEvent::Text(chunk) => {
if thought_process_stack.is_empty() {
let start =
buffer.anchor_before(message_old_end_offset);
thought_process_stack.push(start);
let chunk =
format!("{THOUGHT_PROCESS_START_MARKER}{chunk}{THOUGHT_PROCESS_END_MARKER}");
let chunk_len = chunk.len();
buffer.edit(
[(
message_old_end_offset..message_old_end_offset,
chunk,
)],
None,
cx,
);
let end = buffer
.anchor_before(message_old_end_offset + chunk_len);
context_event = Some(
ContextEvent::StartedThoughtProcess(start..end),
);
} else {
// This ensures that all the thinking chunks are inserted inside the thinking tag
let insertion_position =
message_old_end_offset - THOUGHT_PROCESS_END_MARKER.len();
buffer.edit(
[(insertion_position..insertion_position, chunk)],
None,
cx,
);
}
}
LanguageModelCompletionEvent::Text(mut chunk) => {
if let Some(start) = thought_process_stack.pop() {
let end = buffer.anchor_before(message_old_end_offset);
context_event =
Some(ContextEvent::EndedThoughtProcess(end));
thought_process_output_section =
Some(ThoughtProcessOutputSection {
range: start..end,
});
chunk.insert_str(0, "\n\n");
}
buffer.edit( buffer.edit(
[( [(
message_old_end_offset..message_old_end_offset, message_old_end_offset..message_old_end_offset,
@@ -2427,13 +2260,6 @@ impl AssistantContext {
} }
}); });
if let Some(section) = thought_process_output_section.take() {
this.insert_thought_process_output_section(section, cx);
}
if let Some(context_event) = context_event.take() {
cx.emit(context_event);
}
cx.emit(ContextEvent::StreamedCompletion); cx.emit(ContextEvent::StreamedCompletion);
Some(()) Some(())
@@ -3301,8 +3127,6 @@ pub struct SavedContext {
pub summary: String, pub summary: String,
pub slash_command_output_sections: pub slash_command_output_sections:
Vec<assistant_slash_command::SlashCommandOutputSection<usize>>, Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
#[serde(default)]
pub thought_process_output_sections: Vec<ThoughtProcessOutputSection<usize>>,
} }
impl SavedContext { impl SavedContext {
@@ -3404,20 +3228,6 @@ impl SavedContext {
version.observe(timestamp); version.observe(timestamp);
} }
for section in self.thought_process_output_sections {
let timestamp = next_timestamp.tick();
operations.push(ContextOperation::ThoughtProcessOutputSectionAdded {
timestamp,
section: ThoughtProcessOutputSection {
range: buffer.anchor_after(section.range.start)
..buffer.anchor_before(section.range.end),
},
version: version.clone(),
});
version.observe(timestamp);
}
let timestamp = next_timestamp.tick(); let timestamp = next_timestamp.tick();
operations.push(ContextOperation::UpdateSummary { operations.push(ContextOperation::UpdateSummary {
summary: ContextSummary { summary: ContextSummary {
@@ -3492,7 +3302,6 @@ impl SavedContextV0_3_0 {
.collect(), .collect(),
summary: self.summary, summary: self.summary,
slash_command_output_sections: self.slash_command_output_sections, slash_command_output_sections: self.slash_command_output_sections,
thought_process_output_sections: Vec::new(),
} }
} }
} }

View File

@@ -13,7 +13,7 @@ use editor::{
BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata, BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata,
CustomBlockId, FoldId, RenderBlock, ToDisplayPoint, CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
}, },
scroll::Autoscroll, scroll::{Autoscroll, AutoscrollStrategy},
Anchor, Editor, EditorEvent, MenuInlineCompletionsPolicy, ProposedChangeLocation, Anchor, Editor, EditorEvent, MenuInlineCompletionsPolicy, ProposedChangeLocation,
ProposedChangesEditor, RowExt, ToOffset as _, ToPoint, ProposedChangesEditor, RowExt, ToOffset as _, ToPoint,
}; };
@@ -64,10 +64,7 @@ use workspace::{
Workspace, Workspace,
}; };
use crate::{ use crate::{slash_command::SlashCommandCompletionProvider, slash_command_picker};
slash_command::SlashCommandCompletionProvider, slash_command_picker,
ThoughtProcessOutputSection,
};
use crate::{ use crate::{
AssistantContext, AssistantPatch, AssistantPatchStatus, CacheStatus, Content, ContextEvent, AssistantContext, AssistantPatch, AssistantPatchStatus, CacheStatus, Content, ContextEvent,
ContextId, InvokedSlashCommandId, InvokedSlashCommandStatus, Message, MessageId, ContextId, InvokedSlashCommandId, InvokedSlashCommandStatus, Message, MessageId,
@@ -123,11 +120,6 @@ enum AssistError {
Message(SharedString), Message(SharedString),
} }
pub enum ThoughtProcessStatus {
Pending,
Completed,
}
pub trait AssistantPanelDelegate { pub trait AssistantPanelDelegate {
fn active_context_editor( fn active_context_editor(
&self, &self,
@@ -186,7 +178,6 @@ pub struct ContextEditor {
project: Entity<Project>, project: Entity<Project>,
lsp_adapter_delegate: Option<Arc<dyn LspAdapterDelegate>>, lsp_adapter_delegate: Option<Arc<dyn LspAdapterDelegate>>,
editor: Entity<Editor>, editor: Entity<Editor>,
pending_thought_process: Option<(CreaseId, language::Anchor)>,
blocks: HashMap<MessageId, (MessageHeader, CustomBlockId)>, blocks: HashMap<MessageId, (MessageHeader, CustomBlockId)>,
image_blocks: HashSet<CustomBlockId>, image_blocks: HashSet<CustomBlockId>,
scroll_position: Option<ScrollPosition>, scroll_position: Option<ScrollPosition>,
@@ -262,8 +253,7 @@ impl ContextEditor {
cx.observe_global_in::<SettingsStore>(window, Self::settings_changed), cx.observe_global_in::<SettingsStore>(window, Self::settings_changed),
]; ];
let slash_command_sections = context.read(cx).slash_command_output_sections().to_vec(); let sections = context.read(cx).slash_command_output_sections().to_vec();
let thought_process_sections = context.read(cx).thought_process_output_sections().to_vec();
let patch_ranges = context.read(cx).patch_ranges().collect::<Vec<_>>(); let patch_ranges = context.read(cx).patch_ranges().collect::<Vec<_>>();
let slash_commands = context.read(cx).slash_commands().clone(); let slash_commands = context.read(cx).slash_commands().clone();
let mut this = Self { let mut this = Self {
@@ -275,7 +265,6 @@ impl ContextEditor {
image_blocks: Default::default(), image_blocks: Default::default(),
scroll_position: None, scroll_position: None,
remote_id: None, remote_id: None,
pending_thought_process: None,
fs: fs.clone(), fs: fs.clone(),
workspace, workspace,
project, project,
@@ -305,14 +294,7 @@ impl ContextEditor {
}; };
this.update_message_headers(cx); this.update_message_headers(cx);
this.update_image_blocks(cx); this.update_image_blocks(cx);
this.insert_slash_command_output_sections(slash_command_sections, false, window, cx); this.insert_slash_command_output_sections(sections, false, window, cx);
this.insert_thought_process_output_sections(
thought_process_sections
.into_iter()
.map(|section| (section, ThoughtProcessStatus::Completed)),
window,
cx,
);
this.patches_updated(&Vec::new(), &patch_ranges, window, cx); this.patches_updated(&Vec::new(), &patch_ranges, window, cx);
this this
} }
@@ -414,9 +396,12 @@ impl ContextEditor {
cursor..cursor cursor..cursor
}; };
self.editor.update(cx, |editor, cx| { self.editor.update(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::fit()), window, cx, |selections| { editor.change_selections(
selections.select_ranges([new_selection]) Some(Autoscroll::Strategy(AutoscrollStrategy::Fit)),
}); window,
cx,
|selections| selections.select_ranges([new_selection]),
);
}); });
// Avoid scrolling to the new cursor position so the assistant's output is stable. // Avoid scrolling to the new cursor position so the assistant's output is stable.
cx.defer_in(window, |this, _, _| this.scroll_position = None); cx.defer_in(window, |this, _, _| this.scroll_position = None);
@@ -614,47 +599,6 @@ impl ContextEditor {
context.save(Some(Duration::from_millis(500)), self.fs.clone(), cx); context.save(Some(Duration::from_millis(500)), self.fs.clone(), cx);
}); });
} }
ContextEvent::StartedThoughtProcess(range) => {
let creases = self.insert_thought_process_output_sections(
[(
ThoughtProcessOutputSection {
range: range.clone(),
},
ThoughtProcessStatus::Pending,
)],
window,
cx,
);
self.pending_thought_process = Some((creases[0], range.start));
}
ContextEvent::EndedThoughtProcess(end) => {
if let Some((crease_id, start)) = self.pending_thought_process.take() {
self.editor.update(cx, |editor, cx| {
let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
let (excerpt_id, _, _) = multi_buffer_snapshot.as_singleton().unwrap();
let start_anchor = multi_buffer_snapshot
.anchor_in_excerpt(*excerpt_id, start)
.unwrap();
editor.display_map.update(cx, |display_map, cx| {
display_map.unfold_intersecting(
vec![start_anchor..start_anchor],
true,
cx,
);
});
editor.remove_creases(vec![crease_id], cx);
});
self.insert_thought_process_output_sections(
[(
ThoughtProcessOutputSection { range: start..*end },
ThoughtProcessStatus::Completed,
)],
window,
cx,
);
}
}
ContextEvent::StreamedCompletion => { ContextEvent::StreamedCompletion => {
self.editor.update(cx, |editor, cx| { self.editor.update(cx, |editor, cx| {
if let Some(scroll_position) = self.scroll_position { if let Some(scroll_position) = self.scroll_position {
@@ -1002,62 +946,6 @@ impl ContextEditor {
self.update_active_patch(window, cx); self.update_active_patch(window, cx);
} }
fn insert_thought_process_output_sections(
&mut self,
sections: impl IntoIterator<
Item = (
ThoughtProcessOutputSection<language::Anchor>,
ThoughtProcessStatus,
),
>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Vec<CreaseId> {
self.editor.update(cx, |editor, cx| {
let buffer = editor.buffer().read(cx).snapshot(cx);
let excerpt_id = *buffer.as_singleton().unwrap().0;
let mut buffer_rows_to_fold = BTreeSet::new();
let mut creases = Vec::new();
for (section, status) in sections {
let start = buffer
.anchor_in_excerpt(excerpt_id, section.range.start)
.unwrap();
let end = buffer
.anchor_in_excerpt(excerpt_id, section.range.end)
.unwrap();
let buffer_row = MultiBufferRow(start.to_point(&buffer).row);
buffer_rows_to_fold.insert(buffer_row);
creases.push(
Crease::inline(
start..end,
FoldPlaceholder {
render: render_thought_process_fold_icon_button(
cx.entity().downgrade(),
status,
),
merge_adjacent: false,
..Default::default()
},
render_slash_command_output_toggle,
|_, _, _, _| Empty.into_any_element(),
)
.with_metadata(CreaseMetadata {
icon: IconName::Ai,
label: "Thinking Process".into(),
}),
);
}
let creases = editor.insert_creases(creases, cx);
for buffer_row in buffer_rows_to_fold.into_iter().rev() {
editor.fold_at(&FoldAt { buffer_row }, window, cx);
}
creases
})
}
fn insert_slash_command_output_sections( fn insert_slash_command_output_sections(
&mut self, &mut self,
sections: impl IntoIterator<Item = SlashCommandOutputSection<language::Anchor>>, sections: impl IntoIterator<Item = SlashCommandOutputSection<language::Anchor>>,
@@ -2764,52 +2652,6 @@ fn find_surrounding_code_block(snapshot: &BufferSnapshot, offset: usize) -> Opti
None None
} }
fn render_thought_process_fold_icon_button(
editor: WeakEntity<Editor>,
status: ThoughtProcessStatus,
) -> Arc<dyn Send + Sync + Fn(FoldId, Range<Anchor>, &mut App) -> AnyElement> {
Arc::new(move |fold_id, fold_range, _cx| {
let editor = editor.clone();
let button = ButtonLike::new(fold_id).layer(ElevationIndex::ElevatedSurface);
let button = match status {
ThoughtProcessStatus::Pending => button
.child(
Icon::new(IconName::Brain)
.size(IconSize::Small)
.color(Color::Muted),
)
.child(
Label::new("Thinking…").color(Color::Muted).with_animation(
"pulsating-label",
Animation::new(Duration::from_secs(2))
.repeat()
.with_easing(pulsating_between(0.4, 0.8)),
|label, delta| label.alpha(delta),
),
),
ThoughtProcessStatus::Completed => button
.style(ButtonStyle::Filled)
.child(Icon::new(IconName::Brain).size(IconSize::Small))
.child(Label::new("Thought Process").single_line()),
};
button
.on_click(move |_, window, cx| {
editor
.update(cx, |editor, cx| {
let buffer_start = fold_range
.start
.to_point(&editor.buffer().read(cx).read(cx));
let buffer_row = MultiBufferRow(buffer_start.row);
editor.unfold_at(&UnfoldAt { buffer_row }, window, cx);
})
.ok();
})
.into_any_element()
})
}
fn render_fold_icon_button( fn render_fold_icon_button(
editor: WeakEntity<Editor>, editor: WeakEntity<Editor>,
icon: IconName, icon: IconName,

View File

@@ -2,7 +2,7 @@ use crate::context_editor::ContextEditor;
use anyhow::Result; use anyhow::Result;
pub use assistant_slash_command::SlashCommand; pub use assistant_slash_command::SlashCommand;
use assistant_slash_command::{AfterCompletion, SlashCommandLine, SlashCommandWorkingSet}; use assistant_slash_command::{AfterCompletion, SlashCommandLine, SlashCommandWorkingSet};
use editor::{CompletionProvider, Editor, ExcerptId}; use editor::{CompletionProvider, Editor};
use fuzzy::{match_strings, StringMatchCandidate}; use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity, Window}; use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity, Window};
use language::{Anchor, Buffer, ToPoint}; use language::{Anchor, Buffer, ToPoint};
@@ -126,7 +126,6 @@ impl SlashCommandCompletionProvider {
)), )),
new_text, new_text,
label: command.label(cx), label: command.label(cx),
icon_path: None,
confirm, confirm,
source: CompletionSource::Custom, source: CompletionSource::Custom,
}) })
@@ -224,7 +223,6 @@ impl SlashCommandCompletionProvider {
last_argument_range.clone() last_argument_range.clone()
}, },
label: new_argument.label, label: new_argument.label,
icon_path: None,
new_text, new_text,
documentation: None, documentation: None,
confirm, confirm,
@@ -243,7 +241,6 @@ impl SlashCommandCompletionProvider {
impl CompletionProvider for SlashCommandCompletionProvider { impl CompletionProvider for SlashCommandCompletionProvider {
fn completions( fn completions(
&self, &self,
_excerpt_id: ExcerptId,
buffer: &Entity<Buffer>, buffer: &Entity<Buffer>,
buffer_position: Anchor, buffer_position: Anchor,
_: editor::CompletionContext, _: editor::CompletionContext,

View File

@@ -79,25 +79,10 @@ impl Eval {
let start_time = std::time::SystemTime::now(); let start_time = std::time::SystemTime::now();
let (system_prompt_context, load_error) = cx
.update(|cx| {
assistant
.read(cx)
.thread
.read(cx)
.load_system_prompt_context(cx)
})?
.await;
if let Some(load_error) = load_error {
return Err(anyhow!("{:?}", load_error));
};
assistant.update(cx, |assistant, cx| { assistant.update(cx, |assistant, cx| {
assistant.thread.update(cx, |thread, cx| { assistant.thread.update(cx, |thread, cx| {
let context = vec![]; let context = vec![];
thread.insert_user_message(self.user_prompt.clone(), context, None, cx); thread.insert_user_message(self.user_prompt.clone(), context, None, cx);
thread.set_system_prompt_context(system_prompt_context);
thread.send_to_model(model, RequestKind::Chat, cx); thread.send_to_model(model, RequestKind::Chat, cx);
}); });
})?; })?;
@@ -120,7 +105,7 @@ impl Eval {
.count(); .count();
Ok(EvalOutput { Ok(EvalOutput {
diff, diff,
last_message: last_message.to_string(), last_message: last_message.text.clone(),
elapsed_time, elapsed_time,
assistant_response_count, assistant_response_count,
tool_use_counts: assistant.tool_use_counts.clone(), tool_use_counts: assistant.tool_use_counts.clone(),

View File

@@ -89,7 +89,7 @@ impl HeadlessAssistant {
ThreadEvent::DoneStreaming => { ThreadEvent::DoneStreaming => {
let thread = thread.read(cx); let thread = thread.read(cx);
if let Some(message) = thread.messages().last() { if let Some(message) = thread.messages().last() {
println!("Message: {}", message.to_string()); println!("Message: {}", message.text,);
} }
if thread.all_tools_finished() { if thread.all_tools_finished() {
self.done_tx.send_blocking(Ok(())).unwrap() self.done_tx.send_blocking(Ok(())).unwrap()
@@ -128,7 +128,12 @@ impl HeadlessAssistant {
} }
} }
} }
_ => {} ThreadEvent::StreamedCompletion
| ThreadEvent::SummaryChanged
| ThreadEvent::StreamedAssistantText(_, _)
| ThreadEvent::MessageAdded(_)
| ThreadEvent::MessageEdited(_)
| ThreadEvent::MessageDeleted(_) => {}
} }
} }
} }

View File

@@ -16,7 +16,6 @@ anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true anyhow.workspace = true
feature_flags.workspace = true feature_flags.workspace = true
gpui.workspace = true gpui.workspace = true
indexmap.workspace = true
language_model.workspace = true language_model.workspace = true
lmstudio = { workspace = true, features = ["schemars"] } lmstudio = { workspace = true, features = ["schemars"] }
log.workspace = true log.workspace = true

View File

@@ -1,18 +0,0 @@
use std::sync::Arc;
use gpui::SharedString;
use indexmap::IndexMap;
/// A profile for the Zed Agent that controls its behavior.
#[derive(Debug, Clone)]
pub struct AgentProfile {
/// The name of the profile.
pub name: SharedString,
pub tools: IndexMap<Arc<str>, bool>,
pub context_servers: IndexMap<Arc<str>, ContextServerPreset>,
}
#[derive(Debug, Clone)]
pub struct ContextServerPreset {
pub tools: IndexMap<Arc<str>, bool>,
}

View File

@@ -1,5 +1,3 @@
mod agent_profile;
use std::sync::Arc; use std::sync::Arc;
use ::open_ai::Model as OpenAiModel; use ::open_ai::Model as OpenAiModel;
@@ -7,7 +5,6 @@ use anthropic::Model as AnthropicModel;
use deepseek::Model as DeepseekModel; use deepseek::Model as DeepseekModel;
use feature_flags::FeatureFlagAppExt; use feature_flags::FeatureFlagAppExt;
use gpui::{App, Pixels}; use gpui::{App, Pixels};
use indexmap::IndexMap;
use language_model::{CloudModel, LanguageModel}; use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel; use lmstudio::Model as LmStudioModel;
use ollama::Model as OllamaModel; use ollama::Model as OllamaModel;
@@ -15,8 +12,6 @@ use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources}; use settings::{Settings, SettingsSources};
pub use crate::agent_profile::*;
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)] #[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition { pub enum AssistantDockPosition {
@@ -71,9 +66,6 @@ pub struct AssistantSettings {
pub inline_alternatives: Vec<LanguageModelSelection>, pub inline_alternatives: Vec<LanguageModelSelection>,
pub using_outdated_settings_version: bool, pub using_outdated_settings_version: bool,
pub enable_experimental_live_diffs: bool, pub enable_experimental_live_diffs: bool,
pub profiles: IndexMap<Arc<str>, AgentProfile>,
pub always_allow_tool_actions: bool,
pub notify_when_agent_waiting: bool,
} }
impl AssistantSettings { impl AssistantSettings {
@@ -174,9 +166,6 @@ impl AssistantSettingsContent {
editor_model: None, editor_model: None,
inline_alternatives: None, inline_alternatives: None,
enable_experimental_live_diffs: None, enable_experimental_live_diffs: None,
profiles: None,
always_allow_tool_actions: None,
notify_when_agent_waiting: None,
}, },
VersionedAssistantSettingsContent::V2(settings) => settings.clone(), VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
}, },
@@ -198,9 +187,6 @@ impl AssistantSettingsContent {
editor_model: None, editor_model: None,
inline_alternatives: None, inline_alternatives: None,
enable_experimental_live_diffs: None, enable_experimental_live_diffs: None,
profiles: None,
always_allow_tool_actions: None,
notify_when_agent_waiting: None,
}, },
} }
} }
@@ -330,9 +316,6 @@ impl Default for VersionedAssistantSettingsContent {
editor_model: None, editor_model: None,
inline_alternatives: None, inline_alternatives: None,
enable_experimental_live_diffs: None, enable_experimental_live_diffs: None,
profiles: None,
always_allow_tool_actions: None,
notify_when_agent_waiting: None,
}) })
} }
} }
@@ -369,17 +352,6 @@ pub struct AssistantSettingsContentV2 {
/// ///
/// Default: false /// Default: false
enable_experimental_live_diffs: Option<bool>, enable_experimental_live_diffs: Option<bool>,
#[schemars(skip)]
profiles: Option<IndexMap<Arc<str>, AgentProfileContent>>,
/// Whenever a tool action would normally wait for your confirmation
/// that you allow it, always choose to allow it.
///
/// Default: false
always_allow_tool_actions: Option<bool>,
/// Whether to show a popup notification when the agent is waiting for user input.
///
/// Default: true
notify_when_agent_waiting: Option<bool>,
} }
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
@@ -416,19 +388,6 @@ impl Default for LanguageModelSelection {
} }
} }
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
pub name: Arc<str>,
pub tools: IndexMap<Arc<str>, bool>,
#[serde(default)]
pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
pub tools: IndexMap<Arc<str>, bool>,
}
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] #[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 { pub struct AssistantSettingsContentV1 {
/// Whether the Assistant is enabled. /// Whether the Assistant is enabled.
@@ -523,40 +482,6 @@ impl Settings for AssistantSettings {
&mut settings.enable_experimental_live_diffs, &mut settings.enable_experimental_live_diffs,
value.enable_experimental_live_diffs, value.enable_experimental_live_diffs,
); );
merge(
&mut settings.always_allow_tool_actions,
value.always_allow_tool_actions,
);
merge(
&mut settings.notify_when_agent_waiting,
value.notify_when_agent_waiting,
);
if let Some(profiles) = value.profiles {
settings
.profiles
.extend(profiles.into_iter().map(|(id, profile)| {
(
id,
AgentProfile {
name: profile.name.into(),
tools: profile.tools,
context_servers: profile
.context_servers
.into_iter()
.map(|(context_server_id, preset)| {
(
context_server_id,
ContextServerPreset {
tools: preset.tools.clone(),
},
)
})
.collect(),
},
)
}));
}
} }
Ok(settings) Ok(settings)
@@ -621,9 +546,6 @@ mod tests {
default_width: None, default_width: None,
default_height: None, default_height: None,
enable_experimental_live_diffs: None, enable_experimental_live_diffs: None,
profiles: None,
always_allow_tool_actions: None,
notify_when_agent_waiting: None,
}), }),
) )
}, },

View File

@@ -13,11 +13,10 @@ path = "src/assistant_tool.rs"
[dependencies] [dependencies]
anyhow.workspace = true anyhow.workspace = true
clock.workspace = true
collections.workspace = true collections.workspace = true
clock.workspace = true
derive_more.workspace = true derive_more.workspace = true
gpui.workspace = true gpui.workspace = true
icons.workspace = true
language.workspace = true language.workspace = true
language_model.workspace = true language_model.workspace = true
parking_lot.workspace = true parking_lot.workspace = true

View File

@@ -1,13 +1,12 @@
mod tool_registry; mod tool_registry;
mod tool_working_set; mod tool_working_set;
use std::fmt::{self, Debug, Formatter};
use std::sync::Arc; use std::sync::Arc;
use anyhow::Result; use anyhow::Result;
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use gpui::{App, Context, Entity, SharedString, Task}; use gpui::Context;
use icons::IconName; use gpui::{App, Entity, SharedString, Task};
use language::Buffer; use language::Buffer;
use language_model::LanguageModelRequestMessage; use language_model::LanguageModelRequestMessage;
use project::Project; use project::Project;
@@ -35,26 +34,16 @@ pub trait Tool: 'static + Send + Sync {
/// Returns the description of the tool. /// Returns the description of the tool.
fn description(&self) -> String; fn description(&self) -> String;
/// Returns the icon for the tool.
fn icon(&self) -> IconName;
/// Returns the source of the tool. /// Returns the source of the tool.
fn source(&self) -> ToolSource { fn source(&self) -> ToolSource {
ToolSource::Native ToolSource::Native
} }
/// Returns true iff the tool needs the users's confirmation
/// before having permission to run.
fn needs_confirmation(&self) -> bool;
/// Returns the JSON schema that describes the tool's input. /// Returns the JSON schema that describes the tool's input.
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
serde_json::Value::Object(serde_json::Map::default()) serde_json::Value::Object(serde_json::Map::default())
} }
/// Returns markdown to be displayed in the UI for this tool.
fn ui_text(&self, input: &serde_json::Value) -> String;
/// Runs the tool with the provided input. /// Runs the tool with the provided input.
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
@@ -66,12 +55,6 @@ pub trait Tool: 'static + Send + Sync {
) -> Task<Result<String>>; ) -> Task<Result<String>>;
} }
impl Debug for dyn Tool {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.debug_struct("Tool").field("name", &self.name()).finish()
}
}
/// Tracks actions performed by tools in a thread /// Tracks actions performed by tools in a thread
#[derive(Debug)] #[derive(Debug)]
pub struct ActionLog { pub struct ActionLog {
@@ -80,8 +63,6 @@ pub struct ActionLog {
stale_buffers_in_context: HashSet<Entity<Buffer>>, stale_buffers_in_context: HashSet<Entity<Buffer>>,
/// Buffers that we want to notify the model about when they change. /// Buffers that we want to notify the model about when they change.
tracked_buffers: HashMap<Entity<Buffer>, TrackedBuffer>, tracked_buffers: HashMap<Entity<Buffer>, TrackedBuffer>,
/// Has the model edited a file since it last checked diagnostics?
edited_since_project_diagnostics_check: bool,
} }
#[derive(Debug, Default)] #[derive(Debug, Default)]
@@ -95,7 +76,6 @@ impl ActionLog {
Self { Self {
stale_buffers_in_context: HashSet::default(), stale_buffers_in_context: HashSet::default(),
tracked_buffers: HashMap::default(), tracked_buffers: HashMap::default(),
edited_since_project_diagnostics_check: false,
} }
} }
@@ -113,12 +93,6 @@ impl ActionLog {
} }
self.stale_buffers_in_context.extend(buffers); self.stale_buffers_in_context.extend(buffers);
self.edited_since_project_diagnostics_check = true;
}
/// Notifies a diagnostics check
pub fn checked_project_diagnostics(&mut self) {
self.edited_since_project_diagnostics_check = false;
} }
/// Iterate over buffers changed since last read or edited by the model /// Iterate over buffers changed since last read or edited by the model
@@ -129,11 +103,6 @@ impl ActionLog {
.map(|(buffer, _)| buffer) .map(|(buffer, _)| buffer)
} }
/// Returns true if any files have been edited since the last project diagnostics check
pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
self.edited_since_project_diagnostics_check
}
/// Takes and returns the set of buffers pending refresh, clearing internal state. /// Takes and returns the set of buffers pending refresh, clearing internal state.
pub fn take_stale_buffers_in_context(&mut self) -> HashSet<Entity<Buffer>> { pub fn take_stale_buffers_in_context(&mut self) -> HashSet<Entity<Buffer>> {
std::mem::take(&mut self.stale_buffers_in_context) std::mem::take(&mut self.stale_buffers_in_context)

View File

@@ -15,14 +15,26 @@ pub struct ToolWorkingSet {
state: Mutex<WorkingSetState>, state: Mutex<WorkingSetState>,
} }
#[derive(Default)]
struct WorkingSetState { struct WorkingSetState {
context_server_tools_by_id: HashMap<ToolId, Arc<dyn Tool>>, context_server_tools_by_id: HashMap<ToolId, Arc<dyn Tool>>,
context_server_tools_by_name: HashMap<String, Arc<dyn Tool>>, context_server_tools_by_name: HashMap<String, Arc<dyn Tool>>,
disabled_tools_by_source: HashMap<ToolSource, HashSet<Arc<str>>>, disabled_tools_by_source: HashMap<ToolSource, HashSet<Arc<str>>>,
is_scripting_tool_disabled: bool,
next_tool_id: ToolId, next_tool_id: ToolId,
} }
impl Default for WorkingSetState {
fn default() -> Self {
Self {
context_server_tools_by_id: HashMap::default(),
context_server_tools_by_name: HashMap::default(),
disabled_tools_by_source: HashMap::default(),
is_scripting_tool_disabled: true,
next_tool_id: ToolId::default(),
}
}
}
impl ToolWorkingSet { impl ToolWorkingSet {
pub fn tool(&self, name: &str, cx: &App) -> Option<Arc<dyn Tool>> { pub fn tool(&self, name: &str, cx: &App) -> Option<Arc<dyn Tool>> {
self.state self.state
@@ -43,7 +55,7 @@ impl ToolWorkingSet {
pub fn are_all_tools_enabled(&self) -> bool { pub fn are_all_tools_enabled(&self) -> bool {
let state = self.state.lock(); let state = self.state.lock();
state.disabled_tools_by_source.is_empty() state.disabled_tools_by_source.is_empty() && !state.is_scripting_tool_disabled
} }
pub fn are_all_tools_from_source_enabled(&self, source: &ToolSource) -> bool { pub fn are_all_tools_from_source_enabled(&self, source: &ToolSource) -> bool {
@@ -58,6 +70,7 @@ impl ToolWorkingSet {
pub fn enable_all_tools(&self) { pub fn enable_all_tools(&self) {
let mut state = self.state.lock(); let mut state = self.state.lock();
state.disabled_tools_by_source.clear(); state.disabled_tools_by_source.clear();
state.enable_scripting_tool();
} }
pub fn disable_all_tools(&self, cx: &App) { pub fn disable_all_tools(&self, cx: &App) {
@@ -111,6 +124,21 @@ impl ToolWorkingSet {
.retain(|id, _| !tool_ids_to_remove.contains(id)); .retain(|id, _| !tool_ids_to_remove.contains(id));
state.tools_changed(); state.tools_changed();
} }
pub fn is_scripting_tool_enabled(&self) -> bool {
let state = self.state.lock();
!state.is_scripting_tool_disabled
}
pub fn enable_scripting_tool(&self) {
let mut state = self.state.lock();
state.enable_scripting_tool();
}
pub fn disable_scripting_tool(&self) {
let mut state = self.state.lock();
state.disable_scripting_tool();
}
} }
impl WorkingSetState { impl WorkingSetState {
@@ -212,5 +240,15 @@ impl WorkingSetState {
self.disable(source, &tool_names); self.disable(source, &tool_names);
} }
self.disable_scripting_tool();
}
fn enable_scripting_tool(&mut self) {
self.is_scripting_tool_disabled = false;
}
fn disable_scripting_tool(&mut self) {
self.is_scripting_tool_disabled = true;
} }
} }

View File

@@ -1,37 +1,26 @@
mod bash_tool; mod bash_tool;
mod copy_path_tool;
mod create_directory_tool;
mod create_file_tool;
mod delete_path_tool; mod delete_path_tool;
mod diagnostics_tool; mod diagnostics_tool;
mod edit_files_tool; mod edit_files_tool;
mod fetch_tool; mod fetch_tool;
mod find_replace_file_tool;
mod list_directory_tool; mod list_directory_tool;
mod move_path_tool;
mod now_tool; mod now_tool;
mod path_search_tool; mod path_search_tool;
mod read_file_tool; mod read_file_tool;
mod regex_search_tool; mod regex_search_tool;
mod replace;
mod thinking_tool; mod thinking_tool;
use std::sync::Arc; use std::sync::Arc;
use assistant_tool::ToolRegistry; use assistant_tool::ToolRegistry;
use copy_path_tool::CopyPathTool;
use gpui::App; use gpui::App;
use http_client::HttpClientWithUrl; use http_client::HttpClientWithUrl;
use move_path_tool::MovePathTool;
use crate::bash_tool::BashTool; use crate::bash_tool::BashTool;
use crate::create_directory_tool::CreateDirectoryTool;
use crate::create_file_tool::CreateFileTool;
use crate::delete_path_tool::DeletePathTool; use crate::delete_path_tool::DeletePathTool;
use crate::diagnostics_tool::DiagnosticsTool; use crate::diagnostics_tool::DiagnosticsTool;
use crate::edit_files_tool::EditFilesTool; use crate::edit_files_tool::EditFilesTool;
use crate::fetch_tool::FetchTool; use crate::fetch_tool::FetchTool;
use crate::find_replace_file_tool::FindReplaceFileTool;
use crate::list_directory_tool::ListDirectoryTool; use crate::list_directory_tool::ListDirectoryTool;
use crate::now_tool::NowTool; use crate::now_tool::NowTool;
use crate::path_search_tool::PathSearchTool; use crate::path_search_tool::PathSearchTool;
@@ -45,12 +34,7 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
let registry = ToolRegistry::global(cx); let registry = ToolRegistry::global(cx);
registry.register_tool(BashTool); registry.register_tool(BashTool);
registry.register_tool(CreateDirectoryTool);
registry.register_tool(CreateFileTool);
registry.register_tool(CopyPathTool);
registry.register_tool(DeletePathTool); registry.register_tool(DeletePathTool);
registry.register_tool(FindReplaceFileTool);
registry.register_tool(MovePathTool);
registry.register_tool(DiagnosticsTool); registry.register_tool(DiagnosticsTool);
registry.register_tool(EditFilesTool); registry.register_tool(EditFilesTool);
registry.register_tool(ListDirectoryTool); registry.register_tool(ListDirectoryTool);

View File

@@ -6,9 +6,7 @@ use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
use ui::IconName;
use util::command::new_smol_command; use util::command::new_smol_command;
use util::markdown::MarkdownString;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct BashToolInput { pub struct BashToolInput {
@@ -25,37 +23,15 @@ impl Tool for BashTool {
"bash".to_string() "bash".to_string()
} }
fn needs_confirmation(&self) -> bool {
true
}
fn description(&self) -> String { fn description(&self) -> String {
include_str!("./bash_tool/description.md").to_string() include_str!("./bash_tool/description.md").to_string()
} }
fn icon(&self) -> IconName {
IconName::Terminal
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(BashToolInput); let schema = schemars::schema_for!(BashToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<BashToolInput>(input.clone()) {
Ok(input) => {
let cmd = MarkdownString::escape(&input.command);
if input.command.contains('\n') {
format!("```bash\n{cmd}\n```")
} else {
format!("`{cmd}`")
}
}
Err(_) => "Run bash command".to_string(),
}
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,

View File

@@ -1,120 +0,0 @@
use anyhow::{anyhow, Result};
use assistant_tool::{ActionLog, Tool};
use gpui::{App, AppContext, Entity, Task};
use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use ui::IconName;
use util::markdown::MarkdownString;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct CopyPathToolInput {
/// The source path of the file or directory to copy.
/// If a directory is specified, its contents will be copied recursively (like `cp -r`).
///
/// <example>
/// If the project has the following files:
///
/// - directory1/a/something.txt
/// - directory2/a/things.txt
/// - directory3/a/other.txt
///
/// You can copy the first file by providing a source_path of "directory1/a/something.txt"
/// </example>
pub source_path: String,
/// The destination path where the file or directory should be copied to.
///
/// <example>
/// To copy "directory1/a/something.txt" to "directory2/b/copy.txt",
/// provide a destination_path of "directory2/b/copy.txt"
/// </example>
pub destination_path: String,
}
pub struct CopyPathTool;
impl Tool for CopyPathTool {
fn name(&self) -> String {
"copy-path".into()
}
fn needs_confirmation(&self) -> bool {
true
}
fn description(&self) -> String {
include_str!("./copy_path_tool/description.md").into()
}
fn icon(&self) -> IconName {
IconName::Clipboard
}
fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(CopyPathToolInput);
serde_json::to_value(&schema).unwrap()
}
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<CopyPathToolInput>(input.clone()) {
Ok(input) => {
let src = MarkdownString::escape(&input.source_path);
let dest = MarkdownString::escape(&input.destination_path);
format!("Copy `{src}` to `{dest}`")
}
Err(_) => "Copy path".to_string(),
}
}
fn run(
self: Arc<Self>,
input: serde_json::Value,
_messages: &[LanguageModelRequestMessage],
project: Entity<Project>,
_action_log: Entity<ActionLog>,
cx: &mut App,
) -> Task<Result<String>> {
let input = match serde_json::from_value::<CopyPathToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
let copy_task = project.update(cx, |project, cx| {
match project
.find_project_path(&input.source_path, cx)
.and_then(|project_path| project.entry_for_path(&project_path, cx))
{
Some(entity) => match project.find_project_path(&input.destination_path, cx) {
Some(project_path) => {
project.copy_entry(entity.id, None, project_path.path, cx)
}
None => Task::ready(Err(anyhow!(
"Destination path {} was outside the project.",
input.destination_path
))),
},
None => Task::ready(Err(anyhow!(
"Source path {} was not found in the project.",
input.source_path
))),
}
});
cx.background_spawn(async move {
match copy_task.await {
Ok(_) => Ok(format!(
"Copied {} to {}",
input.source_path, input.destination_path
)),
Err(err) => Err(anyhow!(
"Failed to copy {} to {}: {}",
input.source_path,
input.destination_path,
err
)),
}
})
}
}

View File

@@ -1,6 +0,0 @@
Copies a file or directory in the project, and returns confirmation that the copy succeeded.
Directory contents will be copied recursively (like `cp -r`).
This tool should be used when it's desirable to create a copy of a file or directory without modifying the original.
It's much more efficient than doing this by separately reading and then writing the file or directory's contents,
so this tool should be preferred over that approach whenever copying is the goal.

View File

@@ -1,89 +0,0 @@
use anyhow::{anyhow, Result};
use assistant_tool::{ActionLog, Tool};
use gpui::{App, Entity, Task};
use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use ui::IconName;
use util::markdown::MarkdownString;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct CreateDirectoryToolInput {
/// The path of the new directory.
///
/// <example>
/// If the project has the following structure:
///
/// - directory1/
/// - directory2/
///
/// You can create a new directory by providing a path of "directory1/new_directory"
/// </example>
pub path: String,
}
pub struct CreateDirectoryTool;
impl Tool for CreateDirectoryTool {
fn name(&self) -> String {
"create-directory".into()
}
fn needs_confirmation(&self) -> bool {
true
}
fn description(&self) -> String {
include_str!("./create_directory_tool/description.md").into()
}
fn icon(&self) -> IconName {
IconName::Folder
}
fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(CreateDirectoryToolInput);
serde_json::to_value(&schema).unwrap()
}
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<CreateDirectoryToolInput>(input.clone()) {
Ok(input) => {
format!("Create directory `{}`", MarkdownString::escape(&input.path))
}
Err(_) => "Create directory".to_string(),
}
}
fn run(
self: Arc<Self>,
input: serde_json::Value,
_messages: &[LanguageModelRequestMessage],
project: Entity<Project>,
_action_log: Entity<ActionLog>,
cx: &mut App,
) -> Task<Result<String>> {
let input = match serde_json::from_value::<CreateDirectoryToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
let project_path = match project.read(cx).find_project_path(&input.path, cx) {
Some(project_path) => project_path,
None => return Task::ready(Err(anyhow!("Path to create was outside the project"))),
};
let destination_path: Arc<str> = input.path.as_str().into();
cx.spawn(async move |cx| {
project
.update(cx, |project, cx| {
project.create_entry(project_path.clone(), true, cx)
})?
.await
.map_err(|err| anyhow!("Unable to create directory {destination_path}: {err}"))?;
Ok(format!("Created directory {destination_path}"))
})
}
}

View File

@@ -1,3 +0,0 @@
Creates a new directory at the specified path within the project. Returns confirmation that the directory was created.
This tool creates a directory and all necessary parent directories (similar to `mkdir -p`). It should be used whenever you need to create new directories within the project.

View File

@@ -1,112 +0,0 @@
use anyhow::{anyhow, Result};
use assistant_tool::{ActionLog, Tool};
use gpui::{App, Entity, Task};
use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use ui::IconName;
use util::markdown::MarkdownString;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct CreateFileToolInput {
/// The path where the file should be created.
///
/// <example>
/// If the project has the following structure:
///
/// - directory1/
/// - directory2/
///
/// You can create a new file by providing a path of "directory1/new_file.txt"
/// </example>
pub path: String,
/// The text contents of the file to create.
///
/// <example>
/// To create a file with the text "Hello, World!", provide contents of "Hello, World!"
/// </example>
pub contents: String,
}
pub struct CreateFileTool;
impl Tool for CreateFileTool {
fn name(&self) -> String {
"create-file".into()
}
fn needs_confirmation(&self) -> bool {
true
}
fn description(&self) -> String {
include_str!("./create_file_tool/description.md").into()
}
fn icon(&self) -> IconName {
IconName::FileCreate
}
fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(CreateFileToolInput);
serde_json::to_value(&schema).unwrap()
}
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<CreateFileToolInput>(input.clone()) {
Ok(input) => {
let path = MarkdownString::escape(&input.path);
format!("Create file `{path}`")
}
Err(_) => "Create file".to_string(),
}
}
fn run(
self: Arc<Self>,
input: serde_json::Value,
_messages: &[LanguageModelRequestMessage],
project: Entity<Project>,
_action_log: Entity<ActionLog>,
cx: &mut App,
) -> Task<Result<String>> {
let input = match serde_json::from_value::<CreateFileToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
let project_path = match project.read(cx).find_project_path(&input.path, cx) {
Some(project_path) => project_path,
None => return Task::ready(Err(anyhow!("Path to create was outside the project"))),
};
let contents: Arc<str> = input.contents.as_str().into();
let destination_path: Arc<str> = input.path.as_str().into();
cx.spawn(async move |cx| {
project
.update(cx, |project, cx| {
project.create_entry(project_path.clone(), false, cx)
})?
.await
.map_err(|err| anyhow!("Unable to create {destination_path}: {err}"))?;
let buffer = project
.update(cx, |project, cx| {
project.open_buffer(project_path.clone(), cx)
})?
.await
.map_err(|err| anyhow!("Unable to open buffer for {destination_path}: {err}"))?;
buffer.update(cx, |buffer, cx| {
buffer.set_text(contents, cx);
})?;
project
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
.await
.map_err(|err| anyhow!("Unable to save buffer for {destination_path}: {err}"))?;
Ok(format!("Created file {destination_path}"))
})
}
}

View File

@@ -1,3 +0,0 @@
Creates a new file at the specified path within the project, containing the given text content. Returns confirmation that the file was created.
This tool is the most efficient way to create new files within the project, so it should always be chosen whenever it's necessary to create a new file in the project with specific text content, or whenever a file in the project needs such a drastic change that you would prefer to replace the entire thing instead of making individual edits. This tool should not be used when making changes to parts of an existing file but not all of it. In those cases, it's better to use another approach to edit the file.

View File

@@ -6,7 +6,6 @@ use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
use ui::IconName;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct DeletePathToolInput { pub struct DeletePathToolInput {
@@ -31,30 +30,15 @@ impl Tool for DeletePathTool {
"delete-path".into() "delete-path".into()
} }
fn needs_confirmation(&self) -> bool {
true
}
fn description(&self) -> String { fn description(&self) -> String {
include_str!("./delete_path_tool/description.md").into() include_str!("./delete_path_tool/description.md").into()
} }
fn icon(&self) -> IconName {
IconName::FileDelete
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(DeletePathToolInput); let schema = schemars::schema_for!(DeletePathToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<DeletePathToolInput>(input.clone()) {
Ok(input) => format!("Delete “`{}`”", input.path),
Err(_) => "Delete path".to_string(),
}
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,
@@ -75,12 +59,13 @@ impl Tool for DeletePathTool {
{ {
Some(deletion_task) => cx.background_spawn(async move { Some(deletion_task) => cx.background_spawn(async move {
match deletion_task.await { match deletion_task.await {
Ok(()) => Ok(format!("Deleted {path_str}")), Ok(()) => Ok(format!("Deleted {}", &path_str)),
Err(err) => Err(anyhow!("Failed to delete {path_str}: {err}")), Err(err) => Err(anyhow!("Failed to delete {}: {}", &path_str, err)),
} }
}), }),
None => Task::ready(Err(anyhow!( None => Task::ready(Err(anyhow!(
"Couldn't delete {path_str} because that path isn't in this project." "Couldn't delete {} because that path isn't in this project.",
path_str
))), ))),
} }
} }

View File

@@ -6,9 +6,11 @@ use language_model::LanguageModelRequestMessage;
use project::Project; use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{fmt::Write, path::Path, sync::Arc}; use std::{
use ui::IconName; fmt::Write,
use util::markdown::MarkdownString; path::{Path, PathBuf},
sync::Arc,
};
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct DiagnosticsToolInput { pub struct DiagnosticsToolInput {
@@ -25,17 +27,7 @@ pub struct DiagnosticsToolInput {
/// ///
/// If you wanna access diagnostics for `dolor.txt` in `ipsum`, you should use the path `ipsum/dolor.txt`. /// If you wanna access diagnostics for `dolor.txt` in `ipsum`, you should use the path `ipsum/dolor.txt`.
/// </example> /// </example>
#[serde(deserialize_with = "deserialize_path")] pub path: Option<PathBuf>,
pub path: Option<String>,
}
fn deserialize_path<'de, D>(deserializer: D) -> Result<Option<String>, D::Error>
where
D: serde::Deserializer<'de>,
{
let opt = Option::<String>::deserialize(deserializer)?;
// The model passes an empty string sometimes
Ok(opt.filter(|s| !s.is_empty()))
} }
pub struct DiagnosticsTool; pub struct DiagnosticsTool;
@@ -45,120 +37,91 @@ impl Tool for DiagnosticsTool {
"diagnostics".into() "diagnostics".into()
} }
fn needs_confirmation(&self) -> bool {
false
}
fn description(&self) -> String { fn description(&self) -> String {
include_str!("./diagnostics_tool/description.md").into() include_str!("./diagnostics_tool/description.md").into()
} }
fn icon(&self) -> IconName {
IconName::Warning
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(DiagnosticsToolInput); let schema = schemars::schema_for!(DiagnosticsToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, input: &serde_json::Value) -> String {
if let Some(path) = serde_json::from_value::<DiagnosticsToolInput>(input.clone())
.ok()
.and_then(|input| match input.path {
Some(path) if !path.is_empty() => Some(MarkdownString::escape(&path)),
_ => None,
})
{
format!("Check diagnostics for `{path}`")
} else {
"Check project diagnostics".to_string()
}
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,
_messages: &[LanguageModelRequestMessage], _messages: &[LanguageModelRequestMessage],
project: Entity<Project>, project: Entity<Project>,
action_log: Entity<ActionLog>, _action_log: Entity<ActionLog>,
cx: &mut App, cx: &mut App,
) -> Task<Result<String>> { ) -> Task<Result<String>> {
match serde_json::from_value::<DiagnosticsToolInput>(input) let input = match serde_json::from_value::<DiagnosticsToolInput>(input) {
.ok() Ok(input) => input,
.and_then(|input| input.path) Err(err) => return Task::ready(Err(anyhow!(err))),
{ };
Some(path) if !path.is_empty() => {
let Some(project_path) = project.read(cx).find_project_path(&path, cx) else {
return Task::ready(Err(anyhow!("Could not find path {path} in project",)));
};
let buffer = if let Some(path) = input.path {
project.update(cx, |project, cx| project.open_buffer(project_path, cx)); let Some(project_path) = project.read(cx).find_project_path(&path, cx) else {
return Task::ready(Err(anyhow!("Could not find path in project")));
};
let buffer = project.update(cx, |project, cx| project.open_buffer(project_path, cx));
cx.spawn(async move |cx| { cx.spawn(async move |cx| {
let mut output = String::new();
let buffer = buffer.await?;
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
for (_, group) in snapshot.diagnostic_groups(None) {
let entry = &group.entries[group.primary_ix];
let range = entry.range.to_point(&snapshot);
let severity = match entry.diagnostic.severity {
DiagnosticSeverity::ERROR => "error",
DiagnosticSeverity::WARNING => "warning",
_ => continue,
};
writeln!(
output,
"{} at line {}: {}",
severity,
range.start.row + 1,
entry.diagnostic.message
)?;
}
if output.is_empty() {
Ok("File doesn't have errors or warnings!".to_string())
} else {
Ok(output)
}
})
}
_ => {
let project = project.read(cx);
let mut output = String::new(); let mut output = String::new();
let mut has_diagnostics = false; let buffer = buffer.await?;
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
for (project_path, _, summary) in project.diagnostic_summaries(true, cx) { for (_, group) in snapshot.diagnostic_groups(None) {
if summary.error_count > 0 || summary.warning_count > 0 { let entry = &group.entries[group.primary_ix];
let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) let range = entry.range.to_point(&snapshot);
else { let severity = match entry.diagnostic.severity {
continue; DiagnosticSeverity::ERROR => "error",
}; DiagnosticSeverity::WARNING => "warning",
_ => continue,
};
has_diagnostics = true; writeln!(
output.push_str(&format!( output,
"{}: {} error(s), {} warning(s)\n", "{} at line {}: {}",
Path::new(worktree.read(cx).root_name()) severity,
.join(project_path.path) range.start.row + 1,
.display(), entry.diagnostic.message
summary.error_count, )?;
summary.warning_count
));
}
} }
action_log.update(cx, |action_log, _cx| { if output.is_empty() {
action_log.checked_project_diagnostics(); Ok("File doesn't have errors or warnings!".to_string())
});
if has_diagnostics {
Task::ready(Ok(output))
} else { } else {
Task::ready(Ok("No errors or warnings found in the project.".to_string())) Ok(output)
} }
})
} else {
let project = project.read(cx);
let mut output = String::new();
let mut has_diagnostics = false;
for (project_path, _, summary) in project.diagnostic_summaries(true, cx) {
if summary.error_count > 0 || summary.warning_count > 0 {
let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx)
else {
continue;
};
has_diagnostics = true;
output.push_str(&format!(
"{}: {} error(s), {} warning(s)\n",
Path::new(worktree.read(cx).root_name())
.join(project_path.path)
.display(),
summary.error_count,
summary.warning_count
));
}
}
if has_diagnostics {
Task::ready(Ok(output))
} else {
Task::ready(Ok("No errors or warnings found in the project.".to_string()))
} }
} }
} }

View File

@@ -14,5 +14,3 @@ To get diagnostics for a specific file:
To get a project-wide diagnostic summary: To get a project-wide diagnostic summary:
{} {}
</example> </example>
IMPORTANT: When you're done making changes, you **MUST** get the **project** diagnostics (input: `{}`) at the end of your edits so you can fix any problems you might have introduced. **DO NOT** tell the user you're done before doing this!

View File

@@ -1,30 +1,33 @@
mod edit_action; mod edit_action;
pub mod log; pub mod log;
mod replace;
use crate::replace::{replace_exact, replace_with_flexible_indent};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use assistant_tool::{ActionLog, Tool}; use assistant_tool::{ActionLog, Tool};
use collections::HashSet; use collections::HashSet;
use edit_action::{EditAction, EditActionParser}; use edit_action::{EditAction, EditActionParser};
use futures::{channel::mpsc, SinkExt, StreamExt}; use futures::StreamExt;
use gpui::{App, AppContext, AsyncApp, Entity, Task}; use gpui::{App, AsyncApp, Entity, Task};
use language_model::{ use language_model::{
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
}; };
use log::{EditToolLog, EditToolRequestId}; use log::{EditToolLog, EditToolRequestId};
use project::Project; use project::Project;
use replace::{replace_exact, replace_with_flexible_indent};
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt::Write; use std::fmt::Write;
use std::sync::Arc; use std::sync::Arc;
use ui::IconName;
use util::ResultExt; use util::ResultExt;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct EditFilesToolInput { pub struct EditFilesToolInput {
/// High-level edit instructions. These will be interpreted by a smaller /// High-level edit instructions. These will be interpreted by a smaller
/// model, so explain the changes you want that model to make and which /// model, so explain the changes you want that model to make and which
/// file paths need changing. The description should be concise and clear. /// file paths need changing.
///
/// The description should be concise and clear. We will show this
/// description to the user as well.
/// ///
/// WARNING: When specifying which file paths need changing, you MUST /// WARNING: When specifying which file paths need changing, you MUST
/// start each path with one of the project's root directories. /// start each path with one of the project's root directories.
@@ -55,21 +58,6 @@ pub struct EditFilesToolInput {
/// Notice how we never specify code snippets in the instructions! /// Notice how we never specify code snippets in the instructions!
/// </example> /// </example>
pub edit_instructions: String, pub edit_instructions: String,
/// A user-friendly description of what changes are being made.
/// This will be shown to the user in the UI to describe the edit operation. The screen real estate for this UI will be extremely
/// constrained, so make the description extremely terse.
///
/// <example>
/// For fixing a broken authentication system:
/// "Fix auth bug in login flow"
/// </example>
///
/// <example>
/// For adding unit tests to a module:
/// "Add tests for user profile logic"
/// </example>
pub display_description: String,
} }
pub struct EditFilesTool; pub struct EditFilesTool;
@@ -79,30 +67,15 @@ impl Tool for EditFilesTool {
"edit-files".into() "edit-files".into()
} }
fn needs_confirmation(&self) -> bool {
true
}
fn description(&self) -> String { fn description(&self) -> String {
include_str!("./edit_files_tool/description.md").into() include_str!("./edit_files_tool/description.md").into()
} }
fn icon(&self) -> IconName {
IconName::Pencil
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(EditFilesToolInput); let schema = schemars::schema_for!(EditFilesToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<EditFilesToolInput>(input.clone()) {
Ok(input) => input.display_description,
Err(_) => "Edit files".to_string(),
}
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,
@@ -153,45 +126,24 @@ impl Tool for EditFilesTool {
struct EditToolRequest { struct EditToolRequest {
parser: EditActionParser, parser: EditActionParser,
editor_response: EditorResponse, output: String,
changed_buffers: HashSet<Entity<language::Buffer>>,
bad_searches: Vec<BadSearch>,
project: Entity<Project>, project: Entity<Project>,
action_log: Entity<ActionLog>, action_log: Entity<ActionLog>,
tool_log: Option<(Entity<EditToolLog>, EditToolRequestId)>, tool_log: Option<(Entity<EditToolLog>, EditToolRequestId)>,
} }
enum EditorResponse {
/// The editor model hasn't produced any actions yet.
/// If we don't have any by the end, we'll return its message to the architect model.
Message(String),
/// The editor model produced at least one action.
Actions {
applied: Vec<AppliedAction>,
search_errors: Vec<SearchError>,
},
}
struct AppliedAction {
source: String,
buffer: Entity<language::Buffer>,
}
#[derive(Debug)] #[derive(Debug)]
enum DiffResult { enum DiffResult {
BadSearch(BadSearch),
Diff(language::Diff), Diff(language::Diff),
SearchError(SearchError),
} }
#[derive(Debug)] #[derive(Debug)]
enum SearchError { struct BadSearch {
NoMatch { file_path: String,
file_path: String, search: String,
search: String,
},
EmptyBuffer {
file_path: String,
search: String,
exists: bool,
},
} }
impl EditToolRequest { impl EditToolRequest {
@@ -243,36 +195,24 @@ impl EditToolRequest {
temperature: Some(0.0), temperature: Some(0.0),
}; };
let (mut tx, mut rx) = mpsc::channel::<String>(32);
let stream = model.stream_completion_text(llm_request, &cx); let stream = model.stream_completion_text(llm_request, &cx);
let reader_task = cx.background_spawn(async move { let mut chunks = stream.await?;
let mut chunks = stream.await?;
while let Some(chunk) = chunks.stream.next().await {
if let Some(chunk) = chunk.log_err() {
// we don't process here because the API fails
// if we take too long between reads
tx.send(chunk).await?
}
}
tx.close().await?;
anyhow::Ok(())
});
let mut request = Self { let mut request = Self {
parser: EditActionParser::new(), parser: EditActionParser::new(),
editor_response: EditorResponse::Message(String::with_capacity(256)), // we start with the success header so we don't need to shift the output in the common case
output: Self::SUCCESS_OUTPUT_HEADER.to_string(),
changed_buffers: HashSet::default(),
bad_searches: Vec::new(),
action_log, action_log,
project, project,
tool_log, tool_log,
}; };
while let Some(chunk) = rx.next().await { while let Some(chunk) = chunks.stream.next().await {
request.process_response_chunk(&chunk, cx).await?; request.process_response_chunk(&chunk?, cx).await?;
} }
reader_task.await?;
request.finalize(cx).await request.finalize(cx).await
}) })
} }
@@ -280,12 +220,6 @@ impl EditToolRequest {
async fn process_response_chunk(&mut self, chunk: &str, cx: &mut AsyncApp) -> Result<()> { async fn process_response_chunk(&mut self, chunk: &str, cx: &mut AsyncApp) -> Result<()> {
let new_actions = self.parser.parse_chunk(chunk); let new_actions = self.parser.parse_chunk(chunk);
if let EditorResponse::Message(ref mut message) = self.editor_response {
if new_actions.is_empty() {
message.push_str(chunk);
}
}
if let Some((ref log, req_id)) = self.tool_log { if let Some((ref log, req_id)) = self.tool_log {
log.update(cx, |log, cx| { log.update(cx, |log, cx| {
log.push_editor_response_chunk(req_id, chunk, &new_actions, cx) log.push_editor_response_chunk(req_id, chunk, &new_actions, cx)
@@ -336,45 +270,18 @@ impl EditToolRequest {
}?; }?;
match result { match result {
DiffResult::SearchError(error) => { DiffResult::BadSearch(invalid_replace) => {
self.push_search_error(error); self.bad_searches.push(invalid_replace);
} }
DiffResult::Diff(diff) => { DiffResult::Diff(diff) => {
let _clock = buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?; let _clock = buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
self.push_applied_action(AppliedAction { source, buffer }); write!(&mut self.output, "\n\n{}", source)?;
self.changed_buffers.insert(buffer);
} }
} }
anyhow::Ok(()) Ok(())
}
fn push_search_error(&mut self, error: SearchError) {
match &mut self.editor_response {
EditorResponse::Message(_) => {
self.editor_response = EditorResponse::Actions {
applied: Vec::new(),
search_errors: vec![error],
};
}
EditorResponse::Actions { search_errors, .. } => {
search_errors.push(error);
}
}
}
fn push_applied_action(&mut self, action: AppliedAction) {
match &mut self.editor_response {
EditorResponse::Message(_) => {
self.editor_response = EditorResponse::Actions {
applied: vec![action],
search_errors: Vec::new(),
};
}
EditorResponse::Actions { applied, .. } => {
applied.push(action);
}
}
} }
async fn replace_diff( async fn replace_diff(
@@ -383,171 +290,119 @@ impl EditToolRequest {
file_path: std::path::PathBuf, file_path: std::path::PathBuf,
snapshot: language::BufferSnapshot, snapshot: language::BufferSnapshot,
) -> Result<DiffResult> { ) -> Result<DiffResult> {
if snapshot.is_empty() { let result =
let exists = snapshot
.file()
.map_or(false, |file| file.disk_state().exists());
let error = SearchError::EmptyBuffer {
file_path: file_path.display().to_string(),
exists,
search: old,
};
return Ok(DiffResult::SearchError(error));
}
let replace_result =
// Try to match exactly // Try to match exactly
replace_exact(&old, &new, &snapshot) replace_exact(&old, &new, &snapshot)
.await .await
// If that fails, try being flexible about indentation // If that fails, try being flexible about indentation
.or_else(|| replace_with_flexible_indent(&old, &new, &snapshot)); .or_else(|| replace_with_flexible_indent(&old, &new, &snapshot));
let Some(diff) = replace_result else { let Some(diff) = result else {
let error = SearchError::NoMatch { return anyhow::Ok(DiffResult::BadSearch(BadSearch {
search: old, search: old,
file_path: file_path.display().to_string(), file_path: file_path.display().to_string(),
}; }));
return Ok(DiffResult::SearchError(error));
}; };
Ok(DiffResult::Diff(diff)) anyhow::Ok(DiffResult::Diff(diff))
} }
const SUCCESS_OUTPUT_HEADER: &str = "Successfully applied. Here's a list of changes:";
const ERROR_OUTPUT_HEADER_NO_EDITS: &str = "I couldn't apply any edits!";
const ERROR_OUTPUT_HEADER_WITH_EDITS: &str =
"Errors occurred. First, here's a list of the edits we managed to apply:";
async fn finalize(self, cx: &mut AsyncApp) -> Result<String> { async fn finalize(self, cx: &mut AsyncApp) -> Result<String> {
match self.editor_response { let changed_buffer_count = self.changed_buffers.len();
EditorResponse::Message(message) => Err(anyhow!(
"No edits were applied! You might need to provide more context.\n\n{}",
message
)),
EditorResponse::Actions {
applied,
search_errors,
} => {
let mut output = String::with_capacity(1024);
let parse_errors = self.parser.errors(); // Save each buffer once at the end
let has_errors = !search_errors.is_empty() || !parse_errors.is_empty(); for buffer in &self.changed_buffers {
self.project
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
.await?;
}
if has_errors { self.action_log
let error_count = search_errors.len() + parse_errors.len(); .update(cx, |log, cx| log.buffer_edited(self.changed_buffers, cx))
.log_err();
if applied.is_empty() { let errors = self.parser.errors();
writeln!(
&mut output,
"{} errors occurred! No edits were applied.",
error_count,
)?;
} else {
writeln!(
&mut output,
"{} errors occurred, but {} edits were correctly applied.",
error_count,
applied.len(),
)?;
writeln!( if errors.is_empty() && self.bad_searches.is_empty() {
&mut output, if changed_buffer_count == 0 {
"# {} SEARCH/REPLACE block(s) applied:\n\nDo not re-send these since they are already applied!\n", return Err(anyhow!(
applied.len() "The instructions didn't lead to any changes. You might need to consult the file contents first."
)?; ));
} }
} else {
write!(
&mut output,
"Successfully applied! Here's a list of applied edits:"
)?;
}
let mut changed_buffers = HashSet::default(); Ok(self.output)
} else {
let mut output = self.output;
for action in applied { if output.is_empty() {
changed_buffers.insert(action.buffer); output.replace_range(
write!(&mut output, "\n\n{}", action.source)?; 0..Self::SUCCESS_OUTPUT_HEADER.len(),
} Self::ERROR_OUTPUT_HEADER_NO_EDITS,
);
} else {
output.replace_range(
0..Self::SUCCESS_OUTPUT_HEADER.len(),
Self::ERROR_OUTPUT_HEADER_WITH_EDITS,
);
}
for buffer in &changed_buffers { if !self.bad_searches.is_empty() {
self.project writeln!(
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))? &mut output,
.await?; "\n\n# {} SEARCH/REPLACE block(s) failed to match:\n",
} self.bad_searches.len()
)?;
self.action_log for replace in self.bad_searches {
.update(cx, |log, cx| log.buffer_edited(changed_buffers.clone(), cx))
.log_err();
if !search_errors.is_empty() {
writeln!( writeln!(
&mut output, &mut output,
"\n\n## {} SEARCH/REPLACE block(s) failed to match:\n", "## No exact match in: {}\n```\n{}\n```\n",
search_errors.len() replace.file_path, replace.search,
)?;
for error in search_errors {
match error {
SearchError::NoMatch { file_path, search } => {
writeln!(
&mut output,
"### No exact match in: `{}`\n```\n{}\n```\n",
file_path, search,
)?;
}
SearchError::EmptyBuffer {
file_path,
exists: true,
search,
} => {
writeln!(
&mut output,
"### No match because `{}` is empty:\n```\n{}\n```\n",
file_path, search,
)?;
}
SearchError::EmptyBuffer {
file_path,
exists: false,
search,
} => {
writeln!(
&mut output,
"### No match because `{}` does not exist:\n```\n{}\n```\n",
file_path, search,
)?;
}
}
}
write!(&mut output,
"The SEARCH section must exactly match an existing block of lines including all white \
space, comments, indentation, docstrings, etc."
)?; )?;
} }
if !parse_errors.is_empty() { write!(&mut output,
writeln!( "The SEARCH section must exactly match an existing block of lines including all white \
&mut output, space, comments, indentation, docstrings, etc."
"\n\n## {} SEARCH/REPLACE blocks failed to parse:", )?;
parse_errors.len() }
)?;
for error in parse_errors { if !errors.is_empty() {
writeln!(&mut output, "- {}", error)?; writeln!(
} &mut output,
} "\n\n# {} SEARCH/REPLACE blocks failed to parse:",
errors.len()
)?;
if has_errors { for error in errors {
writeln!(&mut output, writeln!(&mut output, "- {}", error)?;
"\n\nYou can fix errors by running the tool again. You can include instructions, \
but errors are part of the conversation so you don't need to repeat them.",
)?;
Err(anyhow!(output))
} else {
Ok(output)
} }
} }
if changed_buffer_count > 0 {
writeln!(
&mut output,
"\n\nThe other SEARCH/REPLACE blocks were applied successfully. Do not re-send them!",
)?;
}
writeln!(
&mut output,
"{}You can fix errors by running the tool again. You can include instructions, \
but errors are part of the conversation so you don't need to repeat them.",
if changed_buffer_count == 0 {
"\n\n"
} else {
""
}
)?;
Err(anyhow!(output))
} }
} }
} }

View File

@@ -3,7 +3,3 @@ Edit files in the current project by specifying instructions in natural language
When using this tool, you should suggest one coherent edit that can be made to the codebase. When using this tool, you should suggest one coherent edit that can be made to the codebase.
When the set of edits you want to make is large or complex, feel free to invoke this tool multiple times, each time focusing on a specific change you wanna make. When the set of edits you want to make is large or complex, feel free to invoke this tool multiple times, each time focusing on a specific change you wanna make.
You should use this tool when you want to edit a subset of a file's contents, but not the entire file. You should not use this tool when you want to replace the entire contents of a file with completely different contents, and you absolutely must never use this tool to create new files from scratch. If you ever consider using this tool to create a new file from scratch, for any reason, instead you must reconsider and choose a different approach.
DO NOT call this tool until the code to be edited appears in the conversation! You must use the `read-files` tool or ask the user to add it to context first.

View File

@@ -120,7 +120,7 @@ Break large *SEARCH/REPLACE* blocks into a series of smaller blocks that each ch
Include just the changing lines, and a few surrounding lines if needed for uniqueness. Include just the changing lines, and a few surrounding lines if needed for uniqueness.
Do not include long runs of unchanging lines in *SEARCH/REPLACE* blocks. Do not include long runs of unchanging lines in *SEARCH/REPLACE* blocks.
Only create *SEARCH/REPLACE* blocks for files that have been read! Even though the conversation includes `read-file` tool results, you *CANNOT* issue your own reads. If the conversation doesn't include the code you need to edit, ask for it to be read explicitly. Only create *SEARCH/REPLACE* blocks for files that the user has added to the chat!
To move code within a file, use 2 *SEARCH/REPLACE* blocks: 1 to delete it from its current location, 1 to insert it in the new location. To move code within a file, use 2 *SEARCH/REPLACE* blocks: 1 to delete it from its current location, 1 to insert it in the new location.

View File

@@ -1,6 +1,5 @@
use language::{BufferSnapshot, Diff, Point, ToOffset}; use language::{BufferSnapshot, Diff, Point, ToOffset};
use project::search::SearchQuery; use project::search::SearchQuery;
use std::iter;
use util::{paths::PathMatcher, ResultExt as _}; use util::{paths::PathMatcher, ResultExt as _};
/// Performs an exact string replacement in a buffer, requiring precise character-for-character matching. /// Performs an exact string replacement in a buffer, requiring precise character-for-character matching.
@@ -12,8 +11,8 @@ pub async fn replace_exact(old: &str, new: &str, snapshot: &BufferSnapshot) -> O
false, false,
true, true,
true, true,
PathMatcher::new(iter::empty::<&str>()).ok()?, PathMatcher::new(&[]).ok()?,
PathMatcher::new(iter::empty::<&str>()).ok()?, PathMatcher::new(&[]).ok()?,
None, None,
) )
.log_err()?; .log_err()?;

View File

@@ -12,8 +12,6 @@ use language_model::LanguageModelRequestMessage;
use project::Project; use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ui::IconName;
use util::markdown::MarkdownString;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
enum ContentType { enum ContentType {
@@ -115,30 +113,15 @@ impl Tool for FetchTool {
"fetch".to_string() "fetch".to_string()
} }
fn needs_confirmation(&self) -> bool {
true
}
fn description(&self) -> String { fn description(&self) -> String {
include_str!("./fetch_tool/description.md").to_string() include_str!("./fetch_tool/description.md").to_string()
} }
fn icon(&self) -> IconName {
IconName::Globe
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(FetchToolInput); let schema = schemars::schema_for!(FetchToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<FetchToolInput>(input.clone()) {
Ok(input) => format!("Fetch {}", MarkdownString::escape(&input.url)),
Err(_) => "Fetch URL".to_string(),
}
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,

View File

@@ -1,229 +0,0 @@
use anyhow::{anyhow, Context as _, Result};
use assistant_tool::{ActionLog, Tool};
use gpui::{App, AppContext, Entity, Task};
use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::{collections::HashSet, path::PathBuf, sync::Arc};
use ui::IconName;
use crate::replace::replace_exact;
// NOTE(review): the `///` doc comments on this struct and its fields are not
// plain documentation — `schemars::schema_for!` lifts them into the generated
// JSON schema as `description` strings, which is what the language model reads
// when deciding how to call this tool. Treat any wording change here as a
// behavior change, not a cosmetic one.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct FindReplaceFileToolInput {
    /// The path of the file to modify.
    ///
    /// WARNING: When specifying which file path need changing, you MUST
    /// start each path with one of the project's root directories.
    ///
    /// The following examples assume we have two root directories in the project:
    /// - backend
    /// - frontend
    ///
    /// <example>
    /// `backend/src/main.rs`
    ///
    /// Notice how the file path starts with root-1. Without that, the path
    /// would be ambiguous and the call would fail!
    /// </example>
    ///
    /// <example>
    /// `frontend/db.js`
    /// </example>
    pub path: PathBuf,

    /// A user-friendly markdown description of what's being replaced. This will be shown in the UI.
    ///
    /// <example>Fix API endpoint URLs</example>
    /// <example>Update copyright year in `page_footer`</example>
    pub display_description: String,

    /// The unique string to find in the file. This string cannot be empty;
    /// if the string is empty, the tool call will fail. Remember, do not use this tool
    /// to create new files from scratch, or to overwrite existing files! Use a different
    /// approach if you want to do that.
    ///
    /// If this string appears more than once in the file, this tool call will fail,
    /// so it is absolutely critical that you verify ahead of time that the string
    /// is unique. You can search within the file to verify this.
    ///
    /// To make the string more likely to be unique, include a minimum of 3 lines of context
    /// before the string you actually want to find, as well as a minimum of 3 lines of
    /// context after the string you want to find. (These lines of context should appear
    /// in the `replace` string as well.) If 3 lines of context is not enough to obtain
    /// a string that appears only once in the file, then double the number of context lines
    /// until the string becomes unique. (Start with 3 lines before and 3 lines after
    /// though, because too much context is needlessly costly.)
    ///
    /// Do not alter the context lines of code in any way, and make sure to preserve all
    /// whitespace and indentation for all lines of code. This string must be exactly as
    /// it appears in the file, because this tool will do a literal find/replace, and if
    /// even one character in this string is different in any way from how it appears
    /// in the file, then the tool call will fail.
    ///
    /// <example>
    /// If a file contains this code:
    ///
    /// ```rust
    /// fn check_user_permissions(user_id: &str) -> Result<bool> {
    ///     // Check if user exists first
    ///     let user = database.find_user(user_id)?;
    ///
    ///     // This is the part we want to modify
    ///     if user.role == "admin" {
    ///         return Ok(true);
    ///     }
    ///
    ///     // Check other permissions
    ///     check_custom_permissions(user_id)
    /// }
    /// ```
    ///
    /// Your find string should include at least 3 lines of context before and after the part
    /// you want to change:
    ///
    /// ```
    /// fn check_user_permissions(user_id: &str) -> Result<bool> {
    ///     // Check if user exists first
    ///     let user = database.find_user(user_id)?;
    ///
    ///     // This is the part we want to modify
    ///     if user.role == "admin" {
    ///         return Ok(true);
    ///     }
    ///
    ///     // Check other permissions
    ///     check_custom_permissions(user_id)
    /// }
    /// ```
    ///
    /// And your replace string might look like:
    ///
    /// ```
    /// fn check_user_permissions(user_id: &str) -> Result<bool> {
    ///     // Check if user exists first
    ///     let user = database.find_user(user_id)?;
    ///
    ///     // This is the part we want to modify
    ///     if user.role == "admin" || user.role == "superuser" {
    ///         return Ok(true);
    ///     }
    ///
    ///     // Check other permissions
    ///     check_custom_permissions(user_id)
    /// }
    /// ```
    /// </example>
    pub find: String,

    /// The string to replace the one unique occurrence of the find string with.
    pub replace: String,
}
pub struct FindReplaceFileTool;
impl Tool for FindReplaceFileTool {
    fn name(&self) -> String {
        "find-replace-file".into()
    }

    fn needs_confirmation(&self) -> bool {
        // This tool modifies files on disk, so require user confirmation.
        true
    }

    fn description(&self) -> String {
        include_str!("find_replace_tool/description.md").to_string()
    }

    fn icon(&self) -> IconName {
        IconName::Pencil
    }

    fn input_schema(&self) -> serde_json::Value {
        let schema = schemars::schema_for!(FindReplaceFileToolInput);
        serde_json::to_value(&schema).unwrap()
    }

    fn ui_text(&self, input: &serde_json::Value) -> String {
        match serde_json::from_value::<FindReplaceFileToolInput>(input.clone()) {
            Ok(input) => input.display_description,
            Err(_) => "Edit file".to_string(),
        }
    }

    /// Performs a single literal find/replace in the file named by `input.path`,
    /// saves the buffer, and records the edit in the action log.
    ///
    /// Resolves to a confirmation message on success, or an error explaining
    /// why the `find` string could not be matched (missing file, empty file,
    /// or simply no exact match).
    fn run(
        self: Arc<Self>,
        input: serde_json::Value,
        _messages: &[LanguageModelRequestMessage],
        project: Entity<Project>,
        action_log: Entity<ActionLog>,
        cx: &mut App,
    ) -> Task<Result<String>> {
        let input = match serde_json::from_value::<FindReplaceFileToolInput>(input) {
            Ok(input) => input,
            Err(err) => return Task::ready(Err(anyhow!(err))),
        };

        // Validate before doing any I/O: an empty `find` string can never
        // match. Previously this was only detected after resolving the path
        // and opening the buffer, which did needless work just to fail.
        if input.find.is_empty() {
            return Task::ready(Err(anyhow!(
                "`find` string cannot be empty. Use a different tool if you want to create a file."
            )));
        }

        cx.spawn(async move |cx| {
            let project_path = project.read_with(cx, |project, cx| {
                project
                    .find_project_path(&input.path, cx)
                    .context("Path not found in project")
            })??;

            let buffer = project
                .update(cx, |project, cx| project.open_buffer(project_path, cx))?
                .await?;

            let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;

            // Matching is purely textual work against an immutable snapshot,
            // so run it off the main thread.
            let result = cx
                .background_spawn(async move {
                    replace_exact(&input.find, &input.replace, &snapshot).await
                })
                .await;

            if let Some(diff) = result {
                buffer.update(cx, |buffer, cx| {
                    let _ = buffer.apply_diff(diff, cx);
                })?;

                project
                    .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
                    .await?;

                // Record the edited buffer so the assistant's action log can
                // surface/track the change.
                action_log.update(cx, |log, cx| {
                    let mut buffers = HashSet::default();
                    buffers.insert(buffer);
                    log.buffer_edited(buffers, cx);
                })?;

                Ok(format!("Edited {}", input.path.display()))
            } else {
                // Distinguish the common failure modes so the model receives
                // an actionable error rather than a generic "no match".
                let err = buffer.read_with(cx, |buffer, _cx| {
                    let file_exists = buffer
                        .file()
                        .map_or(false, |file| file.disk_state().exists());

                    if !file_exists {
                        anyhow!("{} does not exist", input.path.display())
                    } else if buffer.is_empty() {
                        anyhow!(
                            "{} is empty, so the provided `find` string wasn't found.",
                            input.path.display()
                        )
                    } else {
                        anyhow!("Failed to match the provided `find` string")
                    }
                })?;

                Err(err)
            }
        })
    }
}

View File

@@ -1,7 +0,0 @@
Find one unique part of a file in the project and replace that text with new text.
This tool is the preferred way to make edits to files. If you have multiple edits to make, including edits across multiple files, then make a plan to respond with a single message containing multiple calls to this tool - one call for each find/replace operation.
You should use this tool when you want to edit a subset of a file's contents, but not the entire file. You should not use this tool when you want to replace the entire contents of a file with completely different contents. You also should not use this tool when you want to move or rename a file. You absolutely must NEVER use this tool to create new files from scratch. If you ever consider using this tool to create a new file from scratch, for any reason, instead you must reconsider and choose a different approach.
DO NOT call this tool until the code to be edited appears in the conversation! You must use another tool to read the file's contents into the conversation, or ask the user to add it to context first.

View File

@@ -6,8 +6,6 @@ use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{fmt::Write, path::Path, sync::Arc}; use std::{fmt::Write, path::Path, sync::Arc};
use ui::IconName;
use util::markdown::MarkdownString;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct ListDirectoryToolInput { pub struct ListDirectoryToolInput {
@@ -33,7 +31,7 @@ pub struct ListDirectoryToolInput {
/// ///
/// If you wanna list contents in the directory `foo/baz`, you should use the path `foo/baz`. /// If you wanna list contents in the directory `foo/baz`, you should use the path `foo/baz`.
/// </example> /// </example>
pub path: String, pub path: Arc<Path>,
} }
pub struct ListDirectoryTool; pub struct ListDirectoryTool;
@@ -43,33 +41,15 @@ impl Tool for ListDirectoryTool {
"list-directory".into() "list-directory".into()
} }
fn needs_confirmation(&self) -> bool {
false
}
fn description(&self) -> String { fn description(&self) -> String {
include_str!("./list_directory_tool/description.md").into() include_str!("./list_directory_tool/description.md").into()
} }
fn icon(&self) -> IconName {
IconName::Folder
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(ListDirectoryToolInput); let schema = schemars::schema_for!(ListDirectoryToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<ListDirectoryToolInput>(input.clone()) {
Ok(input) => {
let path = MarkdownString::escape(&input.path);
format!("List the `{path}` directory's contents")
}
Err(_) => "List directory".to_string(),
}
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,
@@ -83,29 +63,8 @@ impl Tool for ListDirectoryTool {
Err(err) => return Task::ready(Err(anyhow!(err))), Err(err) => return Task::ready(Err(anyhow!(err))),
}; };
// Sometimes models will return these even though we tell it to give a path and not a glob.
// When this happens, just list the root worktree directories.
if matches!(input.path.as_str(), "." | "" | "./" | "*") {
let output = project
.read(cx)
.worktrees(cx)
.filter_map(|worktree| {
worktree.read(cx).root_entry().and_then(|entry| {
if entry.is_dir() {
entry.path.to_str()
} else {
None
}
})
})
.collect::<Vec<_>>()
.join("\n");
return Task::ready(Ok(output));
}
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else { let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
return Task::ready(Err(anyhow!("Path {} not found in project", input.path))); return Task::ready(Err(anyhow!("Path not found in project")));
}; };
let Some(worktree) = project let Some(worktree) = project
.read(cx) .read(cx)
@@ -116,11 +75,11 @@ impl Tool for ListDirectoryTool {
let worktree = worktree.read(cx); let worktree = worktree.read(cx);
let Some(entry) = worktree.entry_for_path(&project_path.path) else { let Some(entry) = worktree.entry_for_path(&project_path.path) else {
return Task::ready(Err(anyhow!("Path not found: {}", input.path))); return Task::ready(Err(anyhow!("Path not found: {}", input.path.display())));
}; };
if !entry.is_dir() { if !entry.is_dir() {
return Task::ready(Err(anyhow!("{} is not a directory.", input.path))); return Task::ready(Err(anyhow!("{} is a file.", input.path.display())));
} }
let mut output = String::new(); let mut output = String::new();
@@ -133,7 +92,7 @@ impl Tool for ListDirectoryTool {
.unwrap(); .unwrap();
} }
if output.is_empty() { if output.is_empty() {
return Task::ready(Ok(format!("{} is empty.", input.path))); return Task::ready(Ok(format!("{} is empty.", input.path.display())));
} }
Task::ready(Ok(output)) Task::ready(Ok(output))
} }

View File

@@ -1,132 +0,0 @@
use anyhow::{anyhow, Result};
use assistant_tool::{ActionLog, Tool};
use gpui::{App, AppContext, Entity, Task};
use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::{path::Path, sync::Arc};
use ui::IconName;
use util::markdown::MarkdownString;
// NOTE(review): these `///` doc comments are emitted into the JSON schema by
// `schemars::schema_for!` and are read by the language model when it invokes
// the tool — edit them as prompt text, not as plain documentation.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct MovePathToolInput {
    /// The source path of the file or directory to move/rename.
    ///
    /// <example>
    /// If the project has the following files:
    ///
    /// - directory1/a/something.txt
    /// - directory2/a/things.txt
    /// - directory3/a/other.txt
    ///
    /// You can move the first file by providing a source_path of "directory1/a/something.txt"
    /// </example>
    pub source_path: String,

    /// The destination path where the file or directory should be moved/renamed to.
    /// If the paths are the same except for the filename, then this will be a rename.
    ///
    /// <example>
    /// To move "directory1/a/something.txt" to "directory2/b/renamed.txt",
    /// provide a destination_path of "directory2/b/renamed.txt"
    /// </example>
    pub destination_path: String,
}
pub struct MovePathTool;
impl Tool for MovePathTool {
    fn name(&self) -> String {
        "move-path".into()
    }

    fn needs_confirmation(&self) -> bool {
        // Moving/renaming alters the project on disk; require confirmation.
        true
    }

    fn description(&self) -> String {
        include_str!("./move_path_tool/description.md").into()
    }

    fn icon(&self) -> IconName {
        IconName::ArrowRightLeft
    }

    fn input_schema(&self) -> serde_json::Value {
        let schema = schemars::schema_for!(MovePathToolInput);
        serde_json::to_value(&schema).unwrap()
    }

    /// Builds the short label shown in the UI for this tool call.
    ///
    /// When source and destination share a parent directory (a pure rename),
    /// shows "Rename `src` to `new-name`"; otherwise "Move `src` to `dest`".
    fn ui_text(&self, input: &serde_json::Value) -> String {
        match serde_json::from_value::<MovePathToolInput>(input.clone()) {
            Ok(input) => {
                let src = MarkdownString::escape(&input.source_path);
                let dest = MarkdownString::escape(&input.destination_path);
                let src_path = Path::new(&input.source_path);
                let dest_path = Path::new(&input.destination_path);

                match dest_path
                    .file_name()
                    .and_then(|os_str| os_str.to_os_string().into_string().ok())
                {
                    Some(filename) if src_path.parent() == dest_path.parent() => {
                        // Bug fix: the escaped filename was computed but never
                        // interpolated, so the label showed a literal
                        // "(unknown)" instead of the new name.
                        let filename = MarkdownString::escape(&filename);
                        format!("Rename `{src}` to `{filename}`")
                    }
                    _ => {
                        format!("Move `{src}` to `{dest}`")
                    }
                }
            }
            Err(_) => "Move path".to_string(),
        }
    }

    /// Moves/renames the entry at `source_path` to `destination_path` and
    /// resolves to a confirmation message, or an error if either path cannot
    /// be resolved inside the project or the rename itself fails.
    fn run(
        self: Arc<Self>,
        input: serde_json::Value,
        _messages: &[LanguageModelRequestMessage],
        project: Entity<Project>,
        _action_log: Entity<ActionLog>,
        cx: &mut App,
    ) -> Task<Result<String>> {
        let input = match serde_json::from_value::<MovePathToolInput>(input) {
            Ok(input) => input,
            Err(err) => return Task::ready(Err(anyhow!(err))),
        };

        // Resolve both paths on the main thread while we still have `cx`,
        // then await the rename on the background executor.
        let rename_task = project.update(cx, |project, cx| {
            match project
                .find_project_path(&input.source_path, cx)
                .and_then(|project_path| project.entry_for_path(&project_path, cx))
            {
                Some(entity) => match project.find_project_path(&input.destination_path, cx) {
                    Some(project_path) => project.rename_entry(entity.id, project_path.path, cx),
                    None => Task::ready(Err(anyhow!(
                        "Destination path {} was outside the project.",
                        input.destination_path
                    ))),
                },
                None => Task::ready(Err(anyhow!(
                    "Source path {} was not found in the project.",
                    input.source_path
                ))),
            }
        });

        cx.background_spawn(async move {
            match rename_task.await {
                Ok(_) => Ok(format!(
                    "Moved {} to {}",
                    input.source_path, input.destination_path
                )),
                Err(err) => Err(anyhow!(
                    "Failed to move {} to {}: {}",
                    input.source_path,
                    input.destination_path,
                    err
                )),
            }
        })
    }
}

View File

@@ -1,5 +0,0 @@
Moves or renames a file or directory in the project, and returns confirmation that the move succeeded.
If the source and destination directories are the same, but the filename is different, this performs
a rename. Otherwise, it performs a move.
This tool should be used when it's desirable to move or rename a file or directory without changing its contents at all.

View File

@@ -8,7 +8,6 @@ use language_model::LanguageModelRequestMessage;
use project::Project; use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ui::IconName;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
@@ -32,27 +31,15 @@ impl Tool for NowTool {
"now".into() "now".into()
} }
fn needs_confirmation(&self) -> bool {
false
}
fn description(&self) -> String { fn description(&self) -> String {
"Returns the current datetime in RFC 3339 format. Only use this tool when the user specifically asks for it or the current task would benefit from knowing the current datetime.".into() "Returns the current datetime in RFC 3339 format. Only use this tool when the user specifically asks for it or the current task would benefit from knowing the current datetime.".into()
} }
fn icon(&self) -> IconName {
IconName::Info
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(NowToolInput); let schema = schemars::schema_for!(NowToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, _input: &serde_json::Value) -> String {
"Get current time".to_string()
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,

View File

@@ -6,7 +6,6 @@ use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{path::PathBuf, sync::Arc}; use std::{path::PathBuf, sync::Arc};
use ui::IconName;
use util::paths::PathMatcher; use util::paths::PathMatcher;
use worktree::Snapshot; use worktree::Snapshot;
@@ -40,30 +39,15 @@ impl Tool for PathSearchTool {
"path-search".into() "path-search".into()
} }
fn needs_confirmation(&self) -> bool {
false
}
fn description(&self) -> String { fn description(&self) -> String {
include_str!("./path_search_tool/description.md").into() include_str!("./path_search_tool/description.md").into()
} }
fn icon(&self) -> IconName {
IconName::SearchCode
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(PathSearchToolInput); let schema = schemars::schema_for!(PathSearchToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<PathSearchToolInput>(input.clone()) {
Ok(input) => format!("Find paths matching “`{}`”", input.glob),
Err(_) => "Search paths".to_string(),
}
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,
@@ -76,13 +60,9 @@ impl Tool for PathSearchTool {
Ok(input) => (input.offset.unwrap_or(0), input.glob), Ok(input) => (input.offset.unwrap_or(0), input.glob),
Err(err) => return Task::ready(Err(anyhow!(err))), Err(err) => return Task::ready(Err(anyhow!(err))),
}; };
let path_matcher = match PathMatcher::new(&[glob.clone()]) {
let path_matcher = match PathMatcher::new([
// Sometimes models try to search for "". In this case, return all paths in the project.
if glob.is_empty() { "*" } else { &glob },
]) {
Ok(matcher) => matcher, Ok(matcher) => matcher,
Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {err}"))), Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {}", err))),
}; };
let snapshots: Vec<Snapshot> = project let snapshots: Vec<Snapshot> = project
.read(cx) .read(cx)

View File

@@ -9,8 +9,6 @@ use language_model::LanguageModelRequestMessage;
use project::Project; use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ui::IconName;
use util::markdown::MarkdownString;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct ReadFileToolInput { pub struct ReadFileToolInput {
@@ -46,33 +44,15 @@ impl Tool for ReadFileTool {
"read-file".into() "read-file".into()
} }
fn needs_confirmation(&self) -> bool {
false
}
fn description(&self) -> String { fn description(&self) -> String {
include_str!("./read_file_tool/description.md").into() include_str!("./read_file_tool/description.md").into()
} }
fn icon(&self) -> IconName {
IconName::Eye
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(ReadFileToolInput); let schema = schemars::schema_for!(ReadFileToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<ReadFileToolInput>(input.clone()) {
Ok(input) => {
let path = MarkdownString::escape(&input.path.display().to_string());
format!("Read file `{path}`")
}
Err(_) => "Read file".to_string(),
}
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,
@@ -87,10 +67,7 @@ impl Tool for ReadFileTool {
}; };
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else { let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
return Task::ready(Err(anyhow!( return Task::ready(Err(anyhow!("Path not found in project")));
"Path {} not found in project",
&input.path.display()
)));
}; };
cx.spawn(async move |cx| { cx.spawn(async move |cx| {

View File

@@ -11,8 +11,6 @@ use project::{
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{cmp, fmt::Write, sync::Arc}; use std::{cmp, fmt::Write, sync::Arc};
use ui::IconName;
use util::markdown::MarkdownString;
use util::paths::PathMatcher; use util::paths::PathMatcher;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
@@ -24,17 +22,10 @@ pub struct RegexSearchToolInput {
/// Optional starting position for paginated results (0-based). /// Optional starting position for paginated results (0-based).
/// When not provided, starts from the beginning. /// When not provided, starts from the beginning.
#[serde(default)] #[serde(default)]
pub offset: Option<u32>, pub offset: Option<usize>,
} }
impl RegexSearchToolInput { const RESULTS_PER_PAGE: usize = 20;
/// Which page of search results this is.
pub fn page(&self) -> u32 {
1 + (self.offset.unwrap_or(0) / RESULTS_PER_PAGE)
}
}
const RESULTS_PER_PAGE: u32 = 20;
pub struct RegexSearchTool; pub struct RegexSearchTool;
@@ -43,39 +34,15 @@ impl Tool for RegexSearchTool {
"regex-search".into() "regex-search".into()
} }
fn needs_confirmation(&self) -> bool {
false
}
fn description(&self) -> String { fn description(&self) -> String {
include_str!("./regex_search_tool/description.md").into() include_str!("./regex_search_tool/description.md").into()
} }
fn icon(&self) -> IconName {
IconName::Regex
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(RegexSearchToolInput); let schema = schemars::schema_for!(RegexSearchToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<RegexSearchToolInput>(input.clone()) {
Ok(input) => {
let page = input.page();
let regex = MarkdownString::escape(&input.regex);
if page > 1 {
format!("Get page {page} of search results for regex “`{regex}`”")
} else {
format!("Search files for regex “`{regex}`”")
}
}
Err(_) => "Search with regex".to_string(),
}
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,
@@ -187,7 +154,7 @@ impl Tool for RegexSearchTool {
offset + matches_found, offset + matches_found,
offset + RESULTS_PER_PAGE, offset + RESULTS_PER_PAGE,
)) ))
} else { } else {
Ok(format!("Found {matches_found} matches:\n{output}")) Ok(format!("Found {matches_found} matches:\n{output}"))
} }
}) })

View File

@@ -7,7 +7,6 @@ use language_model::LanguageModelRequestMessage;
use project::Project; use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ui::IconName;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct ThinkingToolInput { pub struct ThinkingToolInput {
@@ -23,27 +22,15 @@ impl Tool for ThinkingTool {
"thinking".to_string() "thinking".to_string()
} }
fn needs_confirmation(&self) -> bool {
false
}
fn description(&self) -> String { fn description(&self) -> String {
include_str!("./thinking_tool/description.md").to_string() include_str!("./thinking_tool/description.md").to_string()
} }
fn icon(&self) -> IconName {
IconName::Brain
}
fn input_schema(&self) -> serde_json::Value { fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(ThinkingToolInput); let schema = schemars::schema_for!(ThinkingToolInput);
serde_json::to_value(&schema).unwrap() serde_json::to_value(&schema).unwrap()
} }
fn ui_text(&self, _input: &serde_json::Value) -> String {
"Thinking".to_string()
}
fn run( fn run(
self: Arc<Self>, self: Arc<Self>,
input: serde_json::Value, input: serde_json::Value,

View File

@@ -7,7 +7,8 @@ use std::cmp::Ordering;
use std::mem; use std::mem;
use std::{future::Future, iter, ops::Range, sync::Arc}; use std::{future::Future, iter, ops::Range, sync::Arc};
use sum_tree::SumTree; use sum_tree::SumTree;
use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point, ToOffset as _}; use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point};
use text::{AnchorRangeExt, ToOffset as _};
use util::ResultExt; use util::ResultExt;
pub struct BufferDiff { pub struct BufferDiff {
@@ -188,7 +189,7 @@ impl BufferDiffSnapshot {
impl BufferDiffInner { impl BufferDiffInner {
/// Returns the new index text and new pending hunks. /// Returns the new index text and new pending hunks.
fn stage_or_unstage_hunks_impl( fn stage_or_unstage_hunks(
&mut self, &mut self,
unstaged_diff: &Self, unstaged_diff: &Self,
stage: bool, stage: bool,
@@ -233,6 +234,9 @@ impl BufferDiffInner {
} }
}; };
let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer);
unstaged_hunk_cursor.next(buffer);
let mut pending_hunks = SumTree::new(buffer); let mut pending_hunks = SumTree::new(buffer);
let mut old_pending_hunks = unstaged_diff let mut old_pending_hunks = unstaged_diff
.pending_hunks .pending_hunks
@@ -248,16 +252,18 @@ impl BufferDiffInner {
{ {
let preceding_pending_hunks = let preceding_pending_hunks =
old_pending_hunks.slice(&buffer_range.start, Bias::Left, buffer); old_pending_hunks.slice(&buffer_range.start, Bias::Left, buffer);
pending_hunks.append(preceding_pending_hunks, buffer); pending_hunks.append(preceding_pending_hunks, buffer);
// Skip all overlapping or adjacent old pending hunks // skip all overlapping old pending hunks
while old_pending_hunks.item().is_some_and(|old_hunk| { while old_pending_hunks
old_hunk .item()
.buffer_range .is_some_and(|preceding_pending_hunk_item| {
.start preceding_pending_hunk_item
.cmp(&buffer_range.end, buffer) .buffer_range
.is_le() .overlaps(&buffer_range, buffer)
}) { })
{
old_pending_hunks.next(buffer); old_pending_hunks.next(buffer);
} }
@@ -285,9 +291,6 @@ impl BufferDiffInner {
// append the remainder // append the remainder
pending_hunks.append(old_pending_hunks.suffix(buffer), buffer); pending_hunks.append(old_pending_hunks.suffix(buffer), buffer);
let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer);
unstaged_hunk_cursor.next(buffer);
let mut prev_unstaged_hunk_buffer_offset = 0; let mut prev_unstaged_hunk_buffer_offset = 0;
let mut prev_unstaged_hunk_base_text_offset = 0; let mut prev_unstaged_hunk_base_text_offset = 0;
let mut edits = Vec::<(Range<usize>, String)>::new(); let mut edits = Vec::<(Range<usize>, String)>::new();
@@ -354,13 +357,7 @@ impl BufferDiffInner {
edits.push((index_range, replacement_text)); edits.push((index_range, replacement_text));
} }
#[cfg(debug_assertions)] // invariants: non-overlapping and sorted debug_assert!(edits.iter().is_sorted_by_key(|(range, _)| range.start));
{
for window in edits.windows(2) {
let (range_a, range_b) = (&window[0].0, &window[1].0);
debug_assert!(range_a.end < range_b.start);
}
}
let mut new_index_text = Rope::new(); let mut new_index_text = Rope::new();
let mut index_cursor = index_text.cursor(0); let mut index_cursor = index_text.cursor(0);
@@ -857,7 +854,7 @@ impl BufferDiff {
file_exists: bool, file_exists: bool,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Option<Rope> { ) -> Option<Rope> {
let (new_index_text, new_pending_hunks) = self.inner.stage_or_unstage_hunks_impl( let (new_index_text, new_pending_hunks) = self.inner.stage_or_unstage_hunks(
&self.secondary_diff.as_ref()?.read(cx).inner, &self.secondary_diff.as_ref()?.read(cx).inner,
stage, stage,
&hunks, &hunks,
@@ -1243,13 +1240,13 @@ impl DiffHunkStatus {
} }
} }
/// Range (crossing new lines), old, new
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
#[track_caller] #[track_caller]
pub fn assert_hunks<ExpectedText, HunkIter>( pub fn assert_hunks<ExpectedText, HunkIter>(
diff_hunks: HunkIter, diff_hunks: HunkIter,
buffer: &text::BufferSnapshot, buffer: &text::BufferSnapshot,
diff_base: &str, diff_base: &str,
// Line range, deleted, added, status
expected_hunks: &[(Range<u32>, ExpectedText, ExpectedText, DiffHunkStatus)], expected_hunks: &[(Range<u32>, ExpectedText, ExpectedText, DiffHunkStatus)],
) where ) where
HunkIter: Iterator<Item = DiffHunk>, HunkIter: Iterator<Item = DiffHunk>,
@@ -1270,11 +1267,11 @@ pub fn assert_hunks<ExpectedText, HunkIter>(
let expected_hunks: Vec<_> = expected_hunks let expected_hunks: Vec<_> = expected_hunks
.iter() .iter()
.map(|(line_range, deleted_text, added_text, status)| { .map(|(r, old_text, new_text, status)| {
( (
Point::new(line_range.start, 0)..Point::new(line_range.end, 0), Point::new(r.start, 0)..Point::new(r.end, 0),
deleted_text.as_ref(), old_text.as_ref(),
added_text.as_ref().to_string(), new_text.as_ref().to_string(),
*status, *status,
) )
}) })
@@ -1289,7 +1286,6 @@ mod tests {
use super::*; use super::*;
use gpui::TestAppContext; use gpui::TestAppContext;
use pretty_assertions::{assert_eq, assert_ne};
use rand::{rngs::StdRng, Rng as _}; use rand::{rngs::StdRng, Rng as _};
use text::{Buffer, BufferId, Rope}; use text::{Buffer, BufferId, Rope};
use unindent::Unindent as _; use unindent::Unindent as _;
@@ -1709,66 +1705,6 @@ mod tests {
} }
} }
#[gpui::test]
async fn test_toggling_stage_and_unstage_same_hunk(cx: &mut TestAppContext) {
let head_text = "
one
two
three
"
.unindent();
let index_text = head_text.clone();
let buffer_text = "
one
three
"
.unindent();
let buffer = Buffer::new(0, BufferId::new(1).unwrap(), buffer_text.clone());
let unstaged = BufferDiff::build_sync(buffer.clone(), index_text, cx);
let uncommitted = BufferDiff::build_sync(buffer.clone(), head_text.clone(), cx);
let unstaged_diff = cx.new(|cx| {
let mut diff = BufferDiff::new(&buffer, cx);
diff.set_state(unstaged, &buffer);
diff
});
let uncommitted_diff = cx.new(|cx| {
let mut diff = BufferDiff::new(&buffer, cx);
diff.set_state(uncommitted, &buffer);
diff.set_secondary_diff(unstaged_diff.clone());
diff
});
uncommitted_diff.update(cx, |diff, cx| {
let hunk = diff.hunks(&buffer, cx).next().unwrap();
let new_index_text = diff
.stage_or_unstage_hunks(true, &[hunk.clone()], &buffer, true, cx)
.unwrap()
.to_string();
assert_eq!(new_index_text, buffer_text);
let hunk = diff.hunks(&buffer, &cx).next().unwrap();
assert_eq!(
hunk.secondary_status,
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
);
let index_text = diff
.stage_or_unstage_hunks(false, &[hunk], &buffer, true, cx)
.unwrap()
.to_string();
assert_eq!(index_text, head_text);
let hunk = diff.hunks(&buffer, &cx).next().unwrap();
// optimistically unstaged (fine, could also be HasSecondaryHunk)
assert_eq!(
hunk.secondary_status,
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
);
});
}
#[gpui::test] #[gpui::test]
async fn test_buffer_diff_compare(cx: &mut TestAppContext) { async fn test_buffer_diff_compare(cx: &mut TestAppContext) {
let base_text = " let base_text = "

View File

@@ -469,25 +469,18 @@ impl Room {
let project = handle.read(cx); let project = handle.read(cx);
if let Some(project_id) = project.remote_id() { if let Some(project_id) = project.remote_id() {
projects.insert(project_id, handle.clone()); projects.insert(project_id, handle.clone());
let mut worktrees = Vec::new();
let mut repositories = Vec::new();
for worktree in project.worktrees(cx) {
let worktree = worktree.read(cx);
worktrees.push(proto::RejoinWorktree {
id: worktree.id().to_proto(),
scan_id: worktree.completed_scan_id() as u64,
});
for repository in worktree.repositories().iter() {
repositories.push(proto::RejoinRepository {
id: repository.work_directory_id().to_proto(),
scan_id: worktree.completed_scan_id() as u64,
});
}
}
rejoined_projects.push(proto::RejoinProject { rejoined_projects.push(proto::RejoinProject {
id: project_id, id: project_id,
worktrees, worktrees: project
repositories, .worktrees(cx)
.map(|worktree| {
let worktree = worktree.read(cx);
proto::RejoinWorktree {
id: worktree.id().to_proto(),
scan_id: worktree.completed_scan_id() as u64,
}
})
.collect(),
}); });
} }
return true; return true;

View File

@@ -524,27 +524,18 @@ impl Room {
let project = handle.read(cx); let project = handle.read(cx);
if let Some(project_id) = project.remote_id() { if let Some(project_id) = project.remote_id() {
projects.insert(project_id, handle.clone()); projects.insert(project_id, handle.clone());
let mut worktrees = Vec::new();
let mut repositories = Vec::new();
for worktree in project.worktrees(cx) {
let worktree = worktree.read(cx);
worktrees.push(proto::RejoinWorktree {
id: worktree.id().to_proto(),
scan_id: worktree.completed_scan_id() as u64,
});
}
for (entry_id, repository) in project.repositories(cx) {
let repository = repository.read(cx);
repositories.push(proto::RejoinRepository {
id: entry_id.to_proto(),
scan_id: repository.completed_scan_id as u64,
});
}
rejoined_projects.push(proto::RejoinProject { rejoined_projects.push(proto::RejoinProject {
id: project_id, id: project_id,
worktrees, worktrees: project
repositories, .worktrees(cx)
.map(|worktree| {
let worktree = worktree.read(cx);
proto::RejoinWorktree {
id: worktree.id().to_proto(),
scan_id: worktree.completed_scan_id() as u64,
}
})
.collect(),
}); });
} }
return true; return true;

View File

@@ -27,7 +27,6 @@ trait InstalledApp {
fn zed_version_string(&self) -> String; fn zed_version_string(&self) -> String;
fn launch(&self, ipc_url: String) -> anyhow::Result<()>; fn launch(&self, ipc_url: String) -> anyhow::Result<()>;
fn run_foreground(&self, ipc_url: String) -> io::Result<ExitStatus>; fn run_foreground(&self, ipc_url: String) -> io::Result<ExitStatus>;
fn path(&self) -> PathBuf;
} }
#[derive(Parser, Debug)] #[derive(Parser, Debug)]
@@ -74,10 +73,6 @@ struct Args {
/// Run zed in dev-server mode /// Run zed in dev-server mode
#[arg(long)] #[arg(long)]
dev_server_token: Option<String>, dev_server_token: Option<String>,
/// Not supported in Zed CLI, only supported on Zed binary
/// Will attempt to give the correct command to run
#[arg(long)]
system_specs: bool,
/// Uninstall Zed from user system /// Uninstall Zed from user system
#[cfg(all( #[cfg(all(
any(target_os = "linux", target_os = "macos"), any(target_os = "linux", target_os = "macos"),
@@ -145,16 +140,6 @@ fn main() -> Result<()> {
return Ok(()); return Ok(());
} }
if args.system_specs {
let path = app.path();
let msg = [
"The `--system-specs` argument is not supported in the Zed CLI, only on Zed binary.",
"To retrieve the system specs on the command line, run the following command:",
&format!("{} --system-specs", path.display()),
];
return Err(anyhow::anyhow!(msg.join("\n")));
}
#[cfg(all( #[cfg(all(
any(target_os = "linux", target_os = "macos"), any(target_os = "linux", target_os = "macos"),
not(feature = "no-bundled-uninstall") not(feature = "no-bundled-uninstall")
@@ -452,10 +437,6 @@ mod linux {
.arg(ipc_url) .arg(ipc_url)
.status() .status()
} }
fn path(&self) -> PathBuf {
self.0.clone()
}
} }
impl App { impl App {
@@ -693,10 +674,6 @@ mod windows {
.spawn()? .spawn()?
.wait() .wait()
} }
fn path(&self) -> PathBuf {
self.0.clone()
}
} }
impl Detect { impl Detect {
@@ -899,13 +876,6 @@ mod mac_os {
std::process::Command::new(path).arg(ipc_url).status() std::process::Command::new(path).arg(ipc_url).status()
} }
fn path(&self) -> PathBuf {
match self {
Bundle::App { app_bundle, .. } => app_bundle.join("Contents/MacOS/zed").clone(),
Bundle::LocalPath { executable, .. } => executable.clone(),
}
}
} }
impl Bundle { impl Bundle {

View File

@@ -15,13 +15,9 @@ CREATE TABLE "users" (
"github_user_created_at" TIMESTAMP WITHOUT TIME ZONE, "github_user_created_at" TIMESTAMP WITHOUT TIME ZONE,
"custom_llm_monthly_allowance_in_cents" INTEGER "custom_llm_monthly_allowance_in_cents" INTEGER
); );
CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login"); CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code"); CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
CREATE INDEX "index_users_on_email_address" ON "users" ("email_address"); CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
CREATE UNIQUE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id"); CREATE UNIQUE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");
CREATE TABLE "access_tokens" ( CREATE TABLE "access_tokens" (
@@ -30,7 +26,6 @@ CREATE TABLE "access_tokens" (
"impersonated_user_id" INTEGER REFERENCES users (id), "impersonated_user_id" INTEGER REFERENCES users (id),
"hash" VARCHAR(128) "hash" VARCHAR(128)
); );
CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id"); CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id");
CREATE TABLE "contacts" ( CREATE TABLE "contacts" (
@@ -41,9 +36,7 @@ CREATE TABLE "contacts" (
"should_notify" BOOLEAN NOT NULL, "should_notify" BOOLEAN NOT NULL,
"accepted" BOOLEAN NOT NULL "accepted" BOOLEAN NOT NULL
); );
CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b"); CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b");
CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b"); CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b");
CREATE TABLE "rooms" ( CREATE TABLE "rooms" (
@@ -52,7 +45,6 @@ CREATE TABLE "rooms" (
"environment" VARCHAR, "environment" VARCHAR,
"channel_id" INTEGER REFERENCES channels (id) ON DELETE CASCADE "channel_id" INTEGER REFERENCES channels (id) ON DELETE CASCADE
); );
CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id"); CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id");
CREATE TABLE "projects" ( CREATE TABLE "projects" (
@@ -63,9 +55,7 @@ CREATE TABLE "projects" (
"host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE, "host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE "unregistered" BOOLEAN NOT NULL DEFAULT FALSE
); );
CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id"); CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id"); CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id");
CREATE TABLE "worktrees" ( CREATE TABLE "worktrees" (
@@ -77,9 +67,8 @@ CREATE TABLE "worktrees" (
"scan_id" INTEGER NOT NULL, "scan_id" INTEGER NOT NULL,
"is_complete" BOOL NOT NULL DEFAULT FALSE, "is_complete" BOOL NOT NULL DEFAULT FALSE,
"completed_scan_id" INTEGER NOT NULL, "completed_scan_id" INTEGER NOT NULL,
PRIMARY KEY (project_id, id) PRIMARY KEY(project_id, id)
); );
CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id"); CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
CREATE TABLE "worktree_entries" ( CREATE TABLE "worktree_entries" (
@@ -98,33 +87,32 @@ CREATE TABLE "worktree_entries" (
"is_deleted" BOOL NOT NULL, "is_deleted" BOOL NOT NULL,
"git_status" INTEGER, "git_status" INTEGER,
"is_fifo" BOOL NOT NULL, "is_fifo" BOOL NOT NULL,
PRIMARY KEY (project_id, worktree_id, id), PRIMARY KEY(project_id, worktree_id, id),
FOREIGN KEY (project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
); );
CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id"); CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id"); CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
CREATE TABLE "project_repositories" ( CREATE TABLE "worktree_repositories" (
"project_id" INTEGER NOT NULL, "project_id" INTEGER NOT NULL,
"abs_path" VARCHAR, "worktree_id" INTEGER NOT NULL,
"id" INTEGER NOT NULL, "work_directory_id" INTEGER NOT NULL,
"entry_ids" VARCHAR,
"legacy_worktree_id" INTEGER,
"branch" VARCHAR, "branch" VARCHAR,
"scan_id" INTEGER NOT NULL, "scan_id" INTEGER NOT NULL,
"is_deleted" BOOL NOT NULL, "is_deleted" BOOL NOT NULL,
"current_merge_conflicts" VARCHAR, "current_merge_conflicts" VARCHAR,
"branch_summary" VARCHAR, "branch_summary" VARCHAR,
PRIMARY KEY (project_id, id) PRIMARY KEY(project_id, worktree_id, work_directory_id),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
); );
CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
CREATE INDEX "index_project_repositories_on_project_id" ON "project_repositories" ("project_id"); CREATE TABLE "worktree_repository_statuses" (
CREATE TABLE "project_repository_statuses" (
"project_id" INTEGER NOT NULL, "project_id" INTEGER NOT NULL,
"repository_id" INTEGER NOT NULL, "worktree_id" INT8 NOT NULL,
"work_directory_id" INT8 NOT NULL,
"repo_path" VARCHAR NOT NULL, "repo_path" VARCHAR NOT NULL,
"status" INT8 NOT NULL, "status" INT8 NOT NULL,
"status_kind" INT4 NOT NULL, "status_kind" INT4 NOT NULL,
@@ -132,12 +120,13 @@ CREATE TABLE "project_repository_statuses" (
"second_status" INT4 NULL, "second_status" INT4 NULL,
"scan_id" INT8 NOT NULL, "scan_id" INT8 NOT NULL,
"is_deleted" BOOL NOT NULL, "is_deleted" BOOL NOT NULL,
PRIMARY KEY (project_id, repository_id, repo_path) PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path),
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
); );
CREATE INDEX "index_wt_repos_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id");
CREATE INDEX "index_project_repos_statuses_on_project_id" ON "project_repository_statuses" ("project_id"); CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id" ON "worktree_repository_statuses" ("project_id", "worktree_id");
CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id_and_wd_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id");
CREATE INDEX "index_project_repos_statuses_on_project_id_and_repo_id" ON "project_repository_statuses" ("project_id", "repository_id");
CREATE TABLE "worktree_settings_files" ( CREATE TABLE "worktree_settings_files" (
"project_id" INTEGER NOT NULL, "project_id" INTEGER NOT NULL,
@@ -145,12 +134,10 @@ CREATE TABLE "worktree_settings_files" (
"path" VARCHAR NOT NULL, "path" VARCHAR NOT NULL,
"content" TEXT, "content" TEXT,
"kind" VARCHAR, "kind" VARCHAR,
PRIMARY KEY (project_id, worktree_id, path), PRIMARY KEY(project_id, worktree_id, path),
FOREIGN KEY (project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
); );
CREATE INDEX "index_worktree_settings_files_on_project_id" ON "worktree_settings_files" ("project_id"); CREATE INDEX "index_worktree_settings_files_on_project_id" ON "worktree_settings_files" ("project_id");
CREATE INDEX "index_worktree_settings_files_on_project_id_and_worktree_id" ON "worktree_settings_files" ("project_id", "worktree_id"); CREATE INDEX "index_worktree_settings_files_on_project_id_and_worktree_id" ON "worktree_settings_files" ("project_id", "worktree_id");
CREATE TABLE "worktree_diagnostic_summaries" ( CREATE TABLE "worktree_diagnostic_summaries" (
@@ -160,21 +147,18 @@ CREATE TABLE "worktree_diagnostic_summaries" (
"language_server_id" INTEGER NOT NULL, "language_server_id" INTEGER NOT NULL,
"error_count" INTEGER NOT NULL, "error_count" INTEGER NOT NULL,
"warning_count" INTEGER NOT NULL, "warning_count" INTEGER NOT NULL,
PRIMARY KEY (project_id, worktree_id, path), PRIMARY KEY(project_id, worktree_id, path),
FOREIGN KEY (project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
); );
CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id"); CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id"); CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");
CREATE TABLE "language_servers" ( CREATE TABLE "language_servers" (
"id" INTEGER NOT NULL, "id" INTEGER NOT NULL,
"project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE, "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
"name" VARCHAR NOT NULL, "name" VARCHAR NOT NULL,
PRIMARY KEY (project_id, id) PRIMARY KEY(project_id, id)
); );
CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id"); CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");
CREATE TABLE "project_collaborators" ( CREATE TABLE "project_collaborators" (
@@ -186,20 +170,11 @@ CREATE TABLE "project_collaborators" (
"replica_id" INTEGER NOT NULL, "replica_id" INTEGER NOT NULL,
"is_host" BOOLEAN NOT NULL "is_host" BOOLEAN NOT NULL
); );
CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id"); CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id"); CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
CREATE INDEX "index_project_collaborators_on_connection_server_id" ON "project_collaborators" ("connection_server_id"); CREATE INDEX "index_project_collaborators_on_connection_server_id" ON "project_collaborators" ("connection_server_id");
CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id"); CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id");
CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_server_id" ON "project_collaborators" ("project_id", "connection_id", "connection_server_id");
CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_server_id" ON "project_collaborators" (
"project_id",
"connection_id",
"connection_server_id"
);
CREATE TABLE "room_participants" ( CREATE TABLE "room_participants" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT, "id" INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -218,21 +193,12 @@ CREATE TABLE "room_participants" (
"role" TEXT, "role" TEXT,
"in_call" BOOLEAN NOT NULL DEFAULT FALSE "in_call" BOOLEAN NOT NULL DEFAULT FALSE
); );
CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id"); CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
CREATE INDEX "index_room_participants_on_room_id" ON "room_participants" ("room_id"); CREATE INDEX "index_room_participants_on_room_id" ON "room_participants" ("room_id");
CREATE INDEX "index_room_participants_on_answering_connection_server_id" ON "room_participants" ("answering_connection_server_id"); CREATE INDEX "index_room_participants_on_answering_connection_server_id" ON "room_participants" ("answering_connection_server_id");
CREATE INDEX "index_room_participants_on_calling_connection_server_id" ON "room_participants" ("calling_connection_server_id"); CREATE INDEX "index_room_participants_on_calling_connection_server_id" ON "room_participants" ("calling_connection_server_id");
CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id"); CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id");
CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_server_id" ON "room_participants" ("answering_connection_id", "answering_connection_server_id");
CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_server_id" ON "room_participants" (
"answering_connection_id",
"answering_connection_server_id"
);
CREATE TABLE "servers" ( CREATE TABLE "servers" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT, "id" INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -248,15 +214,9 @@ CREATE TABLE "followers" (
"follower_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, "follower_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
"follower_connection_id" INTEGER NOT NULL "follower_connection_id" INTEGER NOT NULL
); );
CREATE UNIQUE INDEX
CREATE UNIQUE INDEX "index_followers_on_project_id_and_leader_connection_server_id_and_leader_connection_id_and_follower_connection_server_id_and_follower_connection_id" ON "followers" ( "index_followers_on_project_id_and_leader_connection_server_id_and_leader_connection_id_and_follower_connection_server_id_and_follower_connection_id"
"project_id", ON "followers" ("project_id", "leader_connection_server_id", "leader_connection_id", "follower_connection_server_id", "follower_connection_id");
"leader_connection_server_id",
"leader_connection_id",
"follower_connection_server_id",
"follower_connection_id"
);
CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id"); CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id");
CREATE TABLE "channels" ( CREATE TABLE "channels" (
@@ -277,7 +237,6 @@ CREATE TABLE IF NOT EXISTS "channel_chat_participants" (
"connection_id" INTEGER NOT NULL, "connection_id" INTEGER NOT NULL,
"connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE
); );
CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id"); CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id");
CREATE TABLE IF NOT EXISTS "channel_messages" ( CREATE TABLE IF NOT EXISTS "channel_messages" (
@@ -290,9 +249,7 @@ CREATE TABLE IF NOT EXISTS "channel_messages" (
"nonce" BLOB NOT NULL, "nonce" BLOB NOT NULL,
"reply_to_message_id" INTEGER DEFAULT NULL "reply_to_message_id" INTEGER DEFAULT NULL
); );
CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id"); CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id");
CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce"); CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce");
CREATE TABLE "channel_message_mentions" ( CREATE TABLE "channel_message_mentions" (
@@ -300,7 +257,7 @@ CREATE TABLE "channel_message_mentions" (
"start_offset" INTEGER NOT NULL, "start_offset" INTEGER NOT NULL,
"end_offset" INTEGER NOT NULL, "end_offset" INTEGER NOT NULL,
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
PRIMARY KEY (message_id, start_offset) PRIMARY KEY(message_id, start_offset)
); );
CREATE TABLE "channel_members" ( CREATE TABLE "channel_members" (
@@ -331,7 +288,7 @@ CREATE TABLE "buffer_operations" (
"replica_id" INTEGER NOT NULL, "replica_id" INTEGER NOT NULL,
"lamport_timestamp" INTEGER NOT NULL, "lamport_timestamp" INTEGER NOT NULL,
"value" BLOB NOT NULL, "value" BLOB NOT NULL,
PRIMARY KEY (buffer_id, epoch, lamport_timestamp, replica_id) PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
); );
CREATE TABLE "buffer_snapshots" ( CREATE TABLE "buffer_snapshots" (
@@ -339,7 +296,7 @@ CREATE TABLE "buffer_snapshots" (
"epoch" INTEGER NOT NULL, "epoch" INTEGER NOT NULL,
"text" TEXT NOT NULL, "text" TEXT NOT NULL,
"operation_serialization_version" INTEGER NOT NULL, "operation_serialization_version" INTEGER NOT NULL,
PRIMARY KEY (buffer_id, epoch) PRIMARY KEY(buffer_id, epoch)
); );
CREATE TABLE "channel_buffer_collaborators" ( CREATE TABLE "channel_buffer_collaborators" (
@@ -353,18 +310,11 @@ CREATE TABLE "channel_buffer_collaborators" (
); );
CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id"); CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id"); CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id"); CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id"); CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" (
"channel_id",
"connection_id",
"connection_server_id"
);
CREATE TABLE "feature_flags" ( CREATE TABLE "feature_flags" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT, "id" INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -374,6 +324,7 @@ CREATE TABLE "feature_flags" (
CREATE INDEX "index_feature_flags" ON "feature_flags" ("id"); CREATE INDEX "index_feature_flags" ON "feature_flags" ("id");
CREATE TABLE "user_features" ( CREATE TABLE "user_features" (
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
"feature_id" INTEGER NOT NULL REFERENCES feature_flags (id) ON DELETE CASCADE, "feature_id" INTEGER NOT NULL REFERENCES feature_flags (id) ON DELETE CASCADE,
@@ -381,11 +332,10 @@ CREATE TABLE "user_features" (
); );
CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id"); CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id");
CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id"); CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id");
CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id"); CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id");
CREATE TABLE "observed_buffer_edits" ( CREATE TABLE "observed_buffer_edits" (
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
@@ -424,10 +374,13 @@ CREATE TABLE "notifications" (
"response" BOOLEAN "response" BOOLEAN
); );
CREATE INDEX "index_notifications_on_recipient_id_is_read_kind_entity_id" ON "notifications" ("recipient_id", "is_read", "kind", "entity_id"); CREATE INDEX
"index_notifications_on_recipient_id_is_read_kind_entity_id"
ON "notifications"
("recipient_id", "is_read", "kind", "entity_id");
CREATE TABLE contributors ( CREATE TABLE contributors (
user_id INTEGER REFERENCES users (id), user_id INTEGER REFERENCES users(id),
signed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, signed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (user_id) PRIMARY KEY (user_id)
); );
@@ -441,7 +394,7 @@ CREATE TABLE extensions (
); );
CREATE TABLE extension_versions ( CREATE TABLE extension_versions (
extension_id INTEGER REFERENCES extensions (id), extension_id INTEGER REFERENCES extensions(id),
version TEXT NOT NULL, version TEXT NOT NULL,
published_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, published_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
authors TEXT NOT NULL, authors TEXT NOT NULL,
@@ -463,7 +416,6 @@ CREATE TABLE extension_versions (
); );
CREATE UNIQUE INDEX "index_extensions_external_id" ON "extensions" ("external_id"); CREATE UNIQUE INDEX "index_extensions_external_id" ON "extensions" ("external_id");
CREATE INDEX "index_extensions_total_download_count" ON "extensions" ("total_download_count"); CREATE INDEX "index_extensions_total_download_count" ON "extensions" ("total_download_count");
CREATE TABLE rate_buckets ( CREATE TABLE rate_buckets (
@@ -472,15 +424,14 @@ CREATE TABLE rate_buckets (
token_count INT NOT NULL, token_count INT NOT NULL,
last_refill TIMESTAMP WITHOUT TIME ZONE NOT NULL, last_refill TIMESTAMP WITHOUT TIME ZONE NOT NULL,
PRIMARY KEY (user_id, rate_limit_name), PRIMARY KEY (user_id, rate_limit_name),
FOREIGN KEY (user_id) REFERENCES users (id) FOREIGN KEY (user_id) REFERENCES users(id)
); );
CREATE INDEX idx_user_id_rate_limit ON rate_buckets (user_id, rate_limit_name); CREATE INDEX idx_user_id_rate_limit ON rate_buckets (user_id, rate_limit_name);
CREATE TABLE IF NOT EXISTS billing_preferences ( CREATE TABLE IF NOT EXISTS billing_preferences (
id INTEGER PRIMARY KEY AUTOINCREMENT, id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
user_id INTEGER NOT NULL REFERENCES users (id), user_id INTEGER NOT NULL REFERENCES users(id),
max_monthly_llm_usage_spending_in_cents INTEGER NOT NULL max_monthly_llm_usage_spending_in_cents INTEGER NOT NULL
); );
@@ -489,19 +440,18 @@ CREATE UNIQUE INDEX "uix_billing_preferences_on_user_id" ON billing_preferences
CREATE TABLE IF NOT EXISTS billing_customers ( CREATE TABLE IF NOT EXISTS billing_customers (
id INTEGER PRIMARY KEY AUTOINCREMENT, id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
user_id INTEGER NOT NULL REFERENCES users (id), user_id INTEGER NOT NULL REFERENCES users(id),
has_overdue_invoices BOOLEAN NOT NULL DEFAULT FALSE, has_overdue_invoices BOOLEAN NOT NULL DEFAULT FALSE,
stripe_customer_id TEXT NOT NULL stripe_customer_id TEXT NOT NULL
); );
CREATE UNIQUE INDEX "uix_billing_customers_on_user_id" ON billing_customers (user_id); CREATE UNIQUE INDEX "uix_billing_customers_on_user_id" ON billing_customers (user_id);
CREATE UNIQUE INDEX "uix_billing_customers_on_stripe_customer_id" ON billing_customers (stripe_customer_id); CREATE UNIQUE INDEX "uix_billing_customers_on_stripe_customer_id" ON billing_customers (stripe_customer_id);
CREATE TABLE IF NOT EXISTS billing_subscriptions ( CREATE TABLE IF NOT EXISTS billing_subscriptions (
id INTEGER PRIMARY KEY AUTOINCREMENT, id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
billing_customer_id INTEGER NOT NULL REFERENCES billing_customers (id), billing_customer_id INTEGER NOT NULL REFERENCES billing_customers(id),
stripe_subscription_id TEXT NOT NULL, stripe_subscription_id TEXT NOT NULL,
stripe_subscription_status TEXT NOT NULL, stripe_subscription_status TEXT NOT NULL,
stripe_cancel_at TIMESTAMP, stripe_cancel_at TIMESTAMP,
@@ -509,7 +459,6 @@ CREATE TABLE IF NOT EXISTS billing_subscriptions (
); );
CREATE INDEX "ix_billing_subscriptions_on_billing_customer_id" ON billing_subscriptions (billing_customer_id); CREATE INDEX "ix_billing_subscriptions_on_billing_customer_id" ON billing_subscriptions (billing_customer_id);
CREATE UNIQUE INDEX "uix_billing_subscriptions_on_stripe_subscription_id" ON billing_subscriptions (stripe_subscription_id); CREATE UNIQUE INDEX "uix_billing_subscriptions_on_stripe_subscription_id" ON billing_subscriptions (stripe_subscription_id);
CREATE TABLE IF NOT EXISTS processed_stripe_events ( CREATE TABLE IF NOT EXISTS processed_stripe_events (
@@ -530,5 +479,4 @@ CREATE TABLE IF NOT EXISTS "breakpoints" (
"path" TEXT NOT NULL, "path" TEXT NOT NULL,
"kind" VARCHAR NOT NULL "kind" VARCHAR NOT NULL
); );
CREATE INDEX "index_breakpoints_on_project_id" ON "breakpoints" ("project_id"); CREATE INDEX "index_breakpoints_on_project_id" ON "breakpoints" ("project_id");

View File

@@ -1,32 +0,0 @@
CREATE TABLE "project_repositories" (
"project_id" INTEGER NOT NULL,
"abs_path" VARCHAR,
"id" INT8 NOT NULL,
"legacy_worktree_id" INT8,
"entry_ids" VARCHAR,
"branch" VARCHAR,
"scan_id" INT8 NOT NULL,
"is_deleted" BOOL NOT NULL,
"current_merge_conflicts" VARCHAR,
"branch_summary" VARCHAR,
PRIMARY KEY (project_id, id)
);
CREATE INDEX "index_project_repositories_on_project_id" ON "project_repositories" ("project_id");
CREATE TABLE "project_repository_statuses" (
"project_id" INTEGER NOT NULL,
"repository_id" INT8 NOT NULL,
"repo_path" VARCHAR NOT NULL,
"status" INT8 NOT NULL,
"status_kind" INT4 NOT NULL,
"first_status" INT4 NULL,
"second_status" INT4 NULL,
"scan_id" INT8 NOT NULL,
"is_deleted" BOOL NOT NULL,
PRIMARY KEY (project_id, repository_id, repo_path)
);
CREATE INDEX "index_project_repos_statuses_on_project_id" ON "project_repository_statuses" ("project_id");
CREATE INDEX "index_project_repos_statuses_on_project_id_and_repo_id" ON "project_repository_statuses" ("project_id", "repository_id");

View File

@@ -9,7 +9,6 @@ use anyhow::anyhow;
use collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use dashmap::DashMap; use dashmap::DashMap;
use futures::StreamExt; use futures::StreamExt;
use project_repository_statuses::StatusKind;
use rand::{prelude::StdRng, Rng, SeedableRng}; use rand::{prelude::StdRng, Rng, SeedableRng};
use rpc::ExtensionProvides; use rpc::ExtensionProvides;
use rpc::{ use rpc::{
@@ -37,6 +36,7 @@ use std::{
}; };
use time::PrimitiveDateTime; use time::PrimitiveDateTime;
use tokio::sync::{Mutex, OwnedMutexGuard}; use tokio::sync::{Mutex, OwnedMutexGuard};
use worktree_repository_statuses::StatusKind;
use worktree_settings_file::LocalSettingsKind; use worktree_settings_file::LocalSettingsKind;
#[cfg(test)] #[cfg(test)]
@@ -658,8 +658,6 @@ pub struct RejoinedProject {
pub old_connection_id: ConnectionId, pub old_connection_id: ConnectionId,
pub collaborators: Vec<ProjectCollaborator>, pub collaborators: Vec<ProjectCollaborator>,
pub worktrees: Vec<RejoinedWorktree>, pub worktrees: Vec<RejoinedWorktree>,
pub updated_repositories: Vec<proto::UpdateRepository>,
pub removed_repositories: Vec<u64>,
pub language_servers: Vec<proto::LanguageServer>, pub language_servers: Vec<proto::LanguageServer>,
} }
@@ -728,7 +726,6 @@ pub struct Project {
pub role: ChannelRole, pub role: ChannelRole,
pub collaborators: Vec<ProjectCollaborator>, pub collaborators: Vec<ProjectCollaborator>,
pub worktrees: BTreeMap<u64, Worktree>, pub worktrees: BTreeMap<u64, Worktree>,
pub repositories: Vec<proto::UpdateRepository>,
pub language_servers: Vec<proto::LanguageServer>, pub language_servers: Vec<proto::LanguageServer>,
} }
@@ -763,7 +760,7 @@ pub struct Worktree {
pub root_name: String, pub root_name: String,
pub visible: bool, pub visible: bool,
pub entries: Vec<proto::Entry>, pub entries: Vec<proto::Entry>,
pub legacy_repository_entries: BTreeMap<u64, proto::RepositoryEntry>, pub repository_entries: BTreeMap<u64, proto::RepositoryEntry>,
pub diagnostic_summaries: Vec<proto::DiagnosticSummary>, pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
pub settings_files: Vec<WorktreeSettingsFile>, pub settings_files: Vec<WorktreeSettingsFile>,
pub scan_id: u64, pub scan_id: u64,
@@ -813,7 +810,7 @@ impl LocalSettingsKind {
} }
fn db_status_to_proto( fn db_status_to_proto(
entry: project_repository_statuses::Model, entry: worktree_repository_statuses::Model,
) -> anyhow::Result<proto::StatusEntry> { ) -> anyhow::Result<proto::StatusEntry> {
use proto::git_file_status::{Tracked, Unmerged, Variant}; use proto::git_file_status::{Tracked, Unmerged, Variant};

View File

@@ -324,135 +324,119 @@ impl Database {
.await?; .await?;
} }
// Backward-compatibility for old Zed clients. if !update.updated_repositories.is_empty() {
// worktree_repository::Entity::insert_many(update.updated_repositories.iter().map(
// Remove this block when Zed 1.80 stable has been out for a week. |repository| {
{ worktree_repository::ActiveModel {
if !update.updated_repositories.is_empty() { project_id: ActiveValue::set(project_id),
project_repository::Entity::insert_many( worktree_id: ActiveValue::set(worktree_id),
update.updated_repositories.iter().map(|repository| { work_directory_id: ActiveValue::set(
project_repository::ActiveModel { repository.work_directory_id as i64,
project_id: ActiveValue::set(project_id), ),
legacy_worktree_id: ActiveValue::set(Some(worktree_id)), scan_id: ActiveValue::set(update.scan_id as i64),
id: ActiveValue::set(repository.work_directory_id as i64), branch: ActiveValue::set(repository.branch.clone()),
scan_id: ActiveValue::set(update.scan_id as i64), is_deleted: ActiveValue::set(false),
is_deleted: ActiveValue::set(false), branch_summary: ActiveValue::Set(
branch_summary: ActiveValue::Set( repository
repository .branch_summary
.branch_summary .as_ref()
.as_ref() .map(|summary| serde_json::to_string(summary).unwrap()),
.map(|summary| serde_json::to_string(summary).unwrap()), ),
), current_merge_conflicts: ActiveValue::Set(Some(
current_merge_conflicts: ActiveValue::Set(Some( serde_json::to_string(&repository.current_merge_conflicts).unwrap(),
serde_json::to_string(&repository.current_merge_conflicts) )),
.unwrap(), }
)), },
))
.on_conflict(
OnConflict::columns([
worktree_repository::Column::ProjectId,
worktree_repository::Column::WorktreeId,
worktree_repository::Column::WorkDirectoryId,
])
.update_columns([
worktree_repository::Column::ScanId,
worktree_repository::Column::Branch,
worktree_repository::Column::BranchSummary,
worktree_repository::Column::CurrentMergeConflicts,
])
.to_owned(),
)
.exec(&*tx)
.await?;
// Old clients do not use abs path or entry ids. let has_any_statuses = update
abs_path: ActiveValue::set(String::new()), .updated_repositories
entry_ids: ActiveValue::set("[]".into()), .iter()
} .any(|repository| !repository.updated_statuses.is_empty());
}),
if has_any_statuses {
worktree_repository_statuses::Entity::insert_many(
update.updated_repositories.iter().flat_map(
|repository: &proto::RepositoryEntry| {
repository.updated_statuses.iter().map(|status_entry| {
let (repo_path, status_kind, first_status, second_status) =
proto_status_to_db(status_entry.clone());
worktree_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
worktree_id: ActiveValue::set(worktree_id),
work_directory_id: ActiveValue::set(
repository.work_directory_id as i64,
),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
repo_path: ActiveValue::set(repo_path),
status: ActiveValue::set(0),
status_kind: ActiveValue::set(status_kind),
first_status: ActiveValue::set(first_status),
second_status: ActiveValue::set(second_status),
}
})
},
),
) )
.on_conflict( .on_conflict(
OnConflict::columns([ OnConflict::columns([
project_repository::Column::ProjectId, worktree_repository_statuses::Column::ProjectId,
project_repository::Column::Id, worktree_repository_statuses::Column::WorktreeId,
worktree_repository_statuses::Column::WorkDirectoryId,
worktree_repository_statuses::Column::RepoPath,
]) ])
.update_columns([ .update_columns([
project_repository::Column::ScanId, worktree_repository_statuses::Column::ScanId,
project_repository::Column::BranchSummary, worktree_repository_statuses::Column::StatusKind,
project_repository::Column::CurrentMergeConflicts, worktree_repository_statuses::Column::FirstStatus,
worktree_repository_statuses::Column::SecondStatus,
]) ])
.to_owned(), .to_owned(),
) )
.exec(&*tx) .exec(&*tx)
.await?; .await?;
let has_any_statuses = update
.updated_repositories
.iter()
.any(|repository| !repository.updated_statuses.is_empty());
if has_any_statuses {
project_repository_statuses::Entity::insert_many(
update.updated_repositories.iter().flat_map(
|repository: &proto::RepositoryEntry| {
repository.updated_statuses.iter().map(|status_entry| {
let (repo_path, status_kind, first_status, second_status) =
proto_status_to_db(status_entry.clone());
project_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
repository_id: ActiveValue::set(
repository.work_directory_id as i64,
),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
repo_path: ActiveValue::set(repo_path),
status: ActiveValue::set(0),
status_kind: ActiveValue::set(status_kind),
first_status: ActiveValue::set(first_status),
second_status: ActiveValue::set(second_status),
}
})
},
),
)
.on_conflict(
OnConflict::columns([
project_repository_statuses::Column::ProjectId,
project_repository_statuses::Column::RepositoryId,
project_repository_statuses::Column::RepoPath,
])
.update_columns([
project_repository_statuses::Column::ScanId,
project_repository_statuses::Column::StatusKind,
project_repository_statuses::Column::FirstStatus,
project_repository_statuses::Column::SecondStatus,
])
.to_owned(),
)
.exec(&*tx)
.await?;
}
for repo in &update.updated_repositories {
if !repo.removed_statuses.is_empty() {
project_repository_statuses::Entity::update_many()
.filter(
project_repository_statuses::Column::ProjectId
.eq(project_id)
.and(
project_repository_statuses::Column::RepositoryId
.eq(repo.work_directory_id),
)
.and(
project_repository_statuses::Column::RepoPath
.is_in(repo.removed_statuses.iter()),
),
)
.set(project_repository_statuses::ActiveModel {
is_deleted: ActiveValue::Set(true),
scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default()
})
.exec(&*tx)
.await?;
}
}
} }
if !update.removed_repositories.is_empty() { let has_any_removed_statuses = update
project_repository::Entity::update_many() .updated_repositories
.iter()
.any(|repository| !repository.removed_statuses.is_empty());
if has_any_removed_statuses {
worktree_repository_statuses::Entity::update_many()
.filter( .filter(
project_repository::Column::ProjectId worktree_repository_statuses::Column::ProjectId
.eq(project_id) .eq(project_id)
.and(project_repository::Column::LegacyWorktreeId.eq(worktree_id)) .and(
.and(project_repository::Column::Id.is_in( worktree_repository_statuses::Column::WorktreeId
update.removed_repositories.iter().map(|id| *id as i64), .eq(worktree_id),
)), )
.and(
worktree_repository_statuses::Column::RepoPath.is_in(
update.updated_repositories.iter().flat_map(|repository| {
repository.removed_statuses.iter()
}),
),
),
) )
.set(project_repository::ActiveModel { .set(worktree_repository_statuses::ActiveModel {
is_deleted: ActiveValue::Set(true), is_deleted: ActiveValue::Set(true),
scan_id: ActiveValue::Set(update.scan_id as i64), scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default() ..Default::default()
@@ -462,109 +446,18 @@ impl Database {
} }
} }
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?; if !update.removed_repositories.is_empty() {
Ok(connection_ids) worktree_repository::Entity::update_many()
})
.await
}
pub async fn update_repository(
&self,
update: &proto::UpdateRepository,
_connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
let repository_id = update.id as i64;
self.project_transaction(project_id, |tx| async move {
project_repository::Entity::insert(project_repository::ActiveModel {
project_id: ActiveValue::set(project_id),
id: ActiveValue::set(repository_id),
legacy_worktree_id: ActiveValue::set(None),
abs_path: ActiveValue::set(update.abs_path.clone()),
entry_ids: ActiveValue::Set(serde_json::to_string(&update.entry_ids).unwrap()),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
branch_summary: ActiveValue::Set(
update
.branch_summary
.as_ref()
.map(|summary| serde_json::to_string(summary).unwrap()),
),
current_merge_conflicts: ActiveValue::Set(Some(
serde_json::to_string(&update.current_merge_conflicts).unwrap(),
)),
})
.on_conflict(
OnConflict::columns([
project_repository::Column::ProjectId,
project_repository::Column::Id,
])
.update_columns([
project_repository::Column::ScanId,
project_repository::Column::BranchSummary,
project_repository::Column::EntryIds,
project_repository::Column::AbsPath,
project_repository::Column::CurrentMergeConflicts,
])
.to_owned(),
)
.exec(&*tx)
.await?;
let has_any_statuses = !update.updated_statuses.is_empty();
if has_any_statuses {
project_repository_statuses::Entity::insert_many(
update.updated_statuses.iter().map(|status_entry| {
let (repo_path, status_kind, first_status, second_status) =
proto_status_to_db(status_entry.clone());
project_repository_statuses::ActiveModel {
project_id: ActiveValue::set(project_id),
repository_id: ActiveValue::set(repository_id),
scan_id: ActiveValue::set(update.scan_id as i64),
is_deleted: ActiveValue::set(false),
repo_path: ActiveValue::set(repo_path),
status: ActiveValue::set(0),
status_kind: ActiveValue::set(status_kind),
first_status: ActiveValue::set(first_status),
second_status: ActiveValue::set(second_status),
}
}),
)
.on_conflict(
OnConflict::columns([
project_repository_statuses::Column::ProjectId,
project_repository_statuses::Column::RepositoryId,
project_repository_statuses::Column::RepoPath,
])
.update_columns([
project_repository_statuses::Column::ScanId,
project_repository_statuses::Column::StatusKind,
project_repository_statuses::Column::FirstStatus,
project_repository_statuses::Column::SecondStatus,
])
.to_owned(),
)
.exec(&*tx)
.await?;
}
let has_any_removed_statuses = !update.removed_statuses.is_empty();
if has_any_removed_statuses {
project_repository_statuses::Entity::update_many()
.filter( .filter(
project_repository_statuses::Column::ProjectId worktree_repository::Column::ProjectId
.eq(project_id) .eq(project_id)
.and(worktree_repository::Column::WorktreeId.eq(worktree_id))
.and( .and(
project_repository_statuses::Column::RepositoryId.eq(repository_id), worktree_repository::Column::WorkDirectoryId
) .is_in(update.removed_repositories.iter().map(|id| *id as i64)),
.and(
project_repository_statuses::Column::RepoPath
.is_in(update.removed_statuses.iter()),
), ),
) )
.set(project_repository_statuses::ActiveModel { .set(worktree_repository::ActiveModel {
is_deleted: ActiveValue::Set(true), is_deleted: ActiveValue::Set(true),
scan_id: ActiveValue::Set(update.scan_id as i64), scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default() ..Default::default()
@@ -579,34 +472,6 @@ impl Database {
.await .await
} }
pub async fn remove_repository(
&self,
remove: &proto::RemoveRepository,
_connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(remove.project_id);
let repository_id = remove.id as i64;
self.project_transaction(project_id, |tx| async move {
project_repository::Entity::update_many()
.filter(
project_repository::Column::ProjectId
.eq(project_id)
.and(project_repository::Column::Id.eq(repository_id)),
)
.set(project_repository::ActiveModel {
is_deleted: ActiveValue::Set(true),
// scan_id: ActiveValue::Set(update.scan_id as i64),
..Default::default()
})
.exec(&*tx)
.await?;
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
Ok(connection_ids)
})
.await
}
/// Updates the diagnostic summary for the given connection. /// Updates the diagnostic summary for the given connection.
pub async fn update_diagnostic_summary( pub async fn update_diagnostic_summary(
&self, &self,
@@ -838,11 +703,11 @@ impl Database {
root_name: db_worktree.root_name, root_name: db_worktree.root_name,
visible: db_worktree.visible, visible: db_worktree.visible,
entries: Default::default(), entries: Default::default(),
repository_entries: Default::default(),
diagnostic_summaries: Default::default(), diagnostic_summaries: Default::default(),
settings_files: Default::default(), settings_files: Default::default(),
scan_id: db_worktree.scan_id as u64, scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64, completed_scan_id: db_worktree.completed_scan_id as u64,
legacy_repository_entries: Default::default(),
}, },
) )
}) })
@@ -885,77 +750,65 @@ impl Database {
} }
// Populate repository entries. // Populate repository entries.
let mut repositories = Vec::new();
{ {
let db_repository_entries = project_repository::Entity::find() let db_repository_entries = worktree_repository::Entity::find()
.filter( .filter(
Condition::all() Condition::all()
.add(project_repository::Column::ProjectId.eq(project.id)) .add(worktree_repository::Column::ProjectId.eq(project.id))
.add(project_repository::Column::IsDeleted.eq(false)), .add(worktree_repository::Column::IsDeleted.eq(false)),
) )
.all(tx) .all(tx)
.await?; .await?;
for db_repository_entry in db_repository_entries { for db_repository_entry in db_repository_entries {
let mut repository_statuses = project_repository_statuses::Entity::find() if let Some(worktree) = worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
.filter( {
Condition::all() let mut repository_statuses = worktree_repository_statuses::Entity::find()
.add(project_repository_statuses::Column::ProjectId.eq(project.id)) .filter(
.add( Condition::all()
project_repository_statuses::Column::RepositoryId .add(worktree_repository_statuses::Column::ProjectId.eq(project.id))
.eq(db_repository_entry.id), .add(
) worktree_repository_statuses::Column::WorktreeId
.add(project_repository_statuses::Column::IsDeleted.eq(false)), .eq(worktree.id),
) )
.stream(tx) .add(
.await?; worktree_repository_statuses::Column::WorkDirectoryId
let mut updated_statuses = Vec::new(); .eq(db_repository_entry.work_directory_id),
while let Some(status_entry) = repository_statuses.next().await { )
let status_entry = status_entry?; .add(worktree_repository_statuses::Column::IsDeleted.eq(false)),
updated_statuses.push(db_status_to_proto(status_entry)?); )
} .stream(tx)
.await?;
let current_merge_conflicts = db_repository_entry let mut updated_statuses = Vec::new();
.current_merge_conflicts while let Some(status_entry) = repository_statuses.next().await {
.as_ref() let status_entry: worktree_repository_statuses::Model = status_entry?;
.map(|conflicts| serde_json::from_str(&conflicts)) updated_statuses.push(db_status_to_proto(status_entry)?);
.transpose()?
.unwrap_or_default();
let branch_summary = db_repository_entry
.branch_summary
.as_ref()
.map(|branch_summary| serde_json::from_str(&branch_summary))
.transpose()?
.unwrap_or_default();
let entry_ids = serde_json::from_str(&db_repository_entry.entry_ids)
.context("failed to deserialize repository's entry ids")?;
if let Some(worktree_id) = db_repository_entry.legacy_worktree_id {
if let Some(worktree) = worktrees.get_mut(&(worktree_id as u64)) {
worktree.legacy_repository_entries.insert(
db_repository_entry.id as u64,
proto::RepositoryEntry {
work_directory_id: db_repository_entry.id as u64,
updated_statuses,
removed_statuses: Vec::new(),
current_merge_conflicts,
branch_summary,
},
);
} }
} else {
repositories.push(proto::UpdateRepository { let current_merge_conflicts = db_repository_entry
project_id: db_repository_entry.project_id.0 as u64, .current_merge_conflicts
id: db_repository_entry.id as u64, .as_ref()
abs_path: db_repository_entry.abs_path, .map(|conflicts| serde_json::from_str(&conflicts))
entry_ids, .transpose()?
updated_statuses, .unwrap_or_default();
removed_statuses: Vec::new(),
current_merge_conflicts, let branch_summary = db_repository_entry
branch_summary, .branch_summary
scan_id: db_repository_entry.scan_id as u64, .as_ref()
}); .map(|branch_summary| serde_json::from_str(&branch_summary))
.transpose()?
.unwrap_or_default();
worktree.repository_entries.insert(
db_repository_entry.work_directory_id as u64,
proto::RepositoryEntry {
work_directory_id: db_repository_entry.work_directory_id as u64,
branch: db_repository_entry.branch,
updated_statuses,
removed_statuses: Vec::new(),
current_merge_conflicts,
branch_summary,
},
);
} }
} }
} }
@@ -1018,7 +871,6 @@ impl Database {
}) })
.collect(), .collect(),
worktrees, worktrees,
repositories,
language_servers: language_servers language_servers: language_servers
.into_iter() .into_iter()
.map(|language_server| proto::LanguageServer { .map(|language_server| proto::LanguageServer {

View File

@@ -1,5 +1,3 @@
use anyhow::Context as _;
use super::*; use super::*;
impl Database { impl Database {
@@ -608,11 +606,6 @@ impl Database {
let mut worktrees = Vec::new(); let mut worktrees = Vec::new();
let db_worktrees = project.find_related(worktree::Entity).all(tx).await?; let db_worktrees = project.find_related(worktree::Entity).all(tx).await?;
let db_repos = project
.find_related(project_repository::Entity)
.all(tx)
.await?;
for db_worktree in db_worktrees { for db_worktree in db_worktrees {
let mut worktree = RejoinedWorktree { let mut worktree = RejoinedWorktree {
id: db_worktree.id as u64, id: db_worktree.id as u64,
@@ -680,112 +673,96 @@ impl Database {
} }
} }
worktrees.push(worktree); // Repository Entries
} {
let repository_entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
let mut removed_repositories = Vec::new(); worktree_repository::Column::ScanId.gt(rejoined_worktree.scan_id)
let mut updated_repositories = Vec::new();
for db_repo in db_repos {
let rejoined_repository = rejoined_project
.repositories
.iter()
.find(|repo| repo.id == db_repo.id as u64);
let repository_filter = if let Some(rejoined_repository) = rejoined_repository {
project_repository::Column::ScanId.gt(rejoined_repository.scan_id)
} else {
project_repository::Column::IsDeleted.eq(false)
};
let db_repositories = project_repository::Entity::find()
.filter(
Condition::all()
.add(project_repository::Column::ProjectId.eq(project.id))
.add(repository_filter),
)
.all(tx)
.await?;
for db_repository in db_repositories.into_iter() {
if db_repository.is_deleted {
removed_repositories.push(db_repository.id as u64);
} else { } else {
let status_entry_filter = if let Some(rejoined_repository) = rejoined_repository worktree_repository::Column::IsDeleted.eq(false)
{ };
project_repository_statuses::Column::ScanId.gt(rejoined_repository.scan_id)
let db_repositories = worktree_repository::Entity::find()
.filter(
Condition::all()
.add(worktree_repository::Column::ProjectId.eq(project.id))
.add(worktree_repository::Column::WorktreeId.eq(worktree.id))
.add(repository_entry_filter),
)
.all(tx)
.await?;
for db_repository in db_repositories.into_iter() {
if db_repository.is_deleted {
worktree
.removed_repositories
.push(db_repository.work_directory_id as u64);
} else { } else {
project_repository_statuses::Column::IsDeleted.eq(false) let status_entry_filter = if let Some(rejoined_worktree) = rejoined_worktree
};
let mut db_statuses = project_repository_statuses::Entity::find()
.filter(
Condition::all()
.add(project_repository_statuses::Column::ProjectId.eq(project.id))
.add(
project_repository_statuses::Column::RepositoryId
.eq(db_repository.id),
)
.add(status_entry_filter),
)
.stream(tx)
.await?;
let mut removed_statuses = Vec::new();
let mut updated_statuses = Vec::new();
while let Some(db_status) = db_statuses.next().await {
let db_status: project_repository_statuses::Model = db_status?;
if db_status.is_deleted {
removed_statuses.push(db_status.repo_path);
} else {
updated_statuses.push(db_status_to_proto(db_status)?);
}
}
let current_merge_conflicts = db_repository
.current_merge_conflicts
.as_ref()
.map(|conflicts| serde_json::from_str(&conflicts))
.transpose()?
.unwrap_or_default();
let branch_summary = db_repository
.branch_summary
.as_ref()
.map(|branch_summary| serde_json::from_str(&branch_summary))
.transpose()?
.unwrap_or_default();
let entry_ids = serde_json::from_str(&db_repository.entry_ids)
.context("failed to deserialize repository's entry ids")?;
if let Some(legacy_worktree_id) = db_repository.legacy_worktree_id {
if let Some(worktree) = worktrees
.iter_mut()
.find(|worktree| worktree.id as i64 == legacy_worktree_id)
{ {
worktree.updated_repositories.push(proto::RepositoryEntry { worktree_repository_statuses::Column::ScanId
work_directory_id: db_repository.id as u64, .gt(rejoined_worktree.scan_id)
updated_statuses, } else {
removed_statuses, worktree_repository_statuses::Column::IsDeleted.eq(false)
current_merge_conflicts, };
branch_summary,
}); let mut db_statuses = worktree_repository_statuses::Entity::find()
.filter(
Condition::all()
.add(
worktree_repository_statuses::Column::ProjectId
.eq(project.id),
)
.add(
worktree_repository_statuses::Column::WorktreeId
.eq(worktree.id),
)
.add(
worktree_repository_statuses::Column::WorkDirectoryId
.eq(db_repository.work_directory_id),
)
.add(status_entry_filter),
)
.stream(tx)
.await?;
let mut removed_statuses = Vec::new();
let mut updated_statuses = Vec::new();
while let Some(db_status) = db_statuses.next().await {
let db_status: worktree_repository_statuses::Model = db_status?;
if db_status.is_deleted {
removed_statuses.push(db_status.repo_path);
} else {
updated_statuses.push(db_status_to_proto(db_status)?);
}
} }
} else {
updated_repositories.push(proto::UpdateRepository { let current_merge_conflicts = db_repository
entry_ids, .current_merge_conflicts
.as_ref()
.map(|conflicts| serde_json::from_str(&conflicts))
.transpose()?
.unwrap_or_default();
let branch_summary = db_repository
.branch_summary
.as_ref()
.map(|branch_summary| serde_json::from_str(&branch_summary))
.transpose()?
.unwrap_or_default();
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
updated_statuses, updated_statuses,
removed_statuses, removed_statuses,
current_merge_conflicts, current_merge_conflicts,
branch_summary, branch_summary,
project_id: project_id.to_proto(),
id: db_repository.id as u64,
abs_path: db_repository.abs_path,
scan_id: db_repository.scan_id as u64,
}); });
} }
} }
} }
worktrees.push(worktree);
} }
let language_servers = project let language_servers = project
@@ -855,8 +832,6 @@ impl Database {
id: project_id, id: project_id,
old_connection_id, old_connection_id,
collaborators, collaborators,
updated_repositories,
removed_repositories,
worktrees, worktrees,
language_servers, language_servers,
})) }))

View File

@@ -26,8 +26,6 @@ pub mod observed_channel_messages;
pub mod processed_stripe_event; pub mod processed_stripe_event;
pub mod project; pub mod project;
pub mod project_collaborator; pub mod project_collaborator;
pub mod project_repository;
pub mod project_repository_statuses;
pub mod rate_buckets; pub mod rate_buckets;
pub mod room; pub mod room;
pub mod room_participant; pub mod room_participant;
@@ -38,4 +36,6 @@ pub mod user_feature;
pub mod worktree; pub mod worktree;
pub mod worktree_diagnostic_summary; pub mod worktree_diagnostic_summary;
pub mod worktree_entry; pub mod worktree_entry;
pub mod worktree_repository;
pub mod worktree_repository_statuses;
pub mod worktree_settings_file; pub mod worktree_settings_file;

View File

@@ -45,8 +45,6 @@ pub enum Relation {
Room, Room,
#[sea_orm(has_many = "super::worktree::Entity")] #[sea_orm(has_many = "super::worktree::Entity")]
Worktrees, Worktrees,
#[sea_orm(has_many = "super::project_repository::Entity")]
Repositories,
#[sea_orm(has_many = "super::project_collaborator::Entity")] #[sea_orm(has_many = "super::project_collaborator::Entity")]
Collaborators, Collaborators,
#[sea_orm(has_many = "super::language_server::Entity")] #[sea_orm(has_many = "super::language_server::Entity")]
@@ -71,12 +69,6 @@ impl Related<super::worktree::Entity> for Entity {
} }
} }
impl Related<super::project_repository::Entity> for Entity {
fn to() -> RelationDef {
Relation::Repositories.def()
}
}
impl Related<super::project_collaborator::Entity> for Entity { impl Related<super::project_collaborator::Entity> for Entity {
fn to() -> RelationDef { fn to() -> RelationDef {
Relation::Collaborators.def() Relation::Collaborators.def()

View File

@@ -2,17 +2,16 @@ use crate::db::ProjectId;
use sea_orm::entity::prelude::*; use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "project_repositories")] #[sea_orm(table_name = "worktree_repositories")]
pub struct Model { pub struct Model {
#[sea_orm(primary_key)] #[sea_orm(primary_key)]
pub project_id: ProjectId, pub project_id: ProjectId,
#[sea_orm(primary_key)] #[sea_orm(primary_key)]
pub id: i64, pub worktree_id: i64,
pub abs_path: String, #[sea_orm(primary_key)]
pub legacy_worktree_id: Option<i64>, pub work_directory_id: i64,
// JSON array containing 1 or more integer project entry ids
pub entry_ids: String,
pub scan_id: i64, pub scan_id: i64,
pub branch: Option<String>,
pub is_deleted: bool, pub is_deleted: bool,
// JSON array typed string // JSON array typed string
pub current_merge_conflicts: Option<String>, pub current_merge_conflicts: Option<String>,
@@ -21,19 +20,6 @@ pub struct Model {
} }
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation { pub enum Relation {}
#[sea_orm(
belongs_to = "super::project::Entity",
from = "Column::ProjectId",
to = "super::project::Column::Id"
)]
Project,
}
impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::Project.def()
}
}
impl ActiveModelBehavior for ActiveModel {} impl ActiveModelBehavior for ActiveModel {}

View File

@@ -2,12 +2,14 @@ use crate::db::ProjectId;
use sea_orm::entity::prelude::*; use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "project_repository_statuses")] #[sea_orm(table_name = "worktree_repository_statuses")]
pub struct Model { pub struct Model {
#[sea_orm(primary_key)] #[sea_orm(primary_key)]
pub project_id: ProjectId, pub project_id: ProjectId,
#[sea_orm(primary_key)] #[sea_orm(primary_key)]
pub repository_id: i64, pub worktree_id: i64,
#[sea_orm(primary_key)]
pub work_directory_id: i64,
#[sea_orm(primary_key)] #[sea_orm(primary_key)]
pub repo_path: String, pub repo_path: String,
/// Old single-code status field, no longer used but kept here to mirror the DB schema. /// Old single-code status field, no longer used but kept here to mirror the DB schema.

View File

@@ -37,7 +37,6 @@ use core::fmt::{self, Debug, Formatter};
use http_client::HttpClient; use http_client::HttpClient;
use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL}; use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL};
use reqwest_client::ReqwestClient; use reqwest_client::ReqwestClient;
use rpc::proto::split_repository_update;
use sha2::Digest; use sha2::Digest;
use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi};
@@ -292,8 +291,6 @@ impl Server {
.add_message_handler(leave_project) .add_message_handler(leave_project)
.add_request_handler(update_project) .add_request_handler(update_project)
.add_request_handler(update_worktree) .add_request_handler(update_worktree)
.add_request_handler(update_repository)
.add_request_handler(remove_repository)
.add_message_handler(start_language_server) .add_message_handler(start_language_server)
.add_message_handler(update_language_server) .add_message_handler(update_language_server)
.add_message_handler(update_diagnostic_summary) .add_message_handler(update_diagnostic_summary)
@@ -304,7 +301,6 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::GetReferences>) .add_request_handler(forward_read_only_project_request::<proto::GetReferences>)
.add_request_handler(forward_find_search_candidates_request) .add_request_handler(forward_find_search_candidates_request)
.add_request_handler(forward_read_only_project_request::<proto::GetDocumentHighlights>) .add_request_handler(forward_read_only_project_request::<proto::GetDocumentHighlights>)
.add_request_handler(forward_read_only_project_request::<proto::GetDocumentSymbols>)
.add_request_handler(forward_read_only_project_request::<proto::GetProjectSymbols>) .add_request_handler(forward_read_only_project_request::<proto::GetProjectSymbols>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferForSymbol>) .add_request_handler(forward_read_only_project_request::<proto::OpenBufferForSymbol>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferById>) .add_request_handler(forward_read_only_project_request::<proto::OpenBufferById>)
@@ -1468,7 +1464,7 @@ fn notify_rejoined_projects(
removed_repositories: worktree.removed_repositories, removed_repositories: worktree.removed_repositories,
}; };
for update in proto::split_worktree_update(message) { for update in proto::split_worktree_update(message) {
session.peer.send(session.connection_id, update)?; session.peer.send(session.connection_id, update.clone())?;
} }
// Stream this worktree's diagnostics. // Stream this worktree's diagnostics.
@@ -1497,23 +1493,21 @@ fn notify_rejoined_projects(
} }
} }
for repository in mem::take(&mut project.updated_repositories) { for language_server in &project.language_servers {
for update in split_repository_update(repository) {
session.peer.send(session.connection_id, update)?;
}
}
for id in mem::take(&mut project.removed_repositories) {
session.peer.send( session.peer.send(
session.connection_id, session.connection_id,
proto::RemoveRepository { proto::UpdateLanguageServer {
project_id: project.id.to_proto(), project_id: project.id.to_proto(),
id, language_server_id: language_server.id,
variant: Some(
proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
proto::LspDiskBasedDiagnosticsUpdated {},
),
),
}, },
)?; )?;
} }
} }
Ok(()) Ok(())
} }
@@ -1899,7 +1893,7 @@ fn join_project_internal(
removed_entries: Default::default(), removed_entries: Default::default(),
scan_id: worktree.scan_id, scan_id: worktree.scan_id,
is_last_update: worktree.scan_id == worktree.completed_scan_id, is_last_update: worktree.scan_id == worktree.completed_scan_id,
updated_repositories: worktree.legacy_repository_entries.into_values().collect(), updated_repositories: worktree.repository_entries.into_values().collect(),
removed_repositories: Default::default(), removed_repositories: Default::default(),
}; };
for update in proto::split_worktree_update(message) { for update in proto::split_worktree_update(message) {
@@ -1932,12 +1926,6 @@ fn join_project_internal(
} }
} }
for repository in mem::take(&mut project.repositories) {
for update in split_repository_update(repository) {
session.peer.send(session.connection_id, update)?;
}
}
for language_server in &project.language_servers { for language_server in &project.language_servers {
session.peer.send( session.peer.send(
session.connection_id, session.connection_id,
@@ -2030,54 +2018,6 @@ async fn update_worktree(
Ok(()) Ok(())
} }
async fn update_repository(
request: proto::UpdateRepository,
response: Response<proto::UpdateRepository>,
session: Session,
) -> Result<()> {
let guest_connection_ids = session
.db()
.await
.update_repository(&request, session.connection_id)
.await?;
broadcast(
Some(session.connection_id),
guest_connection_ids.iter().copied(),
|connection_id| {
session
.peer
.forward_send(session.connection_id, connection_id, request.clone())
},
);
response.send(proto::Ack {})?;
Ok(())
}
async fn remove_repository(
request: proto::RemoveRepository,
response: Response<proto::RemoveRepository>,
session: Session,
) -> Result<()> {
let guest_connection_ids = session
.db()
.await
.remove_repository(&request, session.connection_id)
.await?;
broadcast(
Some(session.connection_id),
guest_connection_ids.iter().copied(),
|connection_id| {
session
.peer
.forward_send(session.connection_id, connection_id, request.clone())
},
);
response.send(proto::Ack {})?;
Ok(())
}
/// Updates other participants with changes to the diagnostics /// Updates other participants with changes to the diagnostics
async fn update_diagnostic_summary( async fn update_diagnostic_summary(
message: proto::UpdateDiagnosticSummary, message: proto::UpdateDiagnosticSummary,

View File

@@ -348,7 +348,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
// Return some completions from the host's language server. // Return some completions from the host's language server.
cx_a.executor().start_waiting(); cx_a.executor().start_waiting();
fake_language_server fake_language_server
.set_request_handler::<lsp::request::Completion, _, _>(|params, _| async move { .handle_request::<lsp::request::Completion, _, _>(|params, _| async move {
assert_eq!( assert_eq!(
params.text_document_position.text_document.uri, params.text_document_position.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(), lsp::Url::from_file_path("/a/main.rs").unwrap(),
@@ -412,7 +412,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
// Return a resolved completion from the host's language server. // Return a resolved completion from the host's language server.
// The resolved completion has an additional text edit. // The resolved completion has an additional text edit.
fake_language_server.set_request_handler::<lsp::request::ResolveCompletionItem, _, _>( fake_language_server.handle_request::<lsp::request::ResolveCompletionItem, _, _>(
|params, _| async move { |params, _| async move {
assert_eq!(params.label, "first_method(…)"); assert_eq!(params.label, "first_method(…)");
Ok(lsp::CompletionItem { Ok(lsp::CompletionItem {
@@ -465,7 +465,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
}); });
let mut completion_response = fake_language_server let mut completion_response = fake_language_server
.set_request_handler::<lsp::request::Completion, _, _>(|params, _| async move { .handle_request::<lsp::request::Completion, _, _>(|params, _| async move {
assert_eq!( assert_eq!(
params.text_document_position.text_document.uri, params.text_document_position.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(), lsp::Url::from_file_path("/a/main.rs").unwrap(),
@@ -496,7 +496,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
// The completion now gets a new `text_edit.new_text` when resolving the completion item // The completion now gets a new `text_edit.new_text` when resolving the completion item
let mut resolve_completion_response = fake_language_server let mut resolve_completion_response = fake_language_server
.set_request_handler::<lsp::request::ResolveCompletionItem, _, _>(|params, _| async move { .handle_request::<lsp::request::ResolveCompletionItem, _, _>(|params, _| async move {
assert_eq!(params.label, "third_method(…)"); assert_eq!(params.label, "third_method(…)");
Ok(lsp::CompletionItem { Ok(lsp::CompletionItem {
label: "third_method(…)".into(), label: "third_method(…)".into(),
@@ -589,7 +589,7 @@ async fn test_collaborating_with_code_actions(
let mut fake_language_server = fake_language_servers.next().await.unwrap(); let mut fake_language_server = fake_language_servers.next().await.unwrap();
let mut requests = fake_language_server let mut requests = fake_language_server
.set_request_handler::<lsp::request::CodeActionRequest, _, _>(|params, _| async move { .handle_request::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
assert_eq!( assert_eq!(
params.text_document.uri, params.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(), lsp::Url::from_file_path("/a/main.rs").unwrap(),
@@ -611,7 +611,7 @@ async fn test_collaborating_with_code_actions(
cx_b.focus(&editor_b); cx_b.focus(&editor_b);
let mut requests = fake_language_server let mut requests = fake_language_server
.set_request_handler::<lsp::request::CodeActionRequest, _, _>(|params, _| async move { .handle_request::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
assert_eq!( assert_eq!(
params.text_document.uri, params.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(), lsp::Url::from_file_path("/a/main.rs").unwrap(),
@@ -689,7 +689,7 @@ async fn test_collaborating_with_code_actions(
Editor::confirm_code_action(editor, &ConfirmCodeAction { item_ix: Some(0) }, window, cx) Editor::confirm_code_action(editor, &ConfirmCodeAction { item_ix: Some(0) }, window, cx)
}) })
.unwrap(); .unwrap();
fake_language_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>( fake_language_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
|_, _| async move { |_, _| async move {
Ok(lsp::CodeAction { Ok(lsp::CodeAction {
title: "Inline into all callers".to_string(), title: "Inline into all callers".to_string(),
@@ -812,7 +812,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
}); });
fake_language_server fake_language_server
.set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move { .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs"); assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
assert_eq!(params.position, lsp::Position::new(0, 7)); assert_eq!(params.position, lsp::Position::new(0, 7));
Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new( Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
@@ -855,7 +855,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
}); });
fake_language_server fake_language_server
.set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move { .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs"); assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
assert_eq!(params.position, lsp::Position::new(0, 8)); assert_eq!(params.position, lsp::Position::new(0, 8));
Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new( Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
@@ -891,7 +891,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
Editor::confirm_rename(editor, &ConfirmRename, window, cx).unwrap() Editor::confirm_rename(editor, &ConfirmRename, window, cx).unwrap()
}); });
fake_language_server fake_language_server
.set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move { .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
assert_eq!( assert_eq!(
params.text_document_position.text_document.uri.as_str(), params.text_document_position.text_document.uri.as_str(),
"file:///dir/one.rs" "file:///dir/one.rs"
@@ -1321,7 +1321,7 @@ async fn test_on_input_format_from_host_to_guest(
// Receive an OnTypeFormatting request as the host's language server. // Receive an OnTypeFormatting request as the host's language server.
// Return some formatting from the host's language server. // Return some formatting from the host's language server.
fake_language_server.set_request_handler::<lsp::request::OnTypeFormatting, _, _>( fake_language_server.handle_request::<lsp::request::OnTypeFormatting, _, _>(
|params, _| async move { |params, _| async move {
assert_eq!( assert_eq!(
params.text_document_position.text_document.uri, params.text_document_position.text_document.uri,
@@ -1452,7 +1452,7 @@ async fn test_on_input_format_from_guest_to_host(
// Return some formatting from the host's language server. // Return some formatting from the host's language server.
executor.start_waiting(); executor.start_waiting();
fake_language_server fake_language_server
.set_request_handler::<lsp::request::OnTypeFormatting, _, _>(|params, _| async move { .handle_request::<lsp::request::OnTypeFormatting, _, _>(|params, _| async move {
assert_eq!( assert_eq!(
params.text_document_position.text_document.uri, params.text_document_position.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(), lsp::Url::from_file_path("/a/main.rs").unwrap(),
@@ -1624,7 +1624,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
let edits_made = Arc::new(AtomicUsize::new(0)); let edits_made = Arc::new(AtomicUsize::new(0));
let closure_edits_made = Arc::clone(&edits_made); let closure_edits_made = Arc::clone(&edits_made);
fake_language_server fake_language_server
.set_request_handler::<lsp::request::InlayHintRequest, _, _>(move |params, _| { .handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
let task_edits_made = Arc::clone(&closure_edits_made); let task_edits_made = Arc::clone(&closure_edits_made);
async move { async move {
assert_eq!( assert_eq!(
@@ -1859,7 +1859,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
let fake_language_server = fake_language_servers.next().await.unwrap(); let fake_language_server = fake_language_servers.next().await.unwrap();
let closure_other_hints = Arc::clone(&other_hints); let closure_other_hints = Arc::clone(&other_hints);
fake_language_server fake_language_server
.set_request_handler::<lsp::request::InlayHintRequest, _, _>(move |params, _| { .handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
let task_other_hints = Arc::clone(&closure_other_hints); let task_other_hints = Arc::clone(&closure_other_hints);
async move { async move {
assert_eq!( assert_eq!(

View File

@@ -26,7 +26,7 @@ use language::{
tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, FakeLspAdapter, tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, FakeLspAdapter,
Language, LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope, Language, LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
}; };
use lsp::{LanguageServerId, OneOf}; use lsp::LanguageServerId;
use parking_lot::Mutex; use parking_lot::Mutex;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use project::{ use project::{
@@ -2847,7 +2847,7 @@ async fn test_git_diff_base_change(
}); });
} }
#[gpui::test(iterations = 10)] #[gpui::test]
async fn test_git_branch_name( async fn test_git_branch_name(
executor: BackgroundExecutor, executor: BackgroundExecutor,
cx_a: &mut TestAppContext, cx_a: &mut TestAppContext,
@@ -2895,10 +2895,9 @@ async fn test_git_branch_name(
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>(); let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
assert_eq!(worktrees.len(), 1); assert_eq!(worktrees.len(), 1);
let worktree = worktrees[0].clone(); let worktree = worktrees[0].clone();
let snapshot = worktree.read(cx).snapshot(); let root_entry = worktree.read(cx).snapshot().root_git_entry().unwrap();
let repo = snapshot.repositories().first().unwrap();
assert_eq!( assert_eq!(
repo.branch().map(|branch| branch.name.to_string()), root_entry.branch().map(|branch| branch.name.to_string()),
branch_name branch_name
); );
} }
@@ -4480,7 +4479,7 @@ async fn test_formatting_buffer(
project.register_buffer_with_language_servers(&buffer_b, cx) project.register_buffer_with_language_servers(&buffer_b, cx)
}); });
let fake_language_server = fake_language_servers.next().await.unwrap(); let fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.set_request_handler::<lsp::request::Formatting, _, _>(|_, _| async move { fake_language_server.handle_request::<lsp::request::Formatting, _, _>(|_, _| async move {
Ok(Some(vec![ Ok(Some(vec![
lsp::TextEdit { lsp::TextEdit {
range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 4)), range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 4)),
@@ -4633,7 +4632,7 @@ async fn test_prettier_formatting_buffer(
}); });
}); });
let fake_language_server = fake_language_servers.next().await.unwrap(); let fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.set_request_handler::<lsp::request::Formatting, _, _>(|_, _| async move { fake_language_server.handle_request::<lsp::request::Formatting, _, _>(|_, _| async move {
panic!( panic!(
"Unexpected: prettier should be preferred since it's enabled and language supports it" "Unexpected: prettier should be preferred since it's enabled and language supports it"
) )
@@ -4731,16 +4730,14 @@ async fn test_definition(
// Request the definition of a symbol as the guest. // Request the definition of a symbol as the guest.
let fake_language_server = fake_language_servers.next().await.unwrap(); let fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.set_request_handler::<lsp::request::GotoDefinition, _, _>( fake_language_server.handle_request::<lsp::request::GotoDefinition, _, _>(|_, _| async move {
|_, _| async move { Ok(Some(lsp::GotoDefinitionResponse::Scalar(
Ok(Some(lsp::GotoDefinitionResponse::Scalar( lsp::Location::new(
lsp::Location::new( lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(),
lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(), lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), ),
), )))
))) });
},
);
let definitions_1 = project_b let definitions_1 = project_b
.update(cx_b, |p, cx| p.definition(&buffer_b, 23, cx)) .update(cx_b, |p, cx| p.definition(&buffer_b, 23, cx))
@@ -4762,16 +4759,14 @@ async fn test_definition(
// Try getting more definitions for the same buffer, ensuring the buffer gets reused from // Try getting more definitions for the same buffer, ensuring the buffer gets reused from
// the previous call to `definition`. // the previous call to `definition`.
fake_language_server.set_request_handler::<lsp::request::GotoDefinition, _, _>( fake_language_server.handle_request::<lsp::request::GotoDefinition, _, _>(|_, _| async move {
|_, _| async move { Ok(Some(lsp::GotoDefinitionResponse::Scalar(
Ok(Some(lsp::GotoDefinitionResponse::Scalar( lsp::Location::new(
lsp::Location::new( lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(),
lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(), lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)),
lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)), ),
), )))
))) });
},
);
let definitions_2 = project_b let definitions_2 = project_b
.update(cx_b, |p, cx| p.definition(&buffer_b, 33, cx)) .update(cx_b, |p, cx| p.definition(&buffer_b, 33, cx))
@@ -4795,7 +4790,7 @@ async fn test_definition(
definitions_2[0].target.buffer definitions_2[0].target.buffer
); );
fake_language_server.set_request_handler::<lsp::request::GotoTypeDefinition, _, _>( fake_language_server.handle_request::<lsp::request::GotoTypeDefinition, _, _>(
|req, _| async move { |req, _| async move {
assert_eq!( assert_eq!(
req.text_document_position_params.position, req.text_document_position_params.position,
@@ -4885,7 +4880,7 @@ async fn test_references(
// Request references to a symbol as the guest. // Request references to a symbol as the guest.
let fake_language_server = fake_language_servers.next().await.unwrap(); let fake_language_server = fake_language_servers.next().await.unwrap();
let (lsp_response_tx, rx) = mpsc::unbounded::<Result<Option<Vec<lsp::Location>>>>(); let (lsp_response_tx, rx) = mpsc::unbounded::<Result<Option<Vec<lsp::Location>>>>();
fake_language_server.set_request_handler::<lsp::request::References, _, _>({ fake_language_server.handle_request::<lsp::request::References, _, _>({
let rx = Arc::new(Mutex::new(Some(rx))); let rx = Arc::new(Mutex::new(Some(rx)));
move |params, _| { move |params, _| {
assert_eq!( assert_eq!(
@@ -5135,7 +5130,7 @@ async fn test_document_highlights(
// Request document highlights as the guest. // Request document highlights as the guest.
let fake_language_server = fake_language_servers.next().await.unwrap(); let fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.set_request_handler::<lsp::request::DocumentHighlightRequest, _, _>( fake_language_server.handle_request::<lsp::request::DocumentHighlightRequest, _, _>(
|params, _| async move { |params, _| async move {
assert_eq!( assert_eq!(
params params
@@ -5272,7 +5267,7 @@ async fn test_lsp_hover(
"CrabLang-ls" => { "CrabLang-ls" => {
servers_with_hover_requests.insert( servers_with_hover_requests.insert(
new_server_name.clone(), new_server_name.clone(),
new_server.set_request_handler::<lsp::request::HoverRequest, _, _>( new_server.handle_request::<lsp::request::HoverRequest, _, _>(
move |params, _| { move |params, _| {
assert_eq!( assert_eq!(
params params
@@ -5298,7 +5293,7 @@ async fn test_lsp_hover(
"rust-analyzer" => { "rust-analyzer" => {
servers_with_hover_requests.insert( servers_with_hover_requests.insert(
new_server_name.clone(), new_server_name.clone(),
new_server.set_request_handler::<lsp::request::HoverRequest, _, _>( new_server.handle_request::<lsp::request::HoverRequest, _, _>(
|params, _| async move { |params, _| async move {
assert_eq!( assert_eq!(
params params
@@ -5399,16 +5394,9 @@ async fn test_project_symbols(
let active_call_a = cx_a.read(ActiveCall::global); let active_call_a = cx_a.read(ActiveCall::global);
client_a.language_registry().add(rust_lang()); client_a.language_registry().add(rust_lang());
let mut fake_language_servers = client_a.language_registry().register_fake_lsp( let mut fake_language_servers = client_a
"Rust", .language_registry()
FakeLspAdapter { .register_fake_lsp("Rust", Default::default());
capabilities: lsp::ServerCapabilities {
workspace_symbol_provider: Some(OneOf::Left(true)),
..Default::default()
},
..Default::default()
},
);
client_a client_a
.fs() .fs()
@@ -5443,24 +5431,22 @@ async fn test_project_symbols(
.unwrap(); .unwrap();
let fake_language_server = fake_language_servers.next().await.unwrap(); let fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.set_request_handler::<lsp::WorkspaceSymbolRequest, _, _>( fake_language_server.handle_request::<lsp::WorkspaceSymbolRequest, _, _>(|_, _| async move {
|_, _| async move { Ok(Some(lsp::WorkspaceSymbolResponse::Flat(vec![
Ok(Some(lsp::WorkspaceSymbolResponse::Flat(vec![ #[allow(deprecated)]
#[allow(deprecated)] lsp::SymbolInformation {
lsp::SymbolInformation { name: "TWO".into(),
name: "TWO".into(), location: lsp::Location {
location: lsp::Location { uri: lsp::Url::from_file_path("/code/crate-2/two.rs").unwrap(),
uri: lsp::Url::from_file_path("/code/crate-2/two.rs").unwrap(), range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
},
kind: lsp::SymbolKind::CONSTANT,
tags: None,
container_name: None,
deprecated: None,
}, },
]))) kind: lsp::SymbolKind::CONSTANT,
}, tags: None,
); container_name: None,
deprecated: None,
},
])))
});
// Request the definition of a symbol as the guest. // Request the definition of a symbol as the guest.
let symbols = project_b let symbols = project_b
@@ -5542,16 +5528,14 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
.unwrap(); .unwrap();
let fake_language_server = fake_language_servers.next().await.unwrap(); let fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.set_request_handler::<lsp::request::GotoDefinition, _, _>( fake_language_server.handle_request::<lsp::request::GotoDefinition, _, _>(|_, _| async move {
|_, _| async move { Ok(Some(lsp::GotoDefinitionResponse::Scalar(
Ok(Some(lsp::GotoDefinitionResponse::Scalar( lsp::Location::new(
lsp::Location::new( lsp::Url::from_file_path("/root/b.rs").unwrap(),
lsp::Url::from_file_path("/root/b.rs").unwrap(), lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)), ),
), )))
))) });
},
);
let definitions; let definitions;
let buffer_b2; let buffer_b2;
@@ -6787,7 +6771,7 @@ async fn test_remote_git_branches(
.map(ToString::to_string) .map(ToString::to_string)
.collect::<HashSet<_>>(); .collect::<HashSet<_>>();
let (project_a, _) = client_a.build_local_project("/project", cx_a).await; let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await;
let project_id = active_call_a let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
@@ -6800,6 +6784,8 @@ async fn test_remote_git_branches(
let repo_b = cx_b.update(|cx| project_b.read(cx).active_repository(cx).unwrap()); let repo_b = cx_b.update(|cx| project_b.read(cx).active_repository(cx).unwrap());
let root_path = ProjectPath::root_path(worktree_id);
let branches_b = cx_b let branches_b = cx_b
.update(|cx| repo_b.update(cx, |repository, _| repository.branches())) .update(|cx| repo_b.update(cx, |repository, _| repository.branches()))
.await .await
@@ -6824,15 +6810,11 @@ async fn test_remote_git_branches(
let host_branch = cx_a.update(|cx| { let host_branch = cx_a.update(|cx| {
project_a.update(cx, |project, cx| { project_a.update(cx, |project, cx| {
project project.worktree_store().update(cx, |worktree_store, cx| {
.repositories(cx) worktree_store
.values() .current_branch(root_path.clone(), cx)
.next() .unwrap()
.unwrap() })
.read(cx)
.current_branch()
.unwrap()
.clone()
}) })
}); });
@@ -6861,15 +6843,9 @@ async fn test_remote_git_branches(
let host_branch = cx_a.update(|cx| { let host_branch = cx_a.update(|cx| {
project_a.update(cx, |project, cx| { project_a.update(cx, |project, cx| {
project project.worktree_store().update(cx, |worktree_store, cx| {
.repositories(cx) worktree_store.current_branch(root_path, cx).unwrap()
.values() })
.next()
.unwrap()
.read(cx)
.current_branch()
.unwrap()
.clone()
}) })
}); });

Some files were not shown because too many files have changed in this diff Show More