Compare commits


1 Commit

Author: Ben Brandt
SHA1: a6a5f55a05
Date: 2025-06-05 20:22:38 +02:00
Message: Add GitHub token authentication to HTTP client
Automatically adds GitHub authentication headers when the GITHUB_TOKEN
environment variable is set.
314 changed files with 4696 additions and 14636 deletions
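The commit description above is the only prose in this compare view. As a rough illustration of the behavior it describes — a minimal sketch in which the function name, URL check, and header format are assumptions, not Zed's actual http_client code — the change amounts to conditionally attaching an Authorization header when GITHUB_TOKEN is present:

use std::env;

// Hypothetical helper mirroring the commit's described behavior: when the
// GITHUB_TOKEN environment variable is set, produce an Authorization header
// for requests to the GitHub API; otherwise send the request unauthenticated.
fn github_auth_header(url: &str) -> Option<(String, String)> {
    // Only attach the token to GitHub API requests (illustrative check).
    if !url.starts_with("https://api.github.com/") {
        return None;
    }
    let token = env::var("GITHUB_TOKEN").ok()?;
    Some(("Authorization".to_string(), format!("Bearer {token}")))
}

fn main() {
    let url = "https://api.github.com/repos/zed-industries/zed";
    match github_auth_header(url) {
        Some((name, value)) => println!("{name}: {value}"),
        None => println!("no GITHUB_TOKEN set; sending unauthenticated request"),
    }
}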


@@ -1,4 +1,4 @@
name: Bug Report (AI)
name: Bug Report (AI Related)
description: Zed Agent Panel Bugs
type: "Bug"
labels: ["ai"]
@@ -19,14 +19,15 @@ body:
2.
3.
**Expected Behavior**:
**Actual Behavior**:
Actual Behavior:
Expected Behavior:
### Model Provider Details
- Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc)
- Model Name:
- Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads)
- Other Details (MCPs, other settings, etc):
- MCP Servers in-use:
- Other Details:
validations:
required: true


@@ -0,0 +1,36 @@
name: Bug Report (Edit Predictions)
description: Zed Edit Predictions bugs
type: "Bug"
labels: ["ai", "inline completion", "zeta"]
title: "Edit Predictions: <a short description of the Edit Prediction bug>"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE
### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
<!-- Please include the LLM provider and model name you are using -->
Steps to trigger the problem:
1.
2.
3.
Actual Behavior:
Expected Behavior:
validations:
required: true
- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
validations:
required: true

.github/ISSUE_TEMPLATE/03_bug_git.yml (new file, 35 lines)

@@ -0,0 +1,35 @@
name: Bug Report (Git)
description: Zed Git-Related Bugs
type: "Bug"
labels: ["git"]
title: "Git: <a short description of the Git bug>"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE
### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
Steps to trigger the problem:
1.
2.
3.
Actual Behavior:
Expected Behavior:
validations:
required: true
- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
validations:
required: true


@@ -19,8 +19,8 @@ body:
2.
3.
**Expected Behavior**:
**Actual Behavior**:
Actual Behavior:
Expected Behavior:
validations:
required: true


@@ -18,16 +18,14 @@ body:
- Issues with insufficient detail may be summarily closed.
-->
DESCRIPTION_HERE
Steps to reproduce:
1.
2.
3.
4.
**Expected Behavior**:
**Actual Behavior**:
Expected Behavior:
Actual Behavior:
<!-- Before Submitting, did you:
1. Include settings.json, keymap.json, .editorconfig if relevant?


@@ -19,12 +19,6 @@ runs:
shell: bash -euxo pipefail {0}
run: ./script/linux
- name: Check for broken links
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 # v2.4.1
with:
args: --no-progress './docs/src/**/*'
fail: true
- name: Build book
shell: bash -euxo pipefail {0}
run: |


@@ -1,12 +1,6 @@
name: "Run tests"
description: "Runs the tests"
inputs:
use-xvfb:
description: "Whether to run tests with xvfb"
required: false
default: "false"
runs:
using: "composite"
steps:
@@ -26,9 +20,4 @@ runs:
- name: Run tests
shell: bash -euxo pipefail {0}
run: |
if [ "${{ inputs.use-xvfb }}" == "true" ]; then
xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24 -nolisten tcp" cargo nextest run --workspace --no-fail-fast
else
cargo nextest run --workspace --no-fail-fast
fi
run: cargo nextest run --workspace --no-fail-fast


@@ -183,9 +183,6 @@ jobs:
- name: Check for todo! and FIXME comments
run: script/check-todos
- name: Check modifier use in keymaps
run: script/check-keymaps
- name: Run style checks
uses: ./.github/actions/check_style
@@ -319,8 +316,6 @@ jobs:
- name: Run tests
uses: ./.github/actions/run_tests
with:
use-xvfb: true
- name: Build other binaries and features
run: |
@@ -741,64 +736,6 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
freebsd:
timeout-minutes: 60
runs-on: github-8vcpu-ubuntu-2404
if: |
startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
needs: [linux_tests]
name: Build Zed on FreeBSD
# env:
# MYTOKEN : ${{ secrets.MYTOKEN }}
# MYTOKEN2: "value2"
steps:
- uses: actions/checkout@v4
- name: Build FreeBSD remote-server
id: freebsd-build
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
with:
# envs: "MYTOKEN MYTOKEN2"
usesh: true
release: 13.5
copyback: true
prepare: |
pkg install -y \
bash curl jq git \
rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
run: |
freebsd-version
sysctl hw.model
sysctl hw.ncpu
sysctl hw.physmem
sysctl hw.usermem
git config --global --add safe.directory /home/runner/work/zed/zed
rustup-init --profile minimal --default-toolchain none -y
. "$HOME/.cargo/env"
./script/bundle-freebsd
mkdir -p out/
mv "target/zed-remote-server-freebsd-x86_64.gz" out/
rm -rf target/
cargo clean
- name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-freebsd.gz
path: out/zed-remote-server-freebsd-x86_64.gz
- name: Upload Artifacts to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: |
out/zed-remote-server-freebsd-x86_64.gz
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
nix-build:
name: Build with Nix
uses: ./.github/workflows/nix.yml
@@ -813,12 +750,12 @@ jobs:
if: |
startsWith(github.ref, 'refs/tags/v')
&& endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, freebsd]
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64]
runs-on:
- self-hosted
- bundle
steps:
- name: gh release
run: gh release edit $GITHUB_REF_NAME --draft=false
run: gh release edit $GITHUB_REF_NAME --draft=true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -167,50 +167,6 @@ jobs:
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz
freebsd:
timeout-minutes: 60
if: github.repository_owner == 'zed-industries'
runs-on: github-8vcpu-ubuntu-2404
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
name: Build Zed on FreeBSD
# env:
# MYTOKEN : ${{ secrets.MYTOKEN }}
# MYTOKEN2: "value2"
steps:
- uses: actions/checkout@v4
- name: Build FreeBSD remote-server
id: freebsd-build
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
with:
# envs: "MYTOKEN MYTOKEN2"
usesh: true
release: 13.5
copyback: true
prepare: |
pkg install -y \
bash curl jq git \
rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
run: |
freebsd-version
sysctl hw.model
sysctl hw.ncpu
sysctl hw.physmem
sysctl hw.usermem
git config --global --add safe.directory /home/runner/work/zed/zed
rustup-init --profile minimal --default-toolchain none -y
. "$HOME/.cargo/env"
./script/bundle-freebsd
mkdir -p out/
mv "target/zed-remote-server-freebsd-x86_64.gz" out/
rm -rf target/
cargo clean
- name: Upload Zed Nightly
run: script/upload-nightly freebsd
bundle-nix:
name: Build and cache Nix package
needs: tests


@@ -62,11 +62,11 @@ jobs:
- name: Run unit evals
shell: bash -euxo pipefail {0}
run: cargo nextest run --workspace --no-fail-fast --features eval --no-capture -E 'test(::eval_)'
run: cargo nextest run --workspace --no-fail-fast --features eval --no-capture -E 'test(::eval_)' --test-threads 1
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
- name: Send failure message to Slack channel if needed
- name: Send the pull request link into the Slack channel
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:


@@ -47,7 +47,6 @@
"remove_trailing_whitespace_on_save": true,
"ensure_final_newline_on_save": true,
"file_scan_exclusions": [
"crates/assistant_tools/src/evals/fixtures",
"crates/eval/worktrees/",
"crates/eval/repos/",
"**/.git",

Cargo.lock (generated; 71 changed lines)

@@ -59,7 +59,7 @@ dependencies = [
"assistant_slash_command",
"assistant_slash_commands",
"assistant_tool",
"assistant_tools",
"async-watch",
"audio",
"buffer_diff",
"chrono",
@@ -99,7 +99,6 @@ dependencies = [
"paths",
"picker",
"postage",
"pretty_assertions",
"project",
"prompt_store",
"proto",
@@ -131,7 +130,6 @@ dependencies = [
"urlencoding",
"util",
"uuid",
"watch",
"workspace",
"workspace-hack",
"zed_actions",
@@ -149,6 +147,7 @@ dependencies = [
"deepseek",
"fs",
"gpui",
"indexmap",
"language_model",
"lmstudio",
"log",
@@ -632,6 +631,7 @@ name = "assistant_tool"
version = "0.1.0"
dependencies = [
"anyhow",
"async-watch",
"buffer_diff",
"clock",
"collections",
@@ -653,7 +653,6 @@ dependencies = [
"settings",
"text",
"util",
"watch",
"workspace",
"workspace-hack",
"zlog",
@@ -666,6 +665,7 @@ dependencies = [
"agent_settings",
"anyhow",
"assistant_tool",
"async-watch",
"buffer_diff",
"chrono",
"client",
@@ -705,7 +705,6 @@ dependencies = [
"serde_json",
"settings",
"smallvec",
"smol",
"streaming_diff",
"strsim",
"task",
@@ -717,7 +716,6 @@ dependencies = [
"ui",
"unindent",
"util",
"watch",
"web_search",
"which 6.0.3",
"workspace",
@@ -1076,6 +1074,15 @@ dependencies = [
"tungstenite 0.26.2",
]
[[package]]
name = "async-watch"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a078faf4e27c0c6cc0efb20e5da59dcccc04968ebf2801d8e0b2195124cdcdb2"
dependencies = [
"event-listener 2.5.3",
]
[[package]]
name = "async_zip"
version = "0.0.17"
@@ -2980,6 +2987,7 @@ dependencies = [
"anyhow",
"assistant_context_editor",
"assistant_slash_command",
"assistant_tool",
"async-stripe",
"async-trait",
"async-tungstenite",
@@ -3161,16 +3169,6 @@ dependencies = [
"memchr",
]
[[package]]
name = "command-fds"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ec1052629a80c28594777d1252efc8a6b005d13f9edfd8c3fc0f44d5b32489a"
dependencies = [
"nix 0.30.1",
"thiserror 2.0.12",
]
[[package]]
name = "command_palette"
version = "0.1.0"
@@ -4063,7 +4061,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
"collections",
"dap",
"futures 0.3.31",
"gpui",
@@ -4237,7 +4234,6 @@ dependencies = [
"futures 0.3.31",
"fuzzy",
"gpui",
"itertools 0.14.0",
"language",
"log",
"menu",
@@ -5017,6 +5013,7 @@ dependencies = [
"assistant_tool",
"assistant_tools",
"async-trait",
"async-watch",
"buffer_diff",
"chrono",
"clap",
@@ -5058,7 +5055,6 @@ dependencies = [
"unindent",
"util",
"uuid",
"watch",
"workspace-hack",
"zed_llm_client",
]
@@ -8743,6 +8739,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
"async-watch",
"clock",
"collections",
"ctor",
@@ -8792,7 +8789,6 @@ dependencies = [
"unicase",
"unindent",
"util",
"watch",
"workspace-hack",
"zlog",
]
@@ -10142,18 +10138,6 @@ dependencies = [
"memoffset",
]
[[package]]
name = "nix"
version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
dependencies = [
"bitflags 2.9.0",
"cfg-if",
"cfg_aliases 0.2.1",
"libc",
]
[[package]]
name = "node_runtime"
version = "0.1.0"
@@ -10163,6 +10147,7 @@ dependencies = [
"async-std",
"async-tar",
"async-trait",
"async-watch",
"futures 0.3.31",
"http_client",
"log",
@@ -10172,7 +10157,6 @@ dependencies = [
"serde_json",
"smol",
"util",
"watch",
"which 6.0.3",
"workspace-hack",
]
@@ -10221,7 +10205,6 @@ dependencies = [
"util",
"workspace",
"workspace-hack",
"zed_actions",
]
[[package]]
@@ -12134,6 +12117,7 @@ dependencies = [
"unindent",
"url",
"util",
"uuid",
"which 6.0.3",
"workspace-hack",
"worktree",
@@ -13022,6 +13006,7 @@ dependencies = [
"askpass",
"assistant_tool",
"assistant_tools",
"async-watch",
"backtrace",
"cargo_toml",
"chrono",
@@ -13068,7 +13053,6 @@ dependencies = [
"toml 0.8.20",
"unindent",
"util",
"watch",
"worktree",
"zlog",
]
@@ -15748,7 +15732,6 @@ dependencies = [
"task",
"theme",
"thiserror 2.0.12",
"url",
"util",
"windows 0.61.1",
"workspace-hack",
@@ -17145,7 +17128,6 @@ dependencies = [
"async-fs",
"async_zip",
"collections",
"command-fds",
"dirs 4.0.0",
"dunce",
"futures 0.3.31",
@@ -17931,19 +17913,6 @@ dependencies = [
"leb128",
]
[[package]]
name = "watch"
version = "0.1.0"
dependencies = [
"ctor",
"futures 0.3.31",
"gpui",
"parking_lot",
"rand 0.8.5",
"workspace-hack",
"zlog",
]
[[package]]
name = "wayland-backend"
version = "0.3.8"
@@ -19755,6 +19724,7 @@ dependencies = [
"assistant_context_editor",
"assistant_tool",
"assistant_tools",
"async-watch",
"audio",
"auto_update",
"auto_update_ui",
@@ -19871,7 +19841,6 @@ dependencies = [
"uuid",
"vim",
"vim_mode_setting",
"watch",
"web_search",
"web_search_providers",
"welcome",


@@ -165,7 +165,6 @@ members = [
"crates/util_macros",
"crates/vim",
"crates/vim_mode_setting",
"crates/watch",
"crates/web_search",
"crates/web_search_providers",
"crates/welcome",
@@ -374,7 +373,6 @@ util = { path = "crates/util" }
util_macros = { path = "crates/util_macros" }
vim = { path = "crates/vim" }
vim_mode_setting = { path = "crates/vim_mode_setting" }
watch = { path = "crates/watch" }
web_search = { path = "crates/web_search" }
web_search_providers = { path = "crates/web_search_providers" }
welcome = { path = "crates/welcome" }
@@ -405,6 +403,7 @@ async-recursion = "1.0.0"
async-tar = "0.5.0"
async-trait = "0.1"
async-tungstenite = "0.29.1"
async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
aws-config = { version = "1.6.1", features = ["behavior-version-latest"] }
aws-credential-types = { version = "1.2.2", features = [


@@ -35,7 +35,7 @@
"ctrl-shift-f5": "debugger::Restart",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
"ctrl-f11": "debugger::StepInto",
"cmd-f11": "debugger::StepInto",
"shift-f11": "debugger::StepOut",
"f11": "zed::ToggleFullScreen",
"ctrl-alt-z": "edit_prediction::RateCompletions",
@@ -59,6 +59,7 @@
"tab": "editor::Tab",
"shift-tab": "editor::Backtab",
"ctrl-k": "editor::CutToEndOfLine",
// "ctrl-t": "editor::Transpose",
"ctrl-k ctrl-q": "editor::Rewrap",
"ctrl-k q": "editor::Rewrap",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
@@ -99,16 +100,21 @@
"shift-down": "editor::SelectDown",
"shift-left": "editor::SelectLeft",
"shift-right": "editor::SelectRight",
"ctrl-shift-left": "editor::SelectToPreviousWordStart",
"ctrl-shift-right": "editor::SelectToNextWordEnd",
"ctrl-shift-left": "editor::SelectToPreviousWordStart", // cursorWordLeftSelect
"ctrl-shift-right": "editor::SelectToNextWordEnd", // cursorWordRightSelect
"ctrl-shift-home": "editor::SelectToBeginning",
"ctrl-shift-end": "editor::SelectToEnd",
"ctrl-a": "editor::SelectAll",
"ctrl-l": "editor::SelectLine",
"ctrl-shift-i": "editor::Format",
"alt-shift-o": "editor::OrganizeImports",
// "cmd-shift-left": ["editor::SelectToBeginningOfLine", {"stop_at_soft_wraps": true, "stop_at_indent": true }],
// "ctrl-shift-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
"shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
// "cmd-shift-right": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": true }],
// "ctrl-shift-e": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": true }],
"shift-end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": true }],
// "alt-v": ["editor::MovePageUp", { "center_cursor": true }],
"ctrl-alt-space": "editor::ShowCharacterPalette",
"ctrl-;": "editor::ToggleLineNumbers",
"ctrl-'": "editor::ToggleSelectedDiffHunks",
@@ -134,6 +140,7 @@
"find": "buffer_search::Deploy",
"ctrl-f": "buffer_search::Deploy",
"ctrl-h": "buffer_search::DeployReplace",
// "cmd-e": ["buffer_search::Deploy", { "focus": false }],
"ctrl->": "assistant::QuoteSelection",
"ctrl-<": "assistant::InsertIntoEditor",
"ctrl-alt-e": "editor::SelectEnclosingSymbol",
@@ -146,7 +153,8 @@
"context": "Editor && mode == full && edit_prediction",
"bindings": {
"alt-]": "editor::NextEditPrediction",
"alt-[": "editor::PreviousEditPrediction"
"alt-[": "editor::PreviousEditPrediction",
"alt-right": "editor::AcceptPartialEditPrediction"
}
},
{
@@ -211,6 +219,7 @@
"ctrl-enter": "assistant::Assist",
"ctrl-s": "workspace::Save",
"save": "workspace::Save",
"ctrl->": "assistant::QuoteSelection",
"ctrl-<": "assistant::InsertIntoEditor",
"shift-enter": "assistant::Split",
"ctrl-r": "assistant::CycleMessageRole",
@@ -236,7 +245,6 @@
"ctrl-shift-j": "agent::ToggleNavigationMenu",
"ctrl-shift-i": "agent::ToggleOptionsMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
"ctrl->": "assistant::QuoteSelection",
"ctrl-alt-e": "agent::RemoveAllContext",
"ctrl-shift-e": "project_panel::ToggleFocus",
"ctrl-shift-enter": "agent::ContinueThread",
@@ -260,8 +268,8 @@
{
"context": "AgentPanel && prompt_editor",
"bindings": {
"ctrl-n": "agent::NewTextThread",
"ctrl-alt-t": "agent::NewThread"
"cmd-n": "agent::NewTextThread",
"cmd-alt-t": "agent::NewThread"
}
},
{
@@ -654,16 +662,14 @@
"bindings": {
"alt-tab": "editor::AcceptEditPrediction",
"alt-l": "editor::AcceptEditPrediction",
"tab": "editor::AcceptEditPrediction",
"alt-right": "editor::AcceptPartialEditPrediction"
"tab": "editor::AcceptEditPrediction"
}
},
{
"context": "Editor && edit_prediction_conflict",
"bindings": {
"alt-tab": "editor::AcceptEditPrediction",
"alt-l": "editor::AcceptEditPrediction",
"alt-right": "editor::AcceptPartialEditPrediction"
"alt-l": "editor::AcceptEditPrediction"
}
},
{


@@ -181,7 +181,8 @@
"use_key_equivalents": true,
"bindings": {
"alt-tab": "editor::NextEditPrediction",
"alt-shift-tab": "editor::PreviousEditPrediction"
"alt-shift-tab": "editor::PreviousEditPrediction",
"ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
}
},
{
@@ -252,6 +253,7 @@
"bindings": {
"cmd-enter": "assistant::Assist",
"cmd-s": "workspace::Save",
"cmd->": "assistant::QuoteSelection",
"cmd-<": "assistant::InsertIntoEditor",
"shift-enter": "assistant::Split",
"ctrl-r": "assistant::CycleMessageRole",
@@ -278,7 +280,6 @@
"cmd-shift-j": "agent::ToggleNavigationMenu",
"cmd-shift-i": "agent::ToggleOptionsMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
"cmd->": "assistant::QuoteSelection",
"cmd-alt-e": "agent::RemoveAllContext",
"cmd-shift-e": "project_panel::ToggleFocus",
"cmd-shift-enter": "agent::ContinueThread",
@@ -718,16 +719,14 @@
"context": "Editor && edit_prediction",
"bindings": {
"alt-tab": "editor::AcceptEditPrediction",
"tab": "editor::AcceptEditPrediction",
"ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
"tab": "editor::AcceptEditPrediction"
}
},
{
"context": "Editor && edit_prediction_conflict",
"use_key_equivalents": true,
"bindings": {
"alt-tab": "editor::AcceptEditPrediction",
"ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
"alt-tab": "editor::AcceptEditPrediction"
}
},
{


@@ -13,9 +13,9 @@
}
},
{
"context": "Editor && vim_mode == insert && !menu",
"context": "Editor",
"bindings": {
// "j k": "vim::SwitchToNormalMode"
// "j k": ["workspace::SendKeystrokes", "escape"]
}
}
]


@@ -38,7 +38,7 @@
"ctrl-shift-d": "editor::DuplicateSelection",
"alt-f3": "editor::SelectAllMatches", // find_all_under
// "ctrl-f3": "", // find_under (cancels any selections)
// "ctrl-alt-shift-g": "" // find_under_prev (cancels any selections)
// "cmd-alt-shift-g": "" // find_under_prev (cancels any selections)
"f9": "editor::SortLinesCaseSensitive",
"ctrl-f9": "editor::SortLinesCaseInsensitive",
"f12": "editor::GoToDefinition",


@@ -28,8 +28,7 @@
"context": "InlineAssistEditor",
"use_key_equivalents": true,
"bindings": {
"cmd-shift-backspace": "editor::Cancel",
"cmd-enter": "menu::Confirm"
"cmd-shift-backspace": "editor::Cancel"
// "alt-enter": // Quick Question
// "cmd-shift-enter": // Full File Context
// "cmd-shift-k": // Toggle input focus (editor <> inline assist)


@@ -711,7 +711,7 @@
}
},
{
"context": "AgentPanel || GitPanel || ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || DebugPanel",
"context": "GitPanel || ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || DebugPanel",
"bindings": {
// window related commands (ctrl-w X)
"ctrl-w": null,


@@ -101,12 +101,9 @@
// The second option is decimal.
"unit": "binary"
},
// Determines the modifier to be used to add multiple cursors with the mouse. The open hover link mouse gestures will adapt such that it do not conflict with the multicursor modifier.
//
// 1. Maps to `Alt` on Linux and Windows and to `Option` on MacOS:
// "alt"
// 2. Maps `Control` on Linux and Windows and to `Command` on MacOS:
// "cmd_or_ctrl" (alias: "cmd", "ctrl")
// The key to use for adding multiple cursors
// Currently "alt" or "cmd_or_ctrl" (also aliased as
// "cmd" and "ctrl") are supported.
"multi_cursor_modifier": "alt",
// Whether to enable vim modes and key bindings.
"vim_mode": false,
@@ -217,8 +214,6 @@
"show_signature_help_after_edits": false,
// Whether to show code action button at start of buffer line.
"inline_code_actions": true,
// Whether to allow drag and drop text selection in buffer.
"drag_and_drop_selection": true,
// What to do when go to definition yields no results.
//
// 1. Do nothing: `none`
@@ -604,9 +599,7 @@
// 2. Never show indent guides:
// "never"
"show": "always"
},
// Whether to hide the root entry when only one folder is open in the window.
"hide_root": false
}
},
"outline_panel": {
// Whether to show the outline panel button in the status bar
@@ -778,6 +771,7 @@
"tools": {
"copy_path": true,
"create_directory": true,
"create_file": true,
"delete_path": true,
"diagnostics": true,
"edit_file": true,
@@ -1040,14 +1034,6 @@
"button": true,
// Whether to show warnings or not by default.
"include_warnings": true,
// Settings for using LSP pull diagnostics mechanism in Zed.
"lsp_pull_diagnostics": {
// Whether to pull for diagnostics or not.
"enabled": true,
// Minimum time to wait before pulling diagnostics from the language server(s).
// 0 turns the debounce off.
"debounce_ms": 50
},
// Settings for inline diagnostics
"inline": {
// Whether to show diagnostics inline or not
@@ -1471,9 +1457,7 @@
"language_servers": ["erlang-ls", "!elp", "..."]
},
"Git Commit": {
"allow_rewrap": "anywhere",
"soft_wrap": "editor_width",
"preferred_line_length": 72
"allow_rewrap": "anywhere"
},
"Go": {
"code_actions_on_format": {
@@ -1551,6 +1535,12 @@
"allowed": true
}
},
"SQL": {
"prettier": {
"allowed": true,
"plugins": ["prettier-plugin-sql"]
}
},
"Starlark": {
"language_servers": ["starpls", "!buck2-lsp", "..."]
},


@@ -261,11 +261,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#bfbdb6ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#d2a6ffff",
"font_style": null,
@@ -321,16 +316,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#d2a6ffff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#5ac1feff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#a9d94bff",
"font_style": null,
@@ -457,9 +442,9 @@
"terminal.foreground": "#5c6166ff",
"terminal.bright_foreground": "#5c6166ff",
"terminal.dim_foreground": "#fcfcfcff",
"terminal.ansi.black": "#5c6166ff",
"terminal.ansi.bright_black": "#3b9ee5ff",
"terminal.ansi.dim_black": "#9c9fa2ff",
"terminal.ansi.black": "#fcfcfcff",
"terminal.ansi.bright_black": "#bcbec0ff",
"terminal.ansi.dim_black": "#5c6166ff",
"terminal.ansi.red": "#ef7271ff",
"terminal.ansi.bright_red": "#febab6ff",
"terminal.ansi.dim_red": "#833538ff",
@@ -478,9 +463,9 @@
"terminal.ansi.cyan": "#4dbf99ff",
"terminal.ansi.bright_cyan": "#ace0cbff",
"terminal.ansi.dim_cyan": "#2a5f4aff",
"terminal.ansi.white": "#fcfcfcff",
"terminal.ansi.bright_white": "#fcfcfcff",
"terminal.ansi.dim_white": "#bcbec0ff",
"terminal.ansi.white": "#5c6166ff",
"terminal.ansi.bright_white": "#5c6166ff",
"terminal.ansi.dim_white": "#9c9fa2ff",
"link_text.hover": "#3b9ee5ff",
"conflict": "#f1ad49ff",
"conflict.background": "#ffeedaff",
@@ -647,11 +632,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#5c6166ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#a37accff",
"font_style": null,
@@ -707,16 +687,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#a37accff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#3b9ee5ff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#86b300ff",
"font_style": null,
@@ -1033,11 +1003,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#cccac2ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#dfbfffff",
"font_style": null,
@@ -1093,16 +1058,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#dfbfffff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#72cffeff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#d4fe7fff",
"font_style": null,


@@ -270,11 +270,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#83a598ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#d3869bff",
"font_style": null,
@@ -330,16 +325,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#fabd2eff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#83a598ff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#b8bb25ff",
"font_style": null,
@@ -670,11 +655,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#83a598ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#d3869bff",
"font_style": null,
@@ -730,16 +710,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#fabd2eff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#83a598ff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#b8bb25ff",
"font_style": null,
@@ -1070,11 +1040,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#83a598ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#d3869bff",
"font_style": null,
@@ -1130,16 +1095,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#fabd2eff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#83a598ff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#b8bb25ff",
"font_style": null,
@@ -1272,9 +1227,9 @@
"terminal.foreground": "#282828ff",
"terminal.bright_foreground": "#282828ff",
"terminal.dim_foreground": "#fbf1c7ff",
"terminal.ansi.black": "#282828ff",
"terminal.ansi.bright_black": "#0b6678ff",
"terminal.ansi.dim_black": "#5f5650ff",
"terminal.ansi.black": "#fbf1c7ff",
"terminal.ansi.bright_black": "#b0a189ff",
"terminal.ansi.dim_black": "#282828ff",
"terminal.ansi.red": "#9d0308ff",
"terminal.ansi.bright_red": "#db8b7aff",
"terminal.ansi.dim_red": "#4e1207ff",
@@ -1293,9 +1248,9 @@
"terminal.ansi.cyan": "#437b59ff",
"terminal.ansi.bright_cyan": "#9fbca8ff",
"terminal.ansi.dim_cyan": "#253e2eff",
"terminal.ansi.white": "#fbf1c7ff",
"terminal.ansi.bright_white": "#fbf1c7ff",
"terminal.ansi.dim_white": "#b0a189ff",
"terminal.ansi.white": "#282828ff",
"terminal.ansi.bright_white": "#282828ff",
"terminal.ansi.dim_white": "#73675eff",
"link_text.hover": "#0b6678ff",
"version_control.added": "#797410ff",
"version_control.modified": "#b57615ff",
@@ -1470,11 +1425,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#066578ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#8f3e71ff",
"font_style": null,
@@ -1530,16 +1480,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#b57613ff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#0b6678ff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#79740eff",
"font_style": null,
@@ -1672,9 +1612,9 @@
"terminal.foreground": "#282828ff",
"terminal.bright_foreground": "#282828ff",
"terminal.dim_foreground": "#f9f5d7ff",
"terminal.ansi.black": "#282828ff",
"terminal.ansi.bright_black": "#73675eff",
"terminal.ansi.dim_black": "#f9f5d7ff",
"terminal.ansi.black": "#f9f5d7ff",
"terminal.ansi.bright_black": "#b0a189ff",
"terminal.ansi.dim_black": "#282828ff",
"terminal.ansi.red": "#9d0308ff",
"terminal.ansi.bright_red": "#db8b7aff",
"terminal.ansi.dim_red": "#4e1207ff",
@@ -1693,9 +1633,9 @@
"terminal.ansi.cyan": "#437b59ff",
"terminal.ansi.bright_cyan": "#9fbca8ff",
"terminal.ansi.dim_cyan": "#253e2eff",
"terminal.ansi.white": "#f9f5d7ff",
"terminal.ansi.bright_white": "#f9f5d7ff",
"terminal.ansi.dim_white": "#b0a189ff",
"terminal.ansi.white": "#282828ff",
"terminal.ansi.bright_white": "#282828ff",
"terminal.ansi.dim_white": "#73675eff",
"link_text.hover": "#0b6678ff",
"version_control.added": "#797410ff",
"version_control.modified": "#b57615ff",
@@ -1870,11 +1810,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#066578ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#8f3e71ff",
"font_style": null,
@@ -1930,16 +1865,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#b57613ff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#0b6678ff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#79740eff",
"font_style": null,
@@ -2072,9 +1997,9 @@
"terminal.foreground": "#282828ff",
"terminal.bright_foreground": "#282828ff",
"terminal.dim_foreground": "#f2e5bcff",
"terminal.ansi.black": "#282828ff",
"terminal.ansi.bright_black": "#73675eff",
"terminal.ansi.dim_black": "#f2e5bcff",
"terminal.ansi.black": "#f2e5bcff",
"terminal.ansi.bright_black": "#b0a189ff",
"terminal.ansi.dim_black": "#282828ff",
"terminal.ansi.red": "#9d0308ff",
"terminal.ansi.bright_red": "#db8b7aff",
"terminal.ansi.dim_red": "#4e1207ff",
@@ -2093,9 +2018,9 @@
"terminal.ansi.cyan": "#437b59ff",
"terminal.ansi.bright_cyan": "#9fbca8ff",
"terminal.ansi.dim_cyan": "#253e2eff",
"terminal.ansi.white": "#f2e5bcff",
"terminal.ansi.bright_white": "#f2e5bcff",
"terminal.ansi.dim_white": "#b0a189ff",
"terminal.ansi.white": "#282828ff",
"terminal.ansi.bright_white": "#282828ff",
"terminal.ansi.dim_white": "#73675eff",
"link_text.hover": "#0b6678ff",
"version_control.added": "#797410ff",
"version_control.modified": "#b57615ff",
@@ -2270,11 +2195,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#066578ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#8f3e71ff",
"font_style": null,
@@ -2330,16 +2250,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#b57613ff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#0b6678ff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#79740eff",
"font_style": null,


@@ -99,8 +99,6 @@
"version_control.added": "#27a657ff",
"version_control.modified": "#d3b020ff",
"version_control.deleted": "#e06c76ff",
"version_control.conflict_marker.ours": "#a1c1811a",
"version_control.conflict_marker.theirs": "#74ade81a",
"conflict": "#dec184ff",
"conflict.background": "#dec1841a",
"conflict.border": "#5d4c2fff",
@@ -266,11 +264,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#dce0e5ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#bf956aff",
"font_style": null,
@@ -326,16 +319,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#dfc184ff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#74ade8ff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#a1c181ff",
"font_style": null,
@@ -467,9 +450,9 @@
"terminal.foreground": "#242529ff",
"terminal.bright_foreground": "#242529ff",
"terminal.dim_foreground": "#fafafaff",
"terminal.ansi.black": "#242529ff",
"terminal.ansi.bright_black": "#242529ff",
"terminal.ansi.dim_black": "#97979aff",
"terminal.ansi.black": "#fafafaff",
"terminal.ansi.bright_black": "#aaaaaaff",
"terminal.ansi.dim_black": "#242529ff",
"terminal.ansi.red": "#d36151ff",
"terminal.ansi.bright_red": "#f0b0a4ff",
"terminal.ansi.dim_red": "#6f312aff",
@@ -488,9 +471,9 @@
"terminal.ansi.cyan": "#3a82b7ff",
"terminal.ansi.bright_cyan": "#a3bedaff",
"terminal.ansi.dim_cyan": "#254058ff",
"terminal.ansi.white": "#fafafaff",
"terminal.ansi.bright_white": "#fafafaff",
"terminal.ansi.dim_white": "#aaaaaaff",
"terminal.ansi.white": "#242529ff",
"terminal.ansi.bright_white": "#242529ff",
"terminal.ansi.dim_white": "#97979aff",
"link_text.hover": "#5c78e2ff",
"version_control.added": "#27a657ff",
"version_control.modified": "#d3b020ff",
@@ -660,11 +643,6 @@
"font_style": null,
"font_weight": null
},
"namespace": {
"color": "#242529ff",
"font_style": null,
"font_weight": null
},
"number": {
"color": "#ad6e25ff",
"font_style": null,
@@ -720,16 +698,6 @@
"font_style": null,
"font_weight": null
},
"selector": {
"color": "#669f59ff",
"font_style": null,
"font_weight": null
},
"selector.pseudo": {
"color": "#5c78e2ff",
"font_style": null,
"font_weight": null
},
"string": {
"color": "#649f57ff",
"font_style": null,


@@ -25,6 +25,7 @@ assistant_context_editor.workspace = true
assistant_slash_command.workspace = true
assistant_slash_commands.workspace = true
assistant_tool.workspace = true
async-watch.workspace = true
audio.workspace = true
buffer_diff.workspace = true
chrono.workspace = true
@@ -94,7 +95,6 @@ ui_input.workspace = true
urlencoding.workspace = true
util.workspace = true
uuid.workspace = true
watch.workspace = true
workspace-hack.workspace = true
workspace.workspace = true
zed_actions.workspace = true
@@ -102,13 +102,11 @@ zed_llm_client.workspace = true
zstd.workspace = true
[dev-dependencies]
assistant_tools.workspace = true
buffer_diff = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, "features" = ["test-support"] }
indoc.workspace = true
language = { workspace = true, "features" = ["test-support"] }
language_model = { workspace = true, "features" = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
rand.workspace = true


@@ -1144,10 +1144,6 @@ impl ActiveThread {
cx,
);
}
ThreadEvent::ProfileChanged => {
self.save_thread(cx);
cx.notify();
}
}
}
@@ -1788,31 +1784,12 @@ impl ActiveThread {
fn render_message(&self, ix: usize, window: &mut Window, cx: &mut Context<Self>) -> AnyElement {
let message_id = self.messages[ix];
let workspace = self.workspace.clone();
let thread = self.thread.read(cx);
let is_first_message = ix == 0;
let is_last_message = ix == self.messages.len() - 1;
let Some(message) = thread.message(message_id) else {
let Some(message) = self.thread.read(cx).message(message_id) else {
return Empty.into_any();
};
let is_generating = thread.is_generating();
let is_generating_stale = thread.is_generation_stale().unwrap_or(false);
let loading_dots = (is_generating && is_last_message).then(|| {
h_flex()
.h_8()
.my_3()
.mx_5()
.when(is_generating_stale || message.is_hidden, |this| {
this.child(AnimatedLabel::new("").size(LabelSize::Small))
})
});
if message.is_hidden {
return div().children(loading_dots).into_any();
return Empty.into_any();
}
let message_creases = message.creases.clone();
@@ -1821,6 +1798,9 @@ impl ActiveThread {
return Empty.into_any();
};
let workspace = self.workspace.clone();
let thread = self.thread.read(cx);
// Get all the data we need from thread before we start using it in closures
let checkpoint = thread.checkpoint_for_message(message_id);
let configured_model = thread.configured_model().map(|m| m.model);
@@ -1831,6 +1811,14 @@ impl ActiveThread {
let tool_uses = thread.tool_uses_for_message(message_id, cx);
let has_tool_uses = !tool_uses.is_empty();
let is_generating = thread.is_generating();
let is_generating_stale = thread.is_generation_stale().unwrap_or(false);
let is_first_message = ix == 0;
let is_last_message = ix == self.messages.len() - 1;
let loading_dots = (is_generating_stale && is_last_message)
.then(|| AnimatedLabel::new("").size(LabelSize::Small));
let editing_message_state = self
.editing_message
@@ -2246,7 +2234,17 @@ impl ActiveThread {
parent.child(self.render_rules_item(cx))
})
.child(styled_message)
.children(loading_dots)
.when(is_generating && is_last_message, |this| {
this.child(
h_flex()
.h_8()
.mt_2()
.mb_4()
.ml_4()
.py_1p5()
.when_some(loading_dots, |this, loading_dots| this.child(loading_dots)),
)
})
.when(show_feedback, move |parent| {
parent.child(feedback_items).when_some(
self.open_feedback_editors.get(&message_id),


@@ -3,7 +3,6 @@ mod agent_configuration;
mod agent_diff;
mod agent_model_selector;
mod agent_panel;
mod agent_profile;
mod buffer_codegen;
mod context;
mod context_picker;


@@ -12,7 +12,7 @@ use context_server::ContextServerId;
use fs::Fs;
use gpui::{
Action, Animation, AnimationExt as _, AnyView, App, Entity, EventEmitter, FocusHandle,
Focusable, ScrollHandle, Subscription, Transformation, percentage,
Focusable, ScrollHandle, Subscription, pulsating_between,
};
use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
use project::context_server_store::{ContextServerStatus, ContextServerStore};
@@ -475,6 +475,7 @@ impl AgentConfiguration {
.get(&context_server_id)
.copied()
.unwrap_or_default();
let tools = tools_by_source
.get(&ToolSource::ContextServer {
id: context_server_id.0.clone().into(),
@@ -483,23 +484,25 @@ impl AgentConfiguration {
let tool_count = tools.len();
let border_color = cx.theme().colors().border.opacity(0.6);
let success_color = Color::Success.color(cx);
let (status_indicator, tooltip_text) = match server_status {
ContextServerStatus::Starting => (
Icon::new(IconName::LoadCircle)
.size(IconSize::XSmall)
.color(Color::Accent)
Indicator::dot()
.color(Color::Success)
.with_animation(
SharedString::from(format!("{}-starting", context_server_id.0.clone(),)),
Animation::new(Duration::from_secs(3)).repeat(),
|icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
Animation::new(Duration::from_secs(2))
.repeat()
.with_easing(pulsating_between(0.4, 1.)),
move |this, delta| this.color(success_color.alpha(delta).into()),
)
.into_any_element(),
"Server is starting.",
),
ContextServerStatus::Running => (
Indicator::dot().color(Color::Success).into_any_element(),
"Server is active.",
"Server is running.",
),
ContextServerStatus::Error(_) => (
Indicator::dot().color(Color::Error).into_any_element(),
@@ -523,11 +526,12 @@ impl AgentConfiguration {
.p_1()
.justify_between()
.when(
error.is_some() || are_tools_expanded && tool_count >= 1,
error.is_some() || are_tools_expanded && tool_count > 1,
|element| element.border_b_1().border_color(border_color),
)
.child(
h_flex()
.gap_1p5()
.child(
Disclosure::new(
"tool-list-disclosure",
@@ -547,16 +551,12 @@ impl AgentConfiguration {
})),
)
.child(
h_flex()
.id(SharedString::from(format!("tooltip-{}", item_id)))
.h_full()
.w_3()
.mx_1()
.justify_center()
div()
.id(item_id.clone())
.tooltip(Tooltip::text(tooltip_text))
.child(status_indicator),
)
.child(Label::new(item_id).ml_0p5().mr_1p5())
.child(Label::new(context_server_id.0.clone()).ml_0p5())
.when(is_running, |this| {
this.child(
Label::new(if tool_count == 1 {


@@ -2,21 +2,25 @@ mod profile_modal_header;
use std::sync::Arc;
use agent_settings::{AgentProfileId, AgentSettings, builtin_profiles};
use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profiles};
use assistant_tool::ToolWorkingSet;
use convert_case::{Case, Casing as _};
use editor::Editor;
use fs::Fs;
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*};
use settings::Settings as _;
use gpui::{
DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, WeakEntity,
prelude::*,
};
use settings::{Settings as _, update_settings_file};
use ui::{
KeyBinding, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry, prelude::*,
};
use util::ResultExt as _;
use workspace::{ModalView, Workspace};
use crate::agent_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
use crate::agent_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
use crate::agent_profile::AgentProfile;
use crate::{AgentPanel, ManageProfiles};
use crate::{AgentPanel, ManageProfiles, ThreadStore};
use super::tool_picker::ToolPickerMode;
@@ -99,6 +103,7 @@ pub struct NewProfileMode {
pub struct ManageProfilesModal {
fs: Arc<dyn Fs>,
tools: Entity<ToolWorkingSet>,
thread_store: WeakEntity<ThreadStore>,
focus_handle: FocusHandle,
mode: Mode,
}
@@ -114,8 +119,9 @@ impl ManageProfilesModal {
let fs = workspace.app_state().fs.clone();
let thread_store = panel.read(cx).thread_store();
let tools = thread_store.read(cx).tools();
let thread_store = thread_store.downgrade();
workspace.toggle_modal(window, cx, |window, cx| {
let mut this = Self::new(fs, tools, window, cx);
let mut this = Self::new(fs, tools, thread_store, window, cx);
if let Some(profile_id) = action.customize_tools.clone() {
this.configure_builtin_tools(profile_id, window, cx);
@@ -130,6 +136,7 @@ impl ManageProfilesModal {
pub fn new(
fs: Arc<dyn Fs>,
tools: Entity<ToolWorkingSet>,
thread_store: WeakEntity<ThreadStore>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
@@ -138,6 +145,7 @@ impl ManageProfilesModal {
Self {
fs,
tools,
thread_store,
focus_handle,
mode: Mode::choose_profile(window, cx),
}
@@ -198,6 +206,7 @@ impl ManageProfilesModal {
ToolPickerMode::McpTools,
self.fs.clone(),
self.tools.clone(),
self.thread_store.clone(),
profile_id.clone(),
profile,
cx,
@@ -235,6 +244,7 @@ impl ManageProfilesModal {
ToolPickerMode::BuiltinTools,
self.fs.clone(),
self.tools.clone(),
self.thread_store.clone(),
profile_id.clone(),
profile,
cx,
@@ -260,10 +270,32 @@ impl ManageProfilesModal {
match &self.mode {
Mode::ChooseProfile { .. } => {}
Mode::NewProfile(mode) => {
let name = mode.name_editor.read(cx).text(cx);
let settings = AgentSettings::get_global(cx);
let profile_id =
AgentProfile::create(name, mode.base_profile_id.clone(), self.fs.clone(), cx);
let base_profile = mode
.base_profile_id
.as_ref()
.and_then(|profile_id| settings.profiles.get(profile_id).cloned());
let name = mode.name_editor.read(cx).text(cx);
let profile_id = AgentProfileId(name.to_case(Case::Kebab).into());
let profile = AgentProfile {
name: name.into(),
tools: base_profile
.as_ref()
.map(|profile| profile.tools.clone())
.unwrap_or_default(),
enable_all_context_servers: base_profile
.as_ref()
.map(|profile| profile.enable_all_context_servers)
.unwrap_or_default(),
context_servers: base_profile
.map(|profile| profile.context_servers)
.unwrap_or_default(),
};
self.create_profile(profile_id.clone(), profile, cx);
self.view_profile(profile_id, window, cx);
}
Mode::ViewProfile(_) => {}
@@ -293,6 +325,19 @@ impl ManageProfilesModal {
}
}
}
fn create_profile(
&self,
profile_id: AgentProfileId,
profile: AgentProfile,
cx: &mut Context<Self>,
) {
update_settings_file::<AgentSettings>(self.fs.clone(), cx, {
move |settings, _cx| {
settings.create_profile(profile_id, profile).log_err();
}
});
}
}
impl ModalView for ManageProfilesModal {}
@@ -475,13 +520,14 @@ impl ManageProfilesModal {
) -> impl IntoElement {
let settings = AgentSettings::get_global(cx);
let profile_id = &settings.default_profile;
let profile_name = settings
.profiles
.get(&mode.profile_id)
.map(|profile| profile.name.clone())
.unwrap_or_else(|| "Unknown".into());
let icon = match mode.profile_id.as_str() {
let icon = match profile_id.as_str() {
"write" => IconName::Pencil,
"ask" => IconName::MessageBubbles,
_ => IconName::UserRoundPen,


@@ -1,17 +1,19 @@
use std::{collections::BTreeMap, sync::Arc};
use agent_settings::{
AgentProfileContent, AgentProfileId, AgentProfileSettings, AgentSettings, AgentSettingsContent,
AgentProfile, AgentProfileContent, AgentProfileId, AgentSettings, AgentSettingsContent,
ContextServerPresetContent,
};
use assistant_tool::{ToolSource, ToolWorkingSet};
use fs::Fs;
use gpui::{App, Context, DismissEvent, Entity, EventEmitter, Focusable, Task, WeakEntity, Window};
use picker::{Picker, PickerDelegate};
use settings::update_settings_file;
use settings::{Settings as _, update_settings_file};
use ui::{ListItem, ListItemSpacing, prelude::*};
use util::ResultExt as _;
use crate::ThreadStore;
pub struct ToolPicker {
picker: Entity<Picker<ToolPickerDelegate>>,
}
@@ -69,10 +71,11 @@ pub enum PickerItem {
pub struct ToolPickerDelegate {
tool_picker: WeakEntity<ToolPicker>,
thread_store: WeakEntity<ThreadStore>,
fs: Arc<dyn Fs>,
items: Arc<Vec<PickerItem>>,
profile_id: AgentProfileId,
profile_settings: AgentProfileSettings,
profile: AgentProfile,
filtered_items: Vec<PickerItem>,
selected_index: usize,
mode: ToolPickerMode,
@@ -83,18 +86,20 @@ impl ToolPickerDelegate {
mode: ToolPickerMode,
fs: Arc<dyn Fs>,
tool_set: Entity<ToolWorkingSet>,
thread_store: WeakEntity<ThreadStore>,
profile_id: AgentProfileId,
profile_settings: AgentProfileSettings,
profile: AgentProfile,
cx: &mut Context<ToolPicker>,
) -> Self {
let items = Arc::new(Self::resolve_items(mode, &tool_set, cx));
Self {
tool_picker: cx.entity().downgrade(),
thread_store,
fs,
items,
profile_id,
profile_settings,
profile,
filtered_items: Vec::new(),
selected_index: 0,
mode,
@@ -244,31 +249,28 @@ impl PickerDelegate for ToolPickerDelegate {
};
let is_currently_enabled = if let Some(server_id) = server_id.clone() {
let preset = self
.profile_settings
.context_servers
.entry(server_id)
.or_default();
let preset = self.profile.context_servers.entry(server_id).or_default();
let is_enabled = *preset.tools.entry(tool_name.clone()).or_default();
*preset.tools.entry(tool_name.clone()).or_default() = !is_enabled;
is_enabled
} else {
let is_enabled = *self
.profile_settings
.tools
.entry(tool_name.clone())
.or_default();
*self
.profile_settings
.tools
.entry(tool_name.clone())
.or_default() = !is_enabled;
let is_enabled = *self.profile.tools.entry(tool_name.clone()).or_default();
*self.profile.tools.entry(tool_name.clone()).or_default() = !is_enabled;
is_enabled
};
let active_profile_id = &AgentSettings::get_global(cx).default_profile;
if active_profile_id == &self.profile_id {
self.thread_store
.update(cx, |this, cx| {
this.load_profile(self.profile.clone(), cx);
})
.log_err();
}
update_settings_file::<AgentSettings>(self.fs.clone(), cx, {
let profile_id = self.profile_id.clone();
let default_profile = self.profile_settings.clone();
let default_profile = self.profile.clone();
let server_id = server_id.clone();
let tool_name = tool_name.clone();
move |settings: &mut AgentSettingsContent, _cx| {
@@ -346,18 +348,14 @@ impl PickerDelegate for ToolPickerDelegate {
),
PickerItem::Tool { name, server_id } => {
let is_enabled = if let Some(server_id) = server_id {
self.profile_settings
self.profile
.context_servers
.get(server_id.as_ref())
.and_then(|preset| preset.tools.get(name))
.copied()
.unwrap_or(self.profile_settings.enable_all_context_servers)
.unwrap_or(self.profile.enable_all_context_servers)
} else {
self.profile_settings
.tools
.get(name)
.copied()
.unwrap_or(false)
self.profile.tools.get(name).copied().unwrap_or(false)
};
Some(


@@ -1378,8 +1378,7 @@ impl AgentDiff {
| ThreadEvent::CheckpointChanged
| ThreadEvent::ToolConfirmationNeeded
| ThreadEvent::ToolUseLimitReached
| ThreadEvent::CancelEditing
| ThreadEvent::ProfileChanged => {}
| ThreadEvent::CancelEditing => {}
}
}


@@ -57,7 +57,7 @@ use zed_llm_client::{CompletionIntent, UsageLimit};
use crate::active_thread::{self, ActiveThread, ActiveThreadEvent};
use crate::agent_configuration::{AgentConfiguration, AssistantConfigurationEvent};
use crate::agent_diff::AgentDiff;
use crate::history_store::{HistoryEntryId, HistoryStore};
use crate::history_store::{HistoryStore, RecentEntry};
use crate::message_editor::{MessageEditor, MessageEditorEvent};
use crate::thread::{Thread, ThreadError, ThreadId, ThreadSummary, TokenUsageRatio};
use crate::thread_history::{HistoryEntryElement, ThreadHistory};
@@ -257,7 +257,6 @@ impl ActiveView {
pub fn prompt_editor(
context_editor: Entity<ContextEditor>,
history_store: Entity<HistoryStore>,
language_registry: Arc<LanguageRegistry>,
window: &mut Window,
cx: &mut App,
@@ -323,19 +322,6 @@ impl ActiveView {
editor.set_text(summary, window, cx);
})
}
ContextEvent::PathChanged { old_path, new_path } => {
history_store.update(cx, |history_store, cx| {
if let Some(old_path) = old_path {
history_store
.replace_recently_opened_text_thread(old_path, new_path, cx);
} else {
history_store.push_recently_opened_entry(
HistoryEntryId::Context(new_path.clone()),
cx,
);
}
});
}
_ => {}
}
}),
@@ -530,7 +516,8 @@ impl AgentPanel {
HistoryStore::new(
thread_store.clone(),
context_store.clone(),
[HistoryEntryId::Thread(thread_id)],
[RecentEntry::Thread(thread_id, thread.clone())],
window,
cx,
)
});
@@ -557,13 +544,7 @@ impl AgentPanel {
editor.insert_default_prompt(window, cx);
editor
});
ActiveView::prompt_editor(
context_editor,
history_store.clone(),
language_registry.clone(),
window,
cx,
)
ActiveView::prompt_editor(context_editor, language_registry.clone(), window, cx)
}
};
@@ -600,9 +581,86 @@ impl AgentPanel {
let panel = weak_panel.clone();
let assistant_navigation_menu =
ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| {
if let Some(panel) = panel.upgrade() {
menu = Self::populate_recently_opened_menu_section(menu, panel, cx);
let recently_opened = panel
.update(cx, |this, cx| {
this.history_store.update(cx, |history_store, cx| {
history_store.recently_opened_entries(cx)
})
})
.unwrap_or_default();
if !recently_opened.is_empty() {
menu = menu.header("Recently Opened");
for entry in recently_opened.iter() {
if let RecentEntry::Context(context) = entry {
if context.read(cx).path().is_none() {
log::error!(
"bug: text thread in recent history list was never saved"
);
continue;
}
}
let summary = entry.summary(cx);
menu = menu.entry_with_end_slot_on_hover(
summary,
None,
{
let panel = panel.clone();
let entry = entry.clone();
move |window, cx| {
panel
.update(cx, {
let entry = entry.clone();
move |this, cx| match entry {
RecentEntry::Thread(_, thread) => {
this.open_thread(thread, window, cx)
}
RecentEntry::Context(context) => {
let Some(path) = context.read(cx).path()
else {
return;
};
this.open_saved_prompt_editor(
path.clone(),
window,
cx,
)
.detach_and_log_err(cx)
}
}
})
.ok();
}
},
IconName::Close,
"Close Entry".into(),
{
let panel = panel.clone();
let entry = entry.clone();
move |_window, cx| {
panel
.update(cx, |this, cx| {
this.history_store.update(
cx,
|history_store, cx| {
history_store.remove_recently_opened_entry(
&entry, cx,
);
},
);
})
.ok();
}
},
);
}
menu = menu.separator();
}
menu.action("View All", Box::new(OpenHistory))
.end_slot_action(DeleteRecentlyOpenThread.boxed_clone())
.fixed_width(px(320.).into())
@@ -840,7 +898,6 @@ impl AgentPanel {
self.set_active_view(
ActiveView::prompt_editor(
context_editor.clone(),
self.history_store.clone(),
self.language_registry.clone(),
window,
cx,
@@ -927,13 +984,7 @@ impl AgentPanel {
)
});
self.set_active_view(
ActiveView::prompt_editor(
editor.clone(),
self.history_store.clone(),
self.language_registry.clone(),
window,
cx,
),
ActiveView::prompt_editor(editor.clone(), self.language_registry.clone(), window, cx),
window,
cx,
);
@@ -1332,6 +1383,16 @@ impl AgentPanel {
}
}
}
ActiveView::TextThread { context_editor, .. } => {
let context = context_editor.read(cx).context();
// When switching away from an unsaved text thread, delete its entry.
if context.read(cx).path().is_none() {
let context = context.clone();
self.history_store.update(cx, |store, cx| {
store.remove_recently_opened_entry(&RecentEntry::Context(context), cx);
});
}
}
_ => {}
}
@@ -1339,14 +1400,13 @@ impl AgentPanel {
ActiveView::Thread { thread, .. } => self.history_store.update(cx, |store, cx| {
if let Some(thread) = thread.upgrade() {
let id = thread.read(cx).id().clone();
store.push_recently_opened_entry(HistoryEntryId::Thread(id), cx);
store.push_recently_opened_entry(RecentEntry::Thread(id, thread), cx);
}
}),
ActiveView::TextThread { context_editor, .. } => {
self.history_store.update(cx, |store, cx| {
if let Some(path) = context_editor.read(cx).context().read(cx).path() {
store.push_recently_opened_entry(HistoryEntryId::Context(path.clone()), cx)
}
let context = context_editor.read(cx).context().clone();
store.push_recently_opened_entry(RecentEntry::Context(context), cx)
})
}
_ => {}
@@ -1365,70 +1425,6 @@ impl AgentPanel {
self.focus_handle(cx).focus(window);
}
fn populate_recently_opened_menu_section(
mut menu: ContextMenu,
panel: Entity<Self>,
cx: &mut Context<ContextMenu>,
) -> ContextMenu {
let entries = panel
.read(cx)
.history_store
.read(cx)
.recently_opened_entries(cx);
if entries.is_empty() {
return menu;
}
menu = menu.header("Recently Opened");
for entry in entries {
let title = entry.title().clone();
let id = entry.id();
menu = menu.entry_with_end_slot_on_hover(
title,
None,
{
let panel = panel.downgrade();
let id = id.clone();
move |window, cx| {
let id = id.clone();
panel
.update(cx, move |this, cx| match id {
HistoryEntryId::Thread(id) => this
.open_thread_by_id(&id, window, cx)
.detach_and_log_err(cx),
HistoryEntryId::Context(path) => this
.open_saved_prompt_editor(path.clone(), window, cx)
.detach_and_log_err(cx),
})
.ok();
}
},
IconName::Close,
"Close Entry".into(),
{
let panel = panel.downgrade();
let id = id.clone();
move |_window, cx| {
panel
.update(cx, |this, cx| {
this.history_store.update(cx, |history_store, cx| {
history_store.remove_recently_opened_entry(&id, cx);
});
})
.ok();
}
},
);
}
menu = menu.separator();
menu
}
}
impl Focusable for AgentPanel {


@@ -1,334 +0,0 @@
use std::sync::Arc;
use agent_settings::{AgentProfileId, AgentProfileSettings, AgentSettings};
use assistant_tool::{Tool, ToolSource, ToolWorkingSet};
use collections::IndexMap;
use convert_case::{Case, Casing};
use fs::Fs;
use gpui::{App, Entity};
use settings::{Settings, update_settings_file};
use ui::SharedString;
use util::ResultExt;
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct AgentProfile {
id: AgentProfileId,
tool_set: Entity<ToolWorkingSet>,
}
pub type AvailableProfiles = IndexMap<AgentProfileId, SharedString>;
impl AgentProfile {
pub fn new(id: AgentProfileId, tool_set: Entity<ToolWorkingSet>) -> Self {
Self { id, tool_set }
}
/// Saves a new profile to the settings.
pub fn create(
name: String,
base_profile_id: Option<AgentProfileId>,
fs: Arc<dyn Fs>,
cx: &App,
) -> AgentProfileId {
let id = AgentProfileId(name.to_case(Case::Kebab).into());
let base_profile =
base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned());
let profile_settings = AgentProfileSettings {
name: name.into(),
tools: base_profile
.as_ref()
.map(|profile| profile.tools.clone())
.unwrap_or_default(),
enable_all_context_servers: base_profile
.as_ref()
.map(|profile| profile.enable_all_context_servers)
.unwrap_or_default(),
context_servers: base_profile
.map(|profile| profile.context_servers)
.unwrap_or_default(),
};
update_settings_file::<AgentSettings>(fs, cx, {
let id = id.clone();
move |settings, _cx| {
settings.create_profile(id, profile_settings).log_err();
}
});
id
}
/// Returns a map of AgentProfileIds to their names
pub fn available_profiles(cx: &App) -> AvailableProfiles {
let mut profiles = AvailableProfiles::default();
for (id, profile) in AgentSettings::get_global(cx).profiles.iter() {
profiles.insert(id.clone(), profile.name.clone());
}
profiles
}
pub fn id(&self) -> &AgentProfileId {
&self.id
}
pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else {
return Vec::new();
};
self.tool_set
.read(cx)
.tools(cx)
.into_iter()
.filter(|tool| Self::is_enabled(settings, tool.source(), tool.name()))
.collect()
}
fn is_enabled(settings: &AgentProfileSettings, source: ToolSource, name: String) -> bool {
match source {
ToolSource::Native => *settings.tools.get(name.as_str()).unwrap_or(&false),
ToolSource::ContextServer { id } => {
if settings.enable_all_context_servers {
return true;
}
let Some(preset) = settings.context_servers.get(id.as_ref()) else {
return false;
};
*preset.tools.get(name.as_str()).unwrap_or(&false)
}
}
}
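// Illustrative resolution of the rules above (names are hypothetical):
// with `tools = { "read_file": true }`, `enable_all_context_servers = false`,
// and `context_servers = { "mcp": { tools: { "fetch": true } } }`,
// a native "read_file" and the "mcp" server's "fetch" are enabled,
// while any tool without an explicit `true` entry falls back to disabled.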
}
#[cfg(test)]
mod tests {
use agent_settings::ContextServerPreset;
use assistant_tool::ToolRegistry;
use collections::IndexMap;
use gpui::{AppContext, TestAppContext};
use http_client::FakeHttpClient;
use project::Project;
use settings::{Settings, SettingsStore};
use ui::SharedString;
use super::*;
#[gpui::test]
async fn test_enabled_built_in_tools_for_profile(cx: &mut TestAppContext) {
init_test_settings(cx);
let id = AgentProfileId::default();
let profile_settings = cx.read(|cx| {
AgentSettings::get_global(cx)
.profiles
.get(&id)
.unwrap()
.clone()
});
let tool_set = default_tool_set(cx);
let profile = AgentProfile::new(id.clone(), tool_set);
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
.into_iter()
.map(|tool| tool.name())
.collect::<Vec<_>>();
enabled_tools.sort();
let mut expected_tools = profile_settings
.tools
.into_iter()
.filter_map(|(tool, enabled)| enabled.then_some(tool.to_string()))
// Provider dependent
.filter(|tool| tool != "web_search")
.collect::<Vec<_>>();
// Plus all registered MCP tools
expected_tools.extend(["enabled_mcp_tool".into(), "disabled_mcp_tool".into()]);
expected_tools.sort();
assert_eq!(enabled_tools, expected_tools);
}
#[gpui::test]
async fn test_custom_mcp_settings(cx: &mut TestAppContext) {
init_test_settings(cx);
let id = AgentProfileId("custom_mcp".into());
let profile_settings = cx.read(|cx| {
AgentSettings::get_global(cx)
.profiles
.get(&id)
.unwrap()
.clone()
});
let tool_set = default_tool_set(cx);
let profile = AgentProfile::new(id.clone(), tool_set);
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
.into_iter()
.map(|tool| tool.name())
.collect::<Vec<_>>();
enabled_tools.sort();
let mut expected_tools = profile_settings.context_servers["mcp"]
.tools
.iter()
.filter_map(|(key, enabled)| enabled.then(|| key.to_string()))
.collect::<Vec<_>>();
expected_tools.sort();
assert_eq!(enabled_tools, expected_tools);
}
#[gpui::test]
async fn test_only_built_in(cx: &mut TestAppContext) {
init_test_settings(cx);
let id = AgentProfileId("write_minus_mcp".into());
let profile_settings = cx.read(|cx| {
AgentSettings::get_global(cx)
.profiles
.get(&id)
.unwrap()
.clone()
});
let tool_set = default_tool_set(cx);
let profile = AgentProfile::new(id.clone(), tool_set);
let mut enabled_tools = cx
.read(|cx| profile.enabled_tools(cx))
.into_iter()
.map(|tool| tool.name())
.collect::<Vec<_>>();
enabled_tools.sort();
let mut expected_tools = profile_settings
.tools
.into_iter()
.filter_map(|(tool, enabled)| enabled.then_some(tool.to_string()))
// Provider dependent
.filter(|tool| tool != "web_search")
.collect::<Vec<_>>();
expected_tools.sort();
assert_eq!(enabled_tools, expected_tools);
}
fn init_test_settings(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
Project::init_settings(cx);
AgentSettings::register(cx);
language_model::init_settings(cx);
ToolRegistry::default_global(cx);
assistant_tools::init(FakeHttpClient::with_404_response(), cx);
});
cx.update(|cx| {
let mut agent_settings = AgentSettings::get_global(cx).clone();
agent_settings.profiles.insert(
AgentProfileId("write_minus_mcp".into()),
AgentProfileSettings {
name: "write_minus_mcp".into(),
enable_all_context_servers: false,
..agent_settings.profiles[&AgentProfileId::default()].clone()
},
);
agent_settings.profiles.insert(
AgentProfileId("custom_mcp".into()),
AgentProfileSettings {
name: "mcp".into(),
tools: IndexMap::default(),
enable_all_context_servers: false,
context_servers: IndexMap::from_iter([("mcp".into(), context_server_preset())]),
},
);
AgentSettings::override_global(agent_settings, cx);
})
}
fn context_server_preset() -> ContextServerPreset {
ContextServerPreset {
tools: IndexMap::from_iter([
("enabled_mcp_tool".into(), true),
("disabled_mcp_tool".into(), false),
]),
}
}
fn default_tool_set(cx: &mut TestAppContext) -> Entity<ToolWorkingSet> {
cx.new(|_| {
let mut tool_set = ToolWorkingSet::default();
tool_set.insert(Arc::new(FakeTool::new("enabled_mcp_tool", "mcp")));
tool_set.insert(Arc::new(FakeTool::new("disabled_mcp_tool", "mcp")));
tool_set
})
}
struct FakeTool {
name: String,
source: SharedString,
}
impl FakeTool {
fn new(name: impl Into<String>, source: impl Into<SharedString>) -> Self {
Self {
name: name.into(),
source: source.into(),
}
}
}
impl Tool for FakeTool {
fn name(&self) -> String {
self.name.clone()
}
fn source(&self) -> ToolSource {
ToolSource::ContextServer {
id: self.source.clone(),
}
}
fn description(&self) -> String {
unimplemented!()
}
fn icon(&self) -> ui::IconName {
unimplemented!()
}
fn needs_confirmation(&self, _input: &serde_json::Value, _cx: &App) -> bool {
unimplemented!()
}
fn ui_text(&self, _input: &serde_json::Value) -> String {
unimplemented!()
}
fn run(
self: Arc<Self>,
_input: serde_json::Value,
_request: Arc<language_model::LanguageModelRequest>,
_project: Entity<Project>,
_action_log: Entity<assistant_tool::ActionLog>,
_model: Arc<dyn language_model::LanguageModel>,
_window: Option<gpui::AnyWindowHandle>,
_cx: &mut App,
) -> assistant_tool::ToolResult {
unimplemented!()
}
fn may_perform_edits(&self) -> bool {
unimplemented!()
}
}
}

View File

@@ -386,10 +386,8 @@ impl CodegenAlternative {
async { Ok(LanguageModelTextStream::default()) }.boxed_local()
} else {
let request = self.build_request(&model, user_prompt, cx)?;
cx.spawn(async move |_, cx| {
Ok(model.stream_completion_text(request.await, &cx).await?)
})
.boxed_local()
cx.spawn(async move |_, cx| model.stream_completion_text(request.await, &cx).await)
.boxed_local()
};
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
Ok(())

View File

@@ -1,5 +1,7 @@
use std::cell::RefCell;
use std::ops::Range;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
@@ -765,7 +767,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
let snapshot = buffer.read(cx).snapshot();
let source_range = snapshot.anchor_before(state.source_range.start)
..snapshot.anchor_after(state.source_range.end);
..snapshot.anchor_before(state.source_range.end);
let thread_store = self.thread_store.clone();
let text_thread_store = self.text_thread_store.clone();
@@ -910,6 +912,16 @@ impl CompletionProvider for ContextPickerCompletionProvider {
})
}
fn resolve_completions(
&self,
_buffer: Entity<Buffer>,
_completion_indices: Vec<usize>,
_completions: Rc<RefCell<Box<[Completion]>>>,
_cx: &mut Context<Editor>,
) -> Task<Result<bool>> {
Task::ready(Ok(true))
}
fn is_completion_trigger(
&self,
buffer: &Entity<language::Buffer>,
@@ -1065,7 +1077,7 @@ mod tests {
use project::{Project, ProjectPath};
use serde_json::json;
use settings::SettingsStore;
use std::{ops::Deref, rc::Rc};
use std::ops::Deref;
use util::{path, separator};
use workspace::{AppState, Item};

View File

@@ -282,18 +282,15 @@ pub fn unordered_thread_entries(
text_thread_store: Entity<TextThreadStore>,
cx: &App,
) -> impl Iterator<Item = (DateTime<Utc>, ThreadContextEntry)> {
let threads = thread_store
.read(cx)
.reverse_chronological_threads()
.map(|thread| {
(
thread.updated_at,
ThreadContextEntry::Thread {
id: thread.id.clone(),
title: thread.summary.clone(),
},
)
});
let threads = thread_store.read(cx).unordered_threads().map(|thread| {
(
thread.updated_at,
ThreadContextEntry::Thread {
id: thread.id.clone(),
title: thread.summary.clone(),
},
)
});
let text_threads = text_thread_store
.read(cx)
@@ -303,7 +300,7 @@ pub fn unordered_thread_entries(
context.mtime.to_utc(),
ThreadContextEntry::Context {
path: context.path.clone(),
title: context.title.clone(),
title: context.title.clone().into(),
},
)
});

View File

@@ -104,15 +104,7 @@ impl Tool for ContextServerTool {
tool_name,
arguments
);
let response = protocol
.request::<context_server::types::requests::CallTool>(
context_server::types::CallToolParams {
name: tool_name,
arguments,
meta: None,
},
)
.await?;
let response = protocol.run_tool(tool_name, arguments).await?;
let mut result = String::new();
for content in response.content {
@@ -123,9 +115,6 @@ impl Tool for ContextServerTool {
types::ToolResponseContent::Image { .. } => {
log::warn!("Ignoring image content from tool response");
}
types::ToolResponseContent::Audio { .. } => {
log::warn!("Ignoring audio content from tool response");
}
types::ToolResponseContent::Resource { .. } => {
log::warn!("Ignoring resource content from tool response");
}

View File

@@ -1,17 +1,18 @@
use std::{collections::VecDeque, path::Path, sync::Arc};
use anyhow::{Context as _, Result};
use assistant_context_editor::SavedContextMetadata;
use anyhow::Context as _;
use assistant_context_editor::{AssistantContext, SavedContextMetadata};
use chrono::{DateTime, Utc};
use gpui::{AsyncApp, Entity, SharedString, Task, prelude::*};
use itertools::Itertools;
use paths::contexts_dir;
use futures::future::{TryFutureExt as _, join_all};
use gpui::{Entity, Task, prelude::*};
use serde::{Deserialize, Serialize};
use smol::future::FutureExt;
use std::time::Duration;
use ui::App;
use ui::{App, SharedString, Window};
use util::ResultExt as _;
use crate::{
Thread,
thread::ThreadId,
thread_store::{SerializedThreadMetadata, ThreadStore},
};
@@ -40,34 +41,52 @@ impl HistoryEntry {
HistoryEntry::Context(context) => HistoryEntryId::Context(context.path.clone()),
}
}
pub fn title(&self) -> &SharedString {
match self {
HistoryEntry::Thread(thread) => &thread.summary,
HistoryEntry::Context(context) => &context.title,
}
}
}
/// Generic identifier for a history entry.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq)]
pub enum HistoryEntryId {
Thread(ThreadId),
Context(Arc<Path>),
}
#[derive(Clone, Debug)]
pub(crate) enum RecentEntry {
Thread(ThreadId, Entity<Thread>),
Context(Entity<AssistantContext>),
}
impl PartialEq for RecentEntry {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Self::Thread(l0, _), Self::Thread(r0, _)) => l0 == r0,
(Self::Context(l0), Self::Context(r0)) => l0 == r0,
_ => false,
}
}
}
impl Eq for RecentEntry {}
impl RecentEntry {
pub(crate) fn summary(&self, cx: &App) -> SharedString {
match self {
RecentEntry::Thread(_, thread) => thread.read(cx).summary().or_default(),
RecentEntry::Context(context) => context.read(cx).summary().or_default(),
}
}
}
#[derive(Serialize, Deserialize)]
enum SerializedRecentOpen {
enum SerializedRecentEntry {
Thread(String),
ContextName(String),
/// Old format which stores the full path
Context(String),
}
pub struct HistoryStore {
thread_store: Entity<ThreadStore>,
context_store: Entity<assistant_context_editor::ContextStore>,
recently_opened_entries: VecDeque<HistoryEntryId>,
recently_opened_entries: VecDeque<RecentEntry>,
_subscriptions: Vec<gpui::Subscription>,
_save_recently_opened_entries_task: Task<()>,
}
@@ -76,7 +95,8 @@ impl HistoryStore {
pub fn new(
thread_store: Entity<ThreadStore>,
context_store: Entity<assistant_context_editor::ContextStore>,
initial_recent_entries: impl IntoIterator<Item = HistoryEntryId>,
initial_recent_entries: impl IntoIterator<Item = RecentEntry>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let subscriptions = vec![
@@ -84,20 +104,68 @@ impl HistoryStore {
cx.observe(&context_store, |_, _, cx| cx.notify()),
];
cx.spawn(async move |this, cx| {
let entries = Self::load_recently_opened_entries(cx).await.log_err()?;
this.update(cx, |this, _| {
this.recently_opened_entries
.extend(
entries.into_iter().take(
MAX_RECENTLY_OPENED_ENTRIES
.saturating_sub(this.recently_opened_entries.len()),
),
);
window
.spawn(cx, {
let thread_store = thread_store.downgrade();
let context_store = context_store.downgrade();
let this = cx.weak_entity();
async move |cx| {
let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
let contents = cx
.background_spawn(async move { std::fs::read_to_string(path) })
.await
.ok()?;
let entries = serde_json::from_str::<Vec<SerializedRecentEntry>>(&contents)
.context("deserializing persisted agent panel navigation history")
.log_err()?
.into_iter()
.take(MAX_RECENTLY_OPENED_ENTRIES)
.map(|serialized| match serialized {
SerializedRecentEntry::Thread(id) => thread_store
.update_in(cx, |thread_store, window, cx| {
let thread_id = ThreadId::from(id.as_str());
thread_store
.open_thread(&thread_id, window, cx)
.map_ok(|thread| RecentEntry::Thread(thread_id, thread))
.boxed()
})
.unwrap_or_else(|_| {
async {
anyhow::bail!("no thread store");
}
.boxed()
}),
SerializedRecentEntry::Context(id) => context_store
.update(cx, |context_store, cx| {
context_store
.open_local_context(Path::new(&id).into(), cx)
.map_ok(RecentEntry::Context)
.boxed()
})
.unwrap_or_else(|_| {
async {
anyhow::bail!("no context store");
}
.boxed()
}),
});
let entries = join_all(entries)
.await
.into_iter()
.filter_map(|result| result.log_with_level(log::Level::Debug))
.collect::<VecDeque<_>>();
this.update(cx, |this, _| {
this.recently_opened_entries.extend(entries);
this.recently_opened_entries
.truncate(MAX_RECENTLY_OPENED_ENTRIES);
})
.ok();
Some(())
}
})
.ok()
})
.detach();
.detach();
Self {
thread_store,
@@ -116,20 +184,19 @@ impl HistoryStore {
return history_entries;
}
history_entries.extend(
self.thread_store
.read(cx)
.reverse_chronological_threads()
.cloned()
.map(HistoryEntry::Thread),
);
history_entries.extend(
self.context_store
.read(cx)
.unordered_contexts()
.cloned()
.map(HistoryEntry::Context),
);
for thread in self
.thread_store
.update(cx, |this, _cx| this.reverse_chronological_threads())
{
history_entries.push(HistoryEntry::Thread(thread));
}
for context in self
.context_store
.update(cx, |this, _cx| this.reverse_chronological_contexts())
{
history_entries.push(HistoryEntry::Context(context));
}
history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at()));
history_entries
@@ -139,62 +206,15 @@ impl HistoryStore {
self.entries(cx).into_iter().take(limit).collect()
}
pub fn recently_opened_entries(&self, cx: &App) -> Vec<HistoryEntry> {
#[cfg(debug_assertions)]
if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
return Vec::new();
}
let thread_entries = self
.thread_store
.read(cx)
.reverse_chronological_threads()
.flat_map(|thread| {
self.recently_opened_entries
.iter()
.enumerate()
.flat_map(|(index, entry)| match entry {
HistoryEntryId::Thread(id) if &thread.id == id => {
Some((index, HistoryEntry::Thread(thread.clone())))
}
_ => None,
})
});
let context_entries =
self.context_store
.read(cx)
.unordered_contexts()
.flat_map(|context| {
self.recently_opened_entries
.iter()
.enumerate()
.flat_map(|(index, entry)| match entry {
HistoryEntryId::Context(path) if &context.path == path => {
Some((index, HistoryEntry::Context(context.clone())))
}
_ => None,
})
});
thread_entries
.chain(context_entries)
// optimization to halt iteration early
.take(self.recently_opened_entries.len())
.sorted_unstable_by_key(|(index, _)| *index)
.map(|(_, entry)| entry)
.collect()
}
fn save_recently_opened_entries(&mut self, cx: &mut Context<Self>) {
let serialized_entries = self
.recently_opened_entries
.iter()
.filter_map(|entry| match entry {
HistoryEntryId::Context(path) => path.file_name().map(|file| {
SerializedRecentOpen::ContextName(file.to_string_lossy().to_string())
}),
HistoryEntryId::Thread(id) => Some(SerializedRecentOpen::Thread(id.to_string())),
RecentEntry::Context(context) => Some(SerializedRecentEntry::Context(
context.read(cx).path()?.to_str()?.to_owned(),
)),
RecentEntry::Thread(id, _) => Some(SerializedRecentEntry::Thread(id.to_string())),
})
.collect::<Vec<_>>();
@@ -213,33 +233,7 @@ impl HistoryStore {
});
}
fn load_recently_opened_entries(cx: &AsyncApp) -> Task<Result<Vec<HistoryEntryId>>> {
cx.background_spawn(async move {
let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
let contents = smol::fs::read_to_string(path).await?;
let entries = serde_json::from_str::<Vec<SerializedRecentOpen>>(&contents)
.context("deserializing persisted agent panel navigation history")?
.into_iter()
.take(MAX_RECENTLY_OPENED_ENTRIES)
.flat_map(|entry| match entry {
SerializedRecentOpen::Thread(id) => {
Some(HistoryEntryId::Thread(id.as_str().into()))
}
SerializedRecentOpen::ContextName(file_name) => Some(HistoryEntryId::Context(
contexts_dir().join(file_name).into(),
)),
SerializedRecentOpen::Context(path) => {
Path::new(&path).file_name().map(|file_name| {
HistoryEntryId::Context(contexts_dir().join(file_name).into())
})
}
})
.collect::<Vec<_>>();
Ok(entries)
})
}
pub fn push_recently_opened_entry(&mut self, entry: HistoryEntryId, cx: &mut Context<Self>) {
pub fn push_recently_opened_entry(&mut self, entry: RecentEntry, cx: &mut Context<Self>) {
self.recently_opened_entries
.retain(|old_entry| old_entry != &entry);
self.recently_opened_entries.push_front(entry);
@@ -250,33 +244,24 @@ impl HistoryStore {
pub fn remove_recently_opened_thread(&mut self, id: ThreadId, cx: &mut Context<Self>) {
self.recently_opened_entries.retain(|entry| match entry {
HistoryEntryId::Thread(thread_id) if thread_id == &id => false,
RecentEntry::Thread(thread_id, _) if thread_id == &id => false,
_ => true,
});
self.save_recently_opened_entries(cx);
}
pub fn replace_recently_opened_text_thread(
&mut self,
old_path: &Path,
new_path: &Arc<Path>,
cx: &mut Context<Self>,
) {
for entry in &mut self.recently_opened_entries {
match entry {
HistoryEntryId::Context(path) if path.as_ref() == old_path => {
*entry = HistoryEntryId::Context(new_path.clone());
break;
}
_ => {}
}
}
self.save_recently_opened_entries(cx);
}
pub fn remove_recently_opened_entry(&mut self, entry: &HistoryEntryId, cx: &mut Context<Self>) {
pub fn remove_recently_opened_entry(&mut self, entry: &RecentEntry, cx: &mut Context<Self>) {
self.recently_opened_entries
.retain(|old_entry| old_entry != entry);
self.save_recently_opened_entries(cx);
}
pub fn recently_opened_entries(&self, _cx: &mut Context<Self>) -> VecDeque<RecentEntry> {
#[cfg(debug_assertions)]
if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
return VecDeque::new();
}
self.recently_opened_entries.clone()
}
}
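// Sketch: the empty-history branch above can be exercised locally in debug
// builds, assuming a Unix-style shell:
//
//     ZED_SIMULATE_NO_THREAD_HISTORY=1 cargo run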

View File

@@ -1011,7 +1011,7 @@ impl InlineAssistant {
self.update_editor_highlights(&editor, cx);
}
} else {
entry.get_mut().highlight_updates.send(()).ok();
entry.get().highlight_updates.send(()).ok();
}
}
@@ -1331,7 +1331,7 @@ impl InlineAssistant {
editor.clear_gutter_highlights::<GutterPendingRange>(cx);
} else {
editor.highlight_gutter::<GutterPendingRange>(
gutter_pending_ranges,
&gutter_pending_ranges,
|cx| cx.theme().status().info_background,
cx,
)
@@ -1342,7 +1342,7 @@ impl InlineAssistant {
editor.clear_gutter_highlights::<GutterTransformedRange>(cx);
} else {
editor.highlight_gutter::<GutterTransformedRange>(
gutter_transformed_ranges,
&gutter_transformed_ranges,
|cx| cx.theme().status().info,
cx,
)
@@ -1519,7 +1519,7 @@ impl InlineAssistant {
struct EditorInlineAssists {
assist_ids: Vec<InlineAssistId>,
scroll_lock: Option<InlineAssistScrollLock>,
highlight_updates: watch::Sender<()>,
highlight_updates: async_watch::Sender<()>,
_update_highlights: Task<Result<()>>,
_subscriptions: Vec<gpui::Subscription>,
}
@@ -1531,7 +1531,7 @@ struct InlineAssistScrollLock {
impl EditorInlineAssists {
fn new(editor: &Entity<Editor>, window: &mut Window, cx: &mut App) -> Self {
let (highlight_updates_tx, mut highlight_updates_rx) = watch::channel(());
let (highlight_updates_tx, mut highlight_updates_rx) = async_watch::channel(());
Self {
assist_ids: Vec::new(),
scroll_lock: None,
@@ -1689,7 +1689,7 @@ impl InlineAssist {
if let Some(editor) = editor.upgrade() {
InlineAssistant::update_global(cx, |this, cx| {
if let Some(editor_assists) =
this.assists_by_editor.get_mut(&editor.downgrade())
this.assists_by_editor.get(&editor.downgrade())
{
editor_assists.highlight_updates.send(()).ok();
}

View File

@@ -175,7 +175,8 @@ impl MessageEditor {
)
});
let incompatible_tools = cx.new(|cx| IncompatibleToolsState::new(thread.clone(), cx));
let incompatible_tools =
cx.new(|cx| IncompatibleToolsState::new(thread.read(cx).tools().clone(), cx));
let subscriptions = vec![
cx.subscribe_in(&context_strip, window, Self::handle_context_strip_event),
@@ -203,8 +204,15 @@ impl MessageEditor {
)
});
let profile_selector =
cx.new(|cx| ProfileSelector::new(fs, thread.clone(), editor.focus_handle(cx), cx));
let profile_selector = cx.new(|cx| {
ProfileSelector::new(
fs,
thread.clone(),
thread_store,
editor.focus_handle(cx),
cx,
)
});
Self {
editor: editor.clone(),

View File

@@ -1,24 +1,26 @@
use std::sync::Arc;
use agent_settings::{AgentDockPosition, AgentProfileId, AgentSettings, builtin_profiles};
use agent_settings::{
AgentDockPosition, AgentProfile, AgentProfileId, AgentSettings, GroupedAgentProfiles,
builtin_profiles,
};
use fs::Fs;
use gpui::{Action, Empty, Entity, FocusHandle, Subscription, prelude::*};
use gpui::{Action, Empty, Entity, FocusHandle, Subscription, WeakEntity, prelude::*};
use language_model::LanguageModelRegistry;
use settings::{Settings as _, SettingsStore, update_settings_file};
use ui::{
ContextMenu, ContextMenuEntry, DocumentationSide, PopoverMenu, PopoverMenuHandle, Tooltip,
prelude::*,
};
use util::ResultExt as _;
use crate::{
ManageProfiles, Thread, ToggleProfileSelector,
agent_profile::{AgentProfile, AvailableProfiles},
};
use crate::{ManageProfiles, Thread, ThreadStore, ToggleProfileSelector};
pub struct ProfileSelector {
profiles: AvailableProfiles,
profiles: GroupedAgentProfiles,
fs: Arc<dyn Fs>,
thread: Entity<Thread>,
thread_store: WeakEntity<ThreadStore>,
menu_handle: PopoverMenuHandle<ContextMenu>,
focus_handle: FocusHandle,
_subscriptions: Vec<Subscription>,
@@ -28,6 +30,7 @@ impl ProfileSelector {
pub fn new(
fs: Arc<dyn Fs>,
thread: Entity<Thread>,
thread_store: WeakEntity<ThreadStore>,
focus_handle: FocusHandle,
cx: &mut Context<Self>,
) -> Self {
@@ -36,9 +39,10 @@ impl ProfileSelector {
});
Self {
profiles: AgentProfile::available_profiles(cx),
profiles: GroupedAgentProfiles::from_settings(AgentSettings::get_global(cx)),
fs,
thread,
thread_store,
menu_handle: PopoverMenuHandle::default(),
focus_handle,
_subscriptions: vec![settings_subscription],
@@ -50,7 +54,7 @@ impl ProfileSelector {
}
fn refresh_profiles(&mut self, cx: &mut Context<Self>) {
self.profiles = AgentProfile::available_profiles(cx);
self.profiles = GroupedAgentProfiles::from_settings(AgentSettings::get_global(cx));
}
fn build_context_menu(
@@ -60,30 +64,21 @@ impl ProfileSelector {
) -> Entity<ContextMenu> {
ContextMenu::build(window, cx, |mut menu, _window, cx| {
let settings = AgentSettings::get_global(cx);
let mut found_non_builtin = false;
for (profile_id, profile_name) in self.profiles.iter() {
if !builtin_profiles::is_builtin(profile_id) {
found_non_builtin = true;
continue;
}
for (profile_id, profile) in self.profiles.builtin.iter() {
menu = menu.item(self.menu_entry_for_profile(
profile_id.clone(),
profile_name,
profile,
settings,
cx,
));
}
if found_non_builtin {
if !self.profiles.custom.is_empty() {
menu = menu.separator().header("Custom Profiles");
for (profile_id, profile_name) in self.profiles.iter() {
if builtin_profiles::is_builtin(profile_id) {
continue;
}
for (profile_id, profile) in self.profiles.custom.iter() {
menu = menu.item(self.menu_entry_for_profile(
profile_id.clone(),
profile_name,
profile,
settings,
cx,
));
@@ -104,20 +99,19 @@ impl ProfileSelector {
fn menu_entry_for_profile(
&self,
profile_id: AgentProfileId,
profile_name: &SharedString,
profile: &AgentProfile,
settings: &AgentSettings,
cx: &App,
_cx: &App,
) -> ContextMenuEntry {
let documentation = match profile_name.to_lowercase().as_str() {
let documentation = match profile.name.to_lowercase().as_str() {
builtin_profiles::WRITE => Some("Get help to write anything."),
builtin_profiles::ASK => Some("Chat about your codebase."),
builtin_profiles::MINIMAL => Some("Chat about anything with no tools."),
_ => None,
};
let thread_profile_id = self.thread.read(cx).profile().id();
let entry = ContextMenuEntry::new(profile_name.clone())
.toggleable(IconPosition::End, &profile_id == thread_profile_id);
let entry = ContextMenuEntry::new(profile.name.clone())
.toggleable(IconPosition::End, profile_id == settings.default_profile);
let entry = if let Some(doc_text) = documentation {
entry.documentation_aside(documentation_side(settings.dock), move |_| {
@@ -129,7 +123,7 @@ impl ProfileSelector {
entry.handler({
let fs = self.fs.clone();
let thread = self.thread.clone();
let thread_store = self.thread_store.clone();
let profile_id = profile_id.clone();
move |_window, cx| {
update_settings_file::<AgentSettings>(fs.clone(), cx, {
@@ -139,9 +133,11 @@ impl ProfileSelector {
}
});
thread.update(cx, |this, cx| {
this.set_profile(profile_id.clone(), cx);
});
thread_store
.update(cx, |this, cx| {
this.load_profile_by_id(profile_id.clone(), cx);
})
.log_err();
}
})
}
@@ -150,7 +146,7 @@ impl ProfileSelector {
impl Render for ProfileSelector {
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let settings = AgentSettings::get_global(cx);
let profile_id = self.thread.read(cx).profile().id();
let profile_id = &settings.default_profile;
let profile = settings.profiles.get(profile_id);
let selected_profile = profile

View File

@@ -4,7 +4,7 @@ use std::ops::Range;
use std::sync::Arc;
use std::time::Instant;
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
use agent_settings::{AgentSettings, CompletionMode};
use anyhow::{Result, anyhow};
use assistant_tool::{ActionLog, AnyToolCard, Tool, ToolWorkingSet};
use chrono::{DateTime, Utc};
@@ -41,7 +41,6 @@ use uuid::Uuid;
use zed_llm_client::{CompletionIntent, CompletionRequestStatus};
use crate::ThreadStore;
use crate::agent_profile::AgentProfile;
use crate::context::{AgentContext, AgentContextHandle, ContextLoadResult, LoadedContext};
use crate::thread_store::{
SerializedCrease, SerializedLanguageModel, SerializedMessage, SerializedMessageSegment,
@@ -195,20 +194,20 @@ impl MessageSegment {
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectSnapshot {
pub worktree_snapshots: Vec<WorktreeSnapshot>,
pub unsaved_buffer_paths: Vec<String>,
pub timestamp: DateTime<Utc>,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WorktreeSnapshot {
pub worktree_path: String,
pub git_state: Option<GitState>,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GitState {
pub remote_url: Option<String>,
pub head_sha: Option<String>,
@@ -247,7 +246,7 @@ impl LastRestoreCheckpoint {
}
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub enum DetailedSummaryState {
#[default]
NotGenerated,
@@ -361,7 +360,6 @@ pub struct Thread {
>,
remaining_turns: u32,
configured_model: Option<ConfiguredModel>,
profile: AgentProfile,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -391,7 +389,7 @@ impl ThreadSummary {
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExceededWindowError {
/// Model used when last message exceeded context window
model_id: LanguageModelId,
@@ -409,7 +407,6 @@ impl Thread {
) -> Self {
let (detailed_summary_tx, detailed_summary_rx) = postage::watch::channel();
let configured_model = LanguageModelRegistry::read_global(cx).default_model();
let profile_id = AgentSettings::get_global(cx).default_profile.clone();
Self {
id: ThreadId::new(),
@@ -452,7 +449,6 @@ impl Thread {
request_callback: None,
remaining_turns: u32::MAX,
configured_model,
profile: AgentProfile::new(profile_id, tools),
}
}
@@ -499,9 +495,6 @@ impl Thread {
let completion_mode = serialized
.completion_mode
.unwrap_or_else(|| AgentSettings::get_global(cx).preferred_completion_mode);
let profile_id = serialized
.profile
.unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone());
Self {
id,
@@ -561,7 +554,7 @@ impl Thread {
pending_checkpoint: None,
project: project.clone(),
prompt_builder,
tools: tools.clone(),
tools,
tool_use,
action_log: cx.new(|_| ActionLog::new(project)),
initial_project_snapshot: Task::ready(serialized.initial_project_snapshot).shared(),
@@ -577,7 +570,6 @@ impl Thread {
request_callback: None,
remaining_turns: u32::MAX,
configured_model,
profile: AgentProfile::new(profile_id, tools),
}
}
@@ -593,17 +585,6 @@ impl Thread {
&self.id
}
pub fn profile(&self) -> &AgentProfile {
&self.profile
}
pub fn set_profile(&mut self, id: AgentProfileId, cx: &mut Context<Self>) {
if &id != self.profile.id() {
self.profile = AgentProfile::new(id, self.tools.clone());
cx.emit(ThreadEvent::ProfileChanged);
}
}
pub fn is_empty(&self) -> bool {
self.messages.is_empty()
}
@@ -938,7 +919,8 @@ impl Thread {
model: Arc<dyn LanguageModel>,
) -> Vec<LanguageModelRequestTool> {
if model.supports_tools() {
self.profile
self.tools()
.read(cx)
.enabled_tools(cx)
.into_iter()
.filter_map(|tool| {
@@ -1198,7 +1180,6 @@ impl Thread {
}),
completion_mode: Some(this.completion_mode),
tool_use_limit_reached: this.tool_use_limit_reached,
profile: Some(this.profile.id().clone()),
})
})
}
@@ -1563,9 +1544,6 @@ impl Thread {
Err(LanguageModelCompletionError::Other(error)) => {
return Err(error);
}
Err(err @ LanguageModelCompletionError::RateLimit(..)) => {
return Err(err.into());
}
};
match event {
@@ -2143,7 +2121,7 @@ impl Thread {
window: Option<AnyWindowHandle>,
cx: &mut Context<Thread>,
) {
let available_tools = self.profile.enabled_tools(cx);
let available_tools = self.tools.read(cx).enabled_tools(cx);
let tool_list = available_tools
.iter()
@@ -2235,15 +2213,19 @@ impl Thread {
) -> Task<()> {
let tool_name: Arc<str> = tool.name().into();
let tool_result = tool.run(
input,
request,
self.project.clone(),
self.action_log.clone(),
model,
window,
cx,
);
let tool_result = if self.tools.read(cx).is_disabled(&tool.source(), &tool_name) {
Task::ready(Err(anyhow!("tool is disabled: {tool_name}"))).into()
} else {
tool.run(
input,
request,
self.project.clone(),
self.action_log.clone(),
model,
window,
cx,
)
};
// Store the card separately if it exists
if let Some(card) = tool_result.card.clone() {
@@ -2362,7 +2344,8 @@ impl Thread {
let client = self.project.read(cx).client();
let enabled_tool_names: Vec<String> = self
.profile
.tools()
.read(cx)
.enabled_tools(cx)
.iter()
.map(|tool| tool.name())
@@ -2875,7 +2858,6 @@ pub enum ThreadEvent {
ToolUseLimitReached,
CancelEditing,
CompletionCanceled,
ProfileChanged,
}
impl EventEmitter<ThreadEvent> for Thread {}
@@ -2890,7 +2872,7 @@ struct PendingCompletion {
mod tests {
use super::*;
use crate::{ThreadStore, context::load_context, context_store::ContextStore, thread_store};
use agent_settings::{AgentProfileId, AgentSettings, LanguageModelParameters};
use agent_settings::{AgentSettings, LanguageModelParameters};
use assistant_tool::ToolRegistry;
use editor::EditorSettings;
use gpui::TestAppContext;
@@ -3303,71 +3285,6 @@ fn main() {{
);
}
#[gpui::test]
async fn test_storing_profile_setting_per_thread(cx: &mut TestAppContext) {
init_test_settings(cx);
let project = create_test_project(
cx,
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
)
.await;
let (_workspace, thread_store, thread, _context_store, _model) =
setup_test_environment(cx, project.clone()).await;
// Check that we are starting with the default profile
let profile = cx.read(|cx| thread.read(cx).profile.clone());
let tool_set = cx.read(|cx| thread_store.read(cx).tools());
assert_eq!(
profile,
AgentProfile::new(AgentProfileId::default(), tool_set)
);
}
#[gpui::test]
async fn test_serializing_thread_profile(cx: &mut TestAppContext) {
init_test_settings(cx);
let project = create_test_project(
cx,
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
)
.await;
let (_workspace, thread_store, thread, _context_store, _model) =
setup_test_environment(cx, project.clone()).await;
// Profile gets serialized with default values
let serialized = thread
.update(cx, |thread, cx| thread.serialize(cx))
.await
.unwrap();
assert_eq!(serialized.profile, Some(AgentProfileId::default()));
let deserialized = cx.update(|cx| {
thread.update(cx, |thread, cx| {
Thread::deserialize(
thread.id.clone(),
serialized,
thread.project.clone(),
thread.tools.clone(),
thread.prompt_builder.clone(),
thread.project_context.clone(),
None,
cx,
)
})
});
let tool_set = cx.read(|cx| thread_store.read(cx).tools());
assert_eq!(
deserialized.profile,
AgentProfile::new(AgentProfileId::default(), tool_set)
);
}
#[gpui::test]
async fn test_temperature_setting(cx: &mut TestAppContext) {
init_test_settings(cx);

View File

@@ -671,7 +671,7 @@ impl RenderOnce for HistoryEntryElement {
),
HistoryEntry::Context(context) => (
context.path.to_string_lossy().to_string(),
context.title.clone(),
context.title.clone().into(),
context.mtime.timestamp(),
),
};

View File

@@ -3,9 +3,9 @@ use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::{Arc, Mutex};
use agent_settings::{AgentProfileId, CompletionMode};
use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, CompletionMode};
use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ToolId, ToolWorkingSet};
use assistant_tool::{ToolId, ToolSource, ToolWorkingSet};
use chrono::{DateTime, Utc};
use collections::HashMap;
use context_server::ContextServerId;
@@ -25,6 +25,7 @@ use prompt_store::{
UserRulesContext, WorktreeContext,
};
use serde::{Deserialize, Serialize};
use settings::{Settings as _, SettingsStore};
use ui::Window;
use util::ResultExt as _;
@@ -89,7 +90,7 @@ pub fn init(cx: &mut App) {
pub struct SharedProjectContext(Rc<RefCell<Option<ProjectContext>>>);
impl SharedProjectContext {
pub fn borrow(&self) -> Ref<'_, Option<ProjectContext>> {
pub fn borrow(&self) -> Ref<Option<ProjectContext>> {
self.0.borrow()
}
}
@@ -146,7 +147,12 @@ impl ThreadStore {
prompt_store: Option<Entity<PromptStore>>,
cx: &mut Context<Self>,
) -> (Self, oneshot::Receiver<()>) {
let mut subscriptions = vec![cx.subscribe(&project, Self::handle_project_event)];
let mut subscriptions = vec![
cx.observe_global::<SettingsStore>(move |this: &mut Self, cx| {
this.load_default_profile(cx);
}),
cx.subscribe(&project, Self::handle_project_event),
];
if let Some(prompt_store) = prompt_store.as_ref() {
subscriptions.push(cx.subscribe(
@@ -194,6 +200,7 @@ impl ThreadStore {
_reload_system_prompt_task: reload_system_prompt_task,
_subscriptions: subscriptions,
};
this.load_default_profile(cx);
this.register_context_server_handlers(cx);
this.reload(cx).detach_and_log_err(cx);
(this, ready_rx)
@@ -393,11 +400,16 @@ impl ThreadStore {
self.threads.len()
}
pub fn reverse_chronological_threads(&self) -> impl Iterator<Item = &SerializedThreadMetadata> {
// ordering is from "ORDER BY" in `list_threads`
pub fn unordered_threads(&self) -> impl Iterator<Item = &SerializedThreadMetadata> {
self.threads.iter()
}
pub fn reverse_chronological_threads(&self) -> Vec<SerializedThreadMetadata> {
let mut threads = self.threads.iter().cloned().collect::<Vec<_>>();
threads.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.updated_at));
threads
}
pub fn create_thread(&mut self, cx: &mut Context<Self>) -> Entity<Thread> {
cx.new(|cx| {
Thread::new(
@@ -508,17 +520,94 @@ impl ThreadStore {
})
}
fn register_context_server_handlers(&self, cx: &mut Context<Self>) {
let context_server_store = self.project.read(cx).context_server_store();
cx.subscribe(&context_server_store, Self::handle_context_server_event)
.detach();
fn load_default_profile(&self, cx: &mut Context<Self>) {
let assistant_settings = AgentSettings::get_global(cx);
// Check for any servers that were already running before the handler was registered
for server in context_server_store.read(cx).running_servers() {
self.load_context_server_tools(server.id(), context_server_store.clone(), cx);
self.load_profile_by_id(assistant_settings.default_profile.clone(), cx);
}
pub fn load_profile_by_id(&self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
let assistant_settings = AgentSettings::get_global(cx);
if let Some(profile) = assistant_settings.profiles.get(&profile_id) {
self.load_profile(profile.clone(), cx);
}
}
pub fn load_profile(&self, profile: AgentProfile, cx: &mut Context<Self>) {
self.tools.update(cx, |tools, cx| {
tools.disable_all_tools(cx);
tools.enable(
ToolSource::Native,
&profile
.tools
.into_iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool))
.collect::<Vec<_>>(),
cx,
);
});
if profile.enable_all_context_servers {
for context_server_id in self
.project
.read(cx)
.context_server_store()
.read(cx)
.all_server_ids()
{
self.tools.update(cx, |tools, cx| {
tools.enable_source(
ToolSource::ContextServer {
id: context_server_id.0.into(),
},
cx,
);
});
}
// Enable all the tools from all context servers, but disable the ones that are explicitly disabled
for (context_server_id, preset) in profile.context_servers {
self.tools.update(cx, |tools, cx| {
tools.disable(
ToolSource::ContextServer {
id: context_server_id.into(),
},
&preset
.tools
.into_iter()
.filter_map(|(tool, enabled)| (!enabled).then(|| tool))
.collect::<Vec<_>>(),
cx,
)
})
}
} else {
for (context_server_id, preset) in profile.context_servers {
self.tools.update(cx, |tools, cx| {
tools.enable(
ToolSource::ContextServer {
id: context_server_id.into(),
},
&preset
.tools
.into_iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool))
.collect::<Vec<_>>(),
cx,
)
})
}
}
}
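// In short: loading a profile first disables everything, re-enables the
// profile's native tools, then either enables whole context servers (with
// presets carving out explicit `false` entries) or treats presets as the only
// opt-in list. A hypothetical call site might look like:
//
//     thread_store.update(cx, |store, cx| {
//         store.load_profile_by_id(AgentProfileId("write".into()), cx);
//     });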
fn register_context_server_handlers(&self, cx: &mut Context<Self>) {
cx.subscribe(
&self.project.read(cx).context_server_store(),
Self::handle_context_server_event,
)
.detach();
}
fn handle_context_server_event(
&mut self,
context_server_store: Entity<ContextServerStore>,
@@ -529,71 +618,71 @@ impl ThreadStore {
match event {
project::context_server_store::Event::ServerStatusChanged { server_id, status } => {
match status {
ContextServerStatus::Starting => {}
ContextServerStatus::Running => {
self.load_context_server_tools(server_id.clone(), context_server_store, cx);
if let Some(server) =
context_server_store.read(cx).get_running_server(server_id)
{
let context_server_manager = context_server_store.clone();
cx.spawn({
let server = server.clone();
let server_id = server_id.clone();
async move |this, cx| {
let Some(protocol) = server.client() else {
return;
};
if protocol.capable(context_server::protocol::ServerCapability::Tools) {
if let Some(tools) = protocol.list_tools().await.log_err() {
let tool_ids = tool_working_set
.update(cx, |tool_working_set, _| {
tools
.tools
.into_iter()
.map(|tool| {
log::info!(
"registering context server tool: {:?}",
tool.name
);
tool_working_set.insert(Arc::new(
ContextServerTool::new(
context_server_manager.clone(),
server.id(),
tool,
),
))
})
.collect::<Vec<_>>()
})
.log_err();
if let Some(tool_ids) = tool_ids {
this.update(cx, |this, cx| {
this.context_server_tool_ids
.insert(server_id, tool_ids);
this.load_default_profile(cx);
})
.log_err();
}
}
}
}
})
.detach();
}
}
ContextServerStatus::Stopped | ContextServerStatus::Error(_) => {
if let Some(tool_ids) = self.context_server_tool_ids.remove(server_id) {
tool_working_set.update(cx, |tool_working_set, _| {
tool_working_set.remove(&tool_ids);
});
self.load_default_profile(cx);
}
}
_ => {}
}
}
}
}
fn load_context_server_tools(
&self,
server_id: ContextServerId,
context_server_store: Entity<ContextServerStore>,
cx: &mut Context<Self>,
) {
let Some(server) = context_server_store.read(cx).get_running_server(&server_id) else {
return;
};
let tool_working_set = self.tools.clone();
cx.spawn(async move |this, cx| {
let Some(protocol) = server.client() else {
return;
};
if protocol.capable(context_server::protocol::ServerCapability::Tools) {
if let Some(response) = protocol
.request::<context_server::types::requests::ListTools>(())
.await
.log_err()
{
let tool_ids = tool_working_set
.update(cx, |tool_working_set, _| {
response
.tools
.into_iter()
.map(|tool| {
log::info!("registering context server tool: {:?}", tool.name);
tool_working_set.insert(Arc::new(ContextServerTool::new(
context_server_store.clone(),
server.id(),
tool,
)))
})
.collect::<Vec<_>>()
})
.log_err();
if let Some(tool_ids) = tool_ids {
this.update(cx, |this, _| {
this.context_server_tool_ids.insert(server_id, tool_ids);
})
.log_err();
}
}
}
})
.detach();
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -603,7 +692,7 @@ pub struct SerializedThreadMetadata {
pub updated_at: DateTime<Utc>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[derive(Serialize, Deserialize, Debug)]
pub struct SerializedThread {
pub version: String,
pub summary: SharedString,
@@ -625,11 +714,9 @@ pub struct SerializedThread {
pub completion_mode: Option<CompletionMode>,
#[serde(default)]
pub tool_use_limit_reached: bool,
#[serde(default)]
pub profile: Option<AgentProfileId>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
#[derive(Serialize, Deserialize, Debug)]
pub struct SerializedLanguageModel {
pub provider: String,
pub model: String,
@@ -690,15 +777,11 @@ impl SerializedThreadV0_1_0 {
messages.push(message);
}
SerializedThread {
messages,
version: SerializedThread::VERSION.to_string(),
..self.0
}
SerializedThread { messages, ..self.0 }
}
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Serialize, Deserialize)]
pub struct SerializedMessage {
pub id: MessageId,
pub role: Role,
@@ -716,7 +799,7 @@ pub struct SerializedMessage {
pub is_hidden: bool,
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum SerializedMessageSegment {
#[serde(rename = "text")]
@@ -734,14 +817,14 @@ pub enum SerializedMessageSegment {
},
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Serialize, Deserialize)]
pub struct SerializedToolUse {
pub id: LanguageModelToolUseId,
pub name: SharedString,
pub input: serde_json::Value,
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Serialize, Deserialize)]
pub struct SerializedToolResult {
pub tool_use_id: LanguageModelToolUseId,
pub is_error: bool,
@@ -773,7 +856,6 @@ impl LegacySerializedThread {
model: None,
completion_mode: None,
tool_use_limit_reached: false,
profile: None,
}
}
}
@@ -804,7 +886,7 @@ impl LegacySerializedMessage {
}
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[derive(Debug, Serialize, Deserialize)]
pub struct SerializedCrease {
pub start: usize,
pub end: usize,
@@ -923,7 +1005,7 @@ impl ThreadsDatabase {
fn bytes_encode(
item: &Self::EItem,
) -> Result<std::borrow::Cow<'_, [u8]>, heed::BoxedError> {
) -> Result<std::borrow::Cow<[u8]>, heed::BoxedError> {
serde_json::to_vec(&item.0)
.map(std::borrow::Cow::Owned)
.map_err(Into::into)
@@ -1061,181 +1143,3 @@ impl ThreadsDatabase {
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::thread::{DetailedSummaryState, MessageId};
use chrono::Utc;
use language_model::{Role, TokenUsage};
use pretty_assertions::assert_eq;
#[test]
fn test_legacy_serialized_thread_upgrade() {
let updated_at = Utc::now();
let legacy_thread = LegacySerializedThread {
summary: "Test conversation".into(),
updated_at,
messages: vec![LegacySerializedMessage {
id: MessageId(1),
role: Role::User,
text: "Hello, world!".to_string(),
tool_uses: vec![],
tool_results: vec![],
}],
initial_project_snapshot: None,
};
let upgraded = legacy_thread.upgrade();
assert_eq!(
upgraded,
SerializedThread {
summary: "Test conversation".into(),
updated_at,
messages: vec![SerializedMessage {
id: MessageId(1),
role: Role::User,
segments: vec![SerializedMessageSegment::Text {
text: "Hello, world!".to_string()
}],
tool_uses: vec![],
tool_results: vec![],
context: "".to_string(),
creases: vec![],
is_hidden: false
}],
version: SerializedThread::VERSION.to_string(),
initial_project_snapshot: None,
cumulative_token_usage: TokenUsage::default(),
request_token_usage: vec![],
detailed_summary_state: DetailedSummaryState::default(),
exceeded_window_error: None,
model: None,
completion_mode: None,
tool_use_limit_reached: false,
profile: None
}
)
}
#[test]
fn test_serialized_threadv0_1_0_upgrade() {
let updated_at = Utc::now();
let thread_v0_1_0 = SerializedThreadV0_1_0(SerializedThread {
summary: "Test conversation".into(),
updated_at,
messages: vec![
SerializedMessage {
id: MessageId(1),
role: Role::User,
segments: vec![SerializedMessageSegment::Text {
text: "Use tool_1".to_string(),
}],
tool_uses: vec![],
tool_results: vec![],
context: "".to_string(),
creases: vec![],
is_hidden: false,
},
SerializedMessage {
id: MessageId(2),
role: Role::Assistant,
segments: vec![SerializedMessageSegment::Text {
text: "I want to use a tool".to_string(),
}],
tool_uses: vec![SerializedToolUse {
id: "abc".into(),
name: "tool_1".into(),
input: serde_json::Value::Null,
}],
tool_results: vec![],
context: "".to_string(),
creases: vec![],
is_hidden: false,
},
SerializedMessage {
id: MessageId(1),
role: Role::User,
segments: vec![SerializedMessageSegment::Text {
text: "Here is the tool result".to_string(),
}],
tool_uses: vec![],
tool_results: vec![SerializedToolResult {
tool_use_id: "abc".into(),
is_error: false,
content: LanguageModelToolResultContent::Text("abcdef".into()),
output: Some(serde_json::Value::Null),
}],
context: "".to_string(),
creases: vec![],
is_hidden: false,
},
],
version: SerializedThreadV0_1_0::VERSION.to_string(),
initial_project_snapshot: None,
cumulative_token_usage: TokenUsage::default(),
request_token_usage: vec![],
detailed_summary_state: DetailedSummaryState::default(),
exceeded_window_error: None,
model: None,
completion_mode: None,
tool_use_limit_reached: false,
profile: None,
});
let upgraded = thread_v0_1_0.upgrade();
assert_eq!(
upgraded,
SerializedThread {
summary: "Test conversation".into(),
updated_at,
messages: vec![
SerializedMessage {
id: MessageId(1),
role: Role::User,
segments: vec![SerializedMessageSegment::Text {
text: "Use tool_1".to_string()
}],
tool_uses: vec![],
tool_results: vec![],
context: "".to_string(),
creases: vec![],
is_hidden: false
},
SerializedMessage {
id: MessageId(2),
role: Role::Assistant,
segments: vec![SerializedMessageSegment::Text {
text: "I want to use a tool".to_string(),
}],
tool_uses: vec![SerializedToolUse {
id: "abc".into(),
name: "tool_1".into(),
input: serde_json::Value::Null,
}],
tool_results: vec![SerializedToolResult {
tool_use_id: "abc".into(),
is_error: false,
content: LanguageModelToolResultContent::Text("abcdef".into()),
output: Some(serde_json::Value::Null),
}],
context: "".to_string(),
creases: vec![],
is_hidden: false,
},
],
version: SerializedThread::VERSION.to_string(),
initial_project_snapshot: None,
cumulative_token_usage: TokenUsage::default(),
request_token_usage: vec![],
detailed_summary_state: DetailedSummaryState::default(),
exceeded_window_error: None,
model: None,
completion_mode: None,
tool_use_limit_reached: false,
profile: None
}
)
}
}

View File

@@ -1,33 +1,30 @@
use std::sync::Arc;
use assistant_tool::{Tool, ToolSource};
use assistant_tool::{Tool, ToolSource, ToolWorkingSet, ToolWorkingSetEvent};
use collections::HashMap;
use gpui::{App, Context, Entity, IntoElement, Render, Subscription, Window};
use language_model::{LanguageModel, LanguageModelToolSchemaFormat};
use ui::prelude::*;
use crate::{Thread, ThreadEvent};
pub struct IncompatibleToolsState {
cache: HashMap<LanguageModelToolSchemaFormat, Vec<Arc<dyn Tool>>>,
thread: Entity<Thread>,
_thread_subscription: Subscription,
tool_working_set: Entity<ToolWorkingSet>,
_tool_working_set_subscription: Subscription,
}
impl IncompatibleToolsState {
pub fn new(thread: Entity<Thread>, cx: &mut Context<Self>) -> Self {
pub fn new(tool_working_set: Entity<ToolWorkingSet>, cx: &mut Context<Self>) -> Self {
let _tool_working_set_subscription =
cx.subscribe(&thread, |this, _, event, _| match event {
ThreadEvent::ProfileChanged => {
cx.subscribe(&tool_working_set, |this, _, event, _| match event {
ToolWorkingSetEvent::EnabledToolsChanged => {
this.cache.clear();
}
_ => {}
});
Self {
cache: HashMap::default(),
thread,
_thread_subscription: _tool_working_set_subscription,
tool_working_set,
_tool_working_set_subscription,
}
}
@@ -39,9 +36,8 @@ impl IncompatibleToolsState {
self.cache
.entry(model.tool_input_format())
.or_insert_with(|| {
self.thread
self.tool_working_set
.read(cx)
.profile()
.enabled_tools(cx)
.iter()
.filter(|tool| tool.input_schema(model.tool_input_format()).is_err())

View File

@@ -16,6 +16,7 @@ anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
collections.workspace = true
gpui.workspace = true
indexmap.workspace = true
language_model.workspace = true
lmstudio = { workspace = true, features = ["schemars"] }
log.workspace = true

View File

@@ -17,6 +17,29 @@ pub mod builtin_profiles {
}
}
#[derive(Default)]
pub struct GroupedAgentProfiles {
pub builtin: IndexMap<AgentProfileId, AgentProfile>,
pub custom: IndexMap<AgentProfileId, AgentProfile>,
}
impl GroupedAgentProfiles {
pub fn from_settings(settings: &crate::AgentSettings) -> Self {
let mut builtin = IndexMap::default();
let mut custom = IndexMap::default();
for (profile_id, profile) in settings.profiles.clone() {
if builtin_profiles::is_builtin(&profile_id) {
builtin.insert(profile_id, profile);
} else {
custom.insert(profile_id, profile);
}
}
Self { builtin, custom }
}
}
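// Sketch: regrouping after a settings change, assuming the built-in ids are
// "write", "ask", and "minimal" and everything else is user-defined:
//
//     let grouped = GroupedAgentProfiles::from_settings(AgentSettings::get_global(cx));
//     assert!(grouped.builtin.contains_key(&AgentProfileId("write".into())));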
#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileId(pub Arc<str>);
@@ -40,7 +63,7 @@ impl Default for AgentProfileId {
/// A profile for the Zed Agent that controls its behavior.
#[derive(Debug, Clone)]
pub struct AgentProfileSettings {
pub struct AgentProfile {
/// The name of the profile.
pub name: SharedString,
pub tools: IndexMap<Arc<str>, bool>,

View File

@@ -102,7 +102,7 @@ pub struct AgentSettings {
pub using_outdated_settings_version: bool,
pub default_profile: AgentProfileId,
pub default_view: DefaultView,
pub profiles: IndexMap<AgentProfileId, AgentProfileSettings>,
pub profiles: IndexMap<AgentProfileId, AgentProfile>,
pub always_allow_tool_actions: bool,
pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
pub play_sound_when_agent_done: bool,
@@ -531,7 +531,7 @@ impl AgentSettingsContent {
pub fn create_profile(
&mut self,
profile_id: AgentProfileId,
profile_settings: AgentProfileSettings,
profile: AgentProfile,
) -> Result<()> {
self.v2_setting(|settings| {
let profiles = settings.profiles.get_or_insert_default();
@@ -542,10 +542,10 @@ impl AgentSettingsContent {
profiles.insert(
profile_id,
AgentProfileContent {
name: profile_settings.name.into(),
tools: profile_settings.tools,
enable_all_context_servers: Some(profile_settings.enable_all_context_servers),
context_servers: profile_settings
name: profile.name.into(),
tools: profile.tools,
enable_all_context_servers: Some(profile.enable_all_context_servers),
context_servers: profile
.context_servers
.into_iter()
.map(|(server_id, preset)| {
@@ -910,7 +910,7 @@ impl Settings for AgentSettings {
.extend(profiles.into_iter().map(|(id, profile)| {
(
id,
AgentProfileSettings {
AgentProfile {
name: profile.name.into(),
tools: profile.tools,
enable_all_context_servers: profile

View File

@@ -1,5 +1,4 @@
use std::str::FromStr;
use std::time::Duration;
use anyhow::{Context as _, Result, anyhow};
use chrono::{DateTime, Utc};
@@ -407,7 +406,6 @@ impl RateLimit {
/// <https://docs.anthropic.com/en/api/rate-limits#response-headers>
#[derive(Debug)]
pub struct RateLimitInfo {
pub retry_after: Option<Duration>,
pub requests: Option<RateLimit>,
pub tokens: Option<RateLimit>,
pub input_tokens: Option<RateLimit>,
@@ -419,11 +417,10 @@ impl RateLimitInfo {
// Check if any rate limit headers exist
let has_rate_limit_headers = headers
.keys()
.any(|k| k == "retry-after" || k.as_str().starts_with("anthropic-ratelimit-"));
.any(|k| k.as_str().starts_with("anthropic-ratelimit-"));
if !has_rate_limit_headers {
return Self {
retry_after: None,
requests: None,
tokens: None,
input_tokens: None,
@@ -432,11 +429,6 @@ impl RateLimitInfo {
}
Self {
retry_after: headers
.get("retry-after")
.and_then(|v| v.to_str().ok())
.and_then(|v| v.parse::<u64>().ok())
.map(Duration::from_secs),
requests: RateLimit::from_headers("requests", headers).ok(),
tokens: RateLimit::from_headers("tokens", headers).ok(),
input_tokens: RateLimit::from_headers("input-tokens", headers).ok(),
@@ -489,8 +481,8 @@ pub async fn stream_completion_with_rate_limit_info(
.send(request)
.await
.context("failed to send request to Anthropic")?;
let rate_limits = RateLimitInfo::from_headers(response.headers());
if response.status().is_success() {
let rate_limits = RateLimitInfo::from_headers(response.headers());
let reader = BufReader::new(response.into_body());
let stream = reader
.lines()
@@ -508,8 +500,6 @@ pub async fn stream_completion_with_rate_limit_info(
})
.boxed();
Ok((stream, Some(rate_limits)))
} else if let Some(retry_after) = rate_limits.retry_after {
Err(AnthropicError::RateLimit(retry_after))
} else {
let mut body = Vec::new();
response
@@ -779,8 +769,6 @@ pub struct MessageDelta {
#[derive(Error, Debug)]
pub enum AnthropicError {
#[error("rate limit exceeded, retry after {0:?}")]
RateLimit(Duration),
#[error("an error occurred while interacting with the Anthropic API: {error_type}: {message}", error_type = .0.error_type, message = .0.message)]
ApiError(ApiError),
#[error("{0}")]

View File

@@ -11,7 +11,7 @@ use assistant_slash_commands::FileCommandMetadata;
use client::{self, proto, telemetry::Telemetry};
use clock::ReplicaId;
use collections::{HashMap, HashSet};
use fs::{Fs, RenameOptions};
use fs::{Fs, RemoveOptions};
use futures::{FutureExt, StreamExt, future::Shared};
use gpui::{
App, AppContext as _, Context, Entity, EventEmitter, RenderImage, SharedString, Subscription,
@@ -452,10 +452,6 @@ pub enum ContextEvent {
MessagesEdited,
SummaryChanged,
SummaryGenerated,
PathChanged {
old_path: Option<Arc<Path>>,
new_path: Arc<Path>,
},
StreamedCompletion,
StartedThoughtProcess(Range<language::Anchor>),
EndedThoughtProcess(language::Anchor),
@@ -2898,34 +2894,22 @@ impl AssistantContext {
}
fs.create_dir(contexts_dir().as_ref()).await?;
// rename before write ensures that only one file exists
if let Some(old_path) = old_path.as_ref() {
fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
.await?;
if let Some(old_path) = old_path {
if new_path.as_path() != old_path.as_ref() {
fs.rename(
fs.remove_file(
&old_path,
&new_path,
RenameOptions {
overwrite: true,
ignore_if_exists: true,
RemoveOptions {
recursive: false,
ignore_if_not_exists: true,
},
)
.await?;
}
}
// update path before write in case it fails
this.update(cx, {
let new_path: Arc<Path> = new_path.clone().into();
move |this, cx| {
this.path = Some(new_path.clone());
cx.emit(ContextEvent::PathChanged { old_path, new_path });
}
})
.ok();
fs.atomic_write(new_path, serde_json::to_string(&context).unwrap())
.await?;
this.update(cx, |this, _| this.path = Some(new_path.into()))?;
}
Ok(())
@@ -3293,7 +3277,7 @@ impl SavedContextV0_1_0 {
#[derive(Debug, Clone)]
pub struct SavedContextMetadata {
pub title: SharedString,
pub title: String,
pub path: Arc<Path>,
pub mtime: chrono::DateTime<chrono::Local>,
}

View File

@@ -580,7 +580,6 @@ impl ContextEditor {
});
}
ContextEvent::SummaryGenerated => {}
ContextEvent::PathChanged { .. } => {}
ContextEvent::StartedThoughtProcess(range) => {
let creases = self.insert_thought_process_output_sections(
[(

View File

@@ -347,6 +347,12 @@ impl ContextStore {
self.contexts_metadata.iter()
}
pub fn reverse_chronological_contexts(&self) -> Vec<SavedContextMetadata> {
let mut contexts = self.contexts_metadata.iter().cloned().collect::<Vec<_>>();
contexts.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.mtime));
contexts
}
pub fn create(&mut self, cx: &mut Context<Self>) -> Entity<AssistantContext> {
let context = cx.new(|cx| {
AssistantContext::local(
@@ -612,16 +618,6 @@ impl ContextStore {
ContextEvent::SummaryChanged => {
self.advertise_contexts(cx);
}
ContextEvent::PathChanged { old_path, new_path } => {
if let Some(old_path) = old_path.as_ref() {
for metadata in &mut self.contexts_metadata {
if &metadata.path == old_path {
metadata.path = new_path.clone();
break;
}
}
}
}
ContextEvent::Operation(operation) => {
let context_id = context.read(cx).id().to_proto();
let operation = operation.to_proto();
@@ -796,7 +792,7 @@ impl ContextStore {
.next()
{
contexts.push(SavedContextMetadata {
title: title.to_string().into(),
title: title.to_string(),
path: path.into(),
mtime: metadata.mtime.timestamp_for_user().into(),
});
@@ -813,37 +809,74 @@ impl ContextStore {
}
fn register_context_server_handlers(&self, cx: &mut Context<Self>) {
let context_server_store = self.project.read(cx).context_server_store();
cx.subscribe(&context_server_store, Self::handle_context_server_event)
.detach();
// Check for any servers that were already running before the handler was registered
for server in context_server_store.read(cx).running_servers() {
self.load_context_server_slash_commands(server.id(), context_server_store.clone(), cx);
}
cx.subscribe(
&self.project.read(cx).context_server_store(),
Self::handle_context_server_event,
)
.detach();
}
fn handle_context_server_event(
&mut self,
context_server_store: Entity<ContextServerStore>,
context_server_manager: Entity<ContextServerStore>,
event: &project::context_server_store::Event,
cx: &mut Context<Self>,
) {
let slash_command_working_set = self.slash_commands.clone();
match event {
project::context_server_store::Event::ServerStatusChanged { server_id, status } => {
match status {
ContextServerStatus::Running => {
self.load_context_server_slash_commands(
server_id.clone(),
context_server_store.clone(),
cx,
);
if let Some(server) = context_server_manager
.read(cx)
.get_running_server(server_id)
{
let context_server_manager = context_server_manager.clone();
cx.spawn({
let server = server.clone();
let server_id = server_id.clone();
async move |this, cx| {
let Some(protocol) = server.client() else {
return;
};
if protocol.capable(context_server::protocol::ServerCapability::Prompts) {
if let Some(prompts) = protocol.list_prompts().await.log_err() {
let slash_command_ids = prompts
.into_iter()
.filter(assistant_slash_commands::acceptable_prompt)
.map(|prompt| {
log::info!(
"registering context server command: {:?}",
prompt.name
);
slash_command_working_set.insert(Arc::new(
assistant_slash_commands::ContextServerSlashCommand::new(
context_server_manager.clone(),
server.id(),
prompt,
),
))
})
.collect::<Vec<_>>();
this.update(cx, |this, _cx| {
this.context_server_slash_command_ids
.insert(server_id.clone(), slash_command_ids);
})
.log_err();
}
}
}
})
.detach();
}
}
ContextServerStatus::Stopped | ContextServerStatus::Error(_) => {
if let Some(slash_command_ids) =
self.context_server_slash_command_ids.remove(server_id)
{
self.slash_commands.remove(&slash_command_ids);
slash_command_working_set.remove(&slash_command_ids);
}
}
_ => {}
@@ -851,52 +884,4 @@ impl ContextStore {
}
}
}
fn load_context_server_slash_commands(
&self,
server_id: ContextServerId,
context_server_store: Entity<ContextServerStore>,
cx: &mut Context<Self>,
) {
let Some(server) = context_server_store.read(cx).get_running_server(&server_id) else {
return;
};
let slash_command_working_set = self.slash_commands.clone();
cx.spawn(async move |this, cx| {
let Some(protocol) = server.client() else {
return;
};
if protocol.capable(context_server::protocol::ServerCapability::Prompts) {
if let Some(response) = protocol
.request::<context_server::types::requests::PromptsList>(())
.await
.log_err()
{
let slash_command_ids = response
.prompts
.into_iter()
.filter(assistant_slash_commands::acceptable_prompt)
.map(|prompt| {
log::info!("registering context server command: {:?}", prompt.name);
slash_command_working_set.insert(Arc::new(
assistant_slash_commands::ContextServerSlashCommand::new(
context_server_store.clone(),
server.id(),
prompt,
),
))
})
.collect::<Vec<_>>();
this.update(cx, |this, _cx| {
this.context_server_slash_command_ids
.insert(server_id.clone(), slash_command_ids);
})
.log_err();
}
}
})
.detach();
}
}

View File

@@ -682,12 +682,11 @@ mod tests {
_: &AsyncApp,
) -> BoxFuture<
'static,
Result<
http_client::Result<
BoxStream<
'static,
Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
http_client::Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
>,
LanguageModelCompletionError,
>,
> {
unimplemented!()

View File

@@ -10,7 +10,9 @@ use parking_lot::Mutex;
use project::{CompletionIntent, CompletionSource, lsp_store::CompletionDocumentation};
use rope::Point;
use std::{
cell::RefCell,
ops::Range,
rc::Rc,
sync::{
Arc,
atomic::{AtomicBool, Ordering::SeqCst},
@@ -238,14 +240,13 @@ impl SlashCommandCompletionProvider {
Ok(vec![project::CompletionResponse {
completions,
// TODO: Could have slash commands indicate whether their completions are incomplete.
is_incomplete: true,
is_incomplete: false,
}])
})
} else {
Task::ready(Ok(vec![project::CompletionResponse {
completions: Vec::new(),
is_incomplete: true,
is_incomplete: false,
}]))
}
}
@@ -274,17 +275,17 @@ impl CompletionProvider for SlashCommandCompletionProvider {
position.row,
call.arguments.last().map_or(call.name.end, |arg| arg.end) as u32,
);
let command_range = buffer.anchor_before(command_range_start)
let command_range = buffer.anchor_after(command_range_start)
..buffer.anchor_after(command_range_end);
let name = line[call.name.clone()].to_string();
let (arguments, last_argument_range) = if let Some(argument) = call.arguments.last()
{
let last_arg_start =
buffer.anchor_before(Point::new(position.row, argument.start as u32));
buffer.anchor_after(Point::new(position.row, argument.start as u32));
let first_arg_start = call.arguments.first().expect("we have the last element");
let first_arg_start = buffer
.anchor_before(Point::new(position.row, first_arg_start.start as u32));
let first_arg_start =
buffer.anchor_after(Point::new(position.row, first_arg_start.start as u32));
let arguments = call
.arguments
.into_iter()
@@ -297,7 +298,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
)
} else {
let start =
buffer.anchor_before(Point::new(position.row, call.name.start as u32));
buffer.anchor_after(Point::new(position.row, call.name.start as u32));
(None, start..buffer_position)
};
@@ -325,6 +326,16 @@ impl CompletionProvider for SlashCommandCompletionProvider {
}
}
fn resolve_completions(
&self,
_: Entity<Buffer>,
_: Vec<usize>,
_: Rc<RefCell<Box<[project::Completion]>>>,
_: &mut Context<Editor>,
) -> Task<Result<bool>> {
Task::ready(Ok(true))
}
fn is_completion_trigger(
&self,
buffer: &Entity<Buffer>,

View File

@@ -86,26 +86,20 @@ impl SlashCommand for ContextServerSlashCommand {
cx.foreground_executor().spawn(async move {
let protocol = server.client().context("Context server not initialized")?;
let response = protocol
.request::<context_server::types::requests::CompletionComplete>(
context_server::types::CompletionCompleteParams {
reference: context_server::types::CompletionReference::Prompt(
context_server::types::PromptReference {
ty: context_server::types::PromptReferenceType::Prompt,
name: prompt_name,
},
),
argument: context_server::types::CompletionArgument {
name: arg_name,
value: arg_value,
let completion_result = protocol
.completion(
context_server::types::CompletionReference::Prompt(
context_server::types::PromptReference {
r#type: context_server::types::PromptReferenceType::Prompt,
name: prompt_name,
},
meta: None,
},
),
arg_name,
arg_value,
)
.await?;
let completions = response
.completion
let completions = completion_result
.values
.into_iter()
.map(|value| ArgumentCompletion {
@@ -144,18 +138,10 @@ impl SlashCommand for ContextServerSlashCommand {
if let Some(server) = store.get_running_server(&server_id) {
cx.foreground_executor().spawn(async move {
let protocol = server.client().context("Context server not initialized")?;
let response = protocol
.request::<context_server::types::requests::PromptsGet>(
context_server::types::PromptsGetParams {
name: prompt_name.clone(),
arguments: Some(prompt_args),
meta: None,
},
)
.await?;
let result = protocol.run_prompt(&prompt_name, prompt_args).await?;
anyhow::ensure!(
response
result
.messages
.iter()
.all(|msg| matches!(msg.role, context_server::types::Role::User)),
@@ -163,7 +149,7 @@ impl SlashCommand for ContextServerSlashCommand {
);
// Extract text from user messages into a single prompt string
let mut prompt = response
let mut prompt = result
.messages
.into_iter()
.filter_map(|msg| match msg.content {
@@ -181,7 +167,7 @@ impl SlashCommand for ContextServerSlashCommand {
range: 0..(prompt.len()),
icon: IconName::ZedAssistant,
label: SharedString::from(
response
result
.description
.unwrap_or(format!("Result from {}", prompt_name)),
),

View File

@@ -13,6 +13,7 @@ path = "src/assistant_tool.rs"
[dependencies]
anyhow.workspace = true
async-watch.workspace = true
buffer_diff.workspace = true
clock.workspace = true
collections.workspace = true
@@ -29,7 +30,6 @@ serde.workspace = true
serde_json.workspace = true
text.workspace = true
util.workspace = true
watch.workspace = true
workspace.workspace = true
workspace-hack.workspace = true

View File

@@ -204,7 +204,7 @@ impl ActionLog {
git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
})?;
let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
let (git_diff_updates_tx, mut git_diff_updates_rx) = async_watch::channel(());
let _repo_subscription =
if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
cx.update(|cx| {

View File

@@ -214,7 +214,7 @@ pub trait Tool: 'static + Send + Sync {
ToolSource::Native
}
/// Returns true if the tool needs the user's confirmation
/// Returns true iff the tool needs the user's confirmation
/// before having permission to run.
fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool;

View File

@@ -46,19 +46,15 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
);
}
const KEYS_TO_REMOVE: [(&str, fn(&Value) -> bool); 5] = [
("format", |value| value.is_string()),
("additionalProperties", |value| value.is_boolean()),
("exclusiveMinimum", |value| value.is_number()),
("exclusiveMaximum", |value| value.is_number()),
("optional", |value| value.is_boolean()),
const KEYS_TO_REMOVE: [&str; 5] = [
"format",
"additionalProperties",
"exclusiveMinimum",
"exclusiveMaximum",
"optional",
];
for (key, predicate) in KEYS_TO_REMOVE {
if let Some(value) = obj.get(key) {
if predicate(value) {
obj.remove(key);
}
}
for key in KEYS_TO_REMOVE {
obj.remove(key);
}
// If a type is not specified for an input parameter, add a default type
@@ -157,24 +153,6 @@ mod tests {
"type": "integer"
})
);
// Ensure that we do not remove keys that are actually supported (e.g. "format" can just be used as another property)
let mut json = json!({
"description": "A test field",
"type": "integer",
"format": {},
});
adapt_to_json_schema_subset(&mut json).unwrap();
assert_eq!(
json,
json!({
"description": "A test field",
"type": "integer",
"format": {},
})
);
}
#[test]

View File

@@ -1,7 +1,7 @@
use std::sync::Arc;
use collections::{HashMap, IndexMap};
use gpui::App;
use collections::{HashMap, HashSet, IndexMap};
use gpui::{App, Context, EventEmitter};
use crate::{Tool, ToolRegistry, ToolSource};
@@ -13,9 +13,17 @@ pub struct ToolId(usize);
pub struct ToolWorkingSet {
context_server_tools_by_id: HashMap<ToolId, Arc<dyn Tool>>,
context_server_tools_by_name: HashMap<String, Arc<dyn Tool>>,
enabled_sources: HashSet<ToolSource>,
enabled_tools_by_source: HashMap<ToolSource, HashSet<Arc<str>>>,
next_tool_id: ToolId,
}
pub enum ToolWorkingSetEvent {
EnabledToolsChanged,
}
impl EventEmitter<ToolWorkingSetEvent> for ToolWorkingSet {}
impl ToolWorkingSet {
pub fn tool(&self, name: &str, cx: &App) -> Option<Arc<dyn Tool>> {
self.context_server_tools_by_name
@@ -49,6 +57,42 @@ impl ToolWorkingSet {
tools_by_source
}
pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
let all_tools = self.tools(cx);
all_tools
.into_iter()
.filter(|tool| self.is_enabled(&tool.source(), &tool.name().into()))
.collect()
}
pub fn disable_all_tools(&mut self, cx: &mut Context<Self>) {
self.enabled_tools_by_source.clear();
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
}
pub fn enable_source(&mut self, source: ToolSource, cx: &mut Context<Self>) {
self.enabled_sources.insert(source.clone());
let tools_by_source = self.tools_by_source(cx);
if let Some(tools) = tools_by_source.get(&source) {
self.enabled_tools_by_source.insert(
source,
tools
.into_iter()
.map(|tool| tool.name().into())
.collect::<HashSet<_>>(),
);
}
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
}
pub fn disable_source(&mut self, source: &ToolSource, cx: &mut Context<Self>) {
self.enabled_sources.remove(source);
self.enabled_tools_by_source.remove(source);
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
}
pub fn insert(&mut self, tool: Arc<dyn Tool>) -> ToolId {
let tool_id = self.next_tool_id;
self.next_tool_id.0 += 1;
@@ -58,6 +102,42 @@ impl ToolWorkingSet {
tool_id
}
pub fn is_enabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
self.enabled_tools_by_source
.get(source)
.map_or(false, |enabled_tools| enabled_tools.contains(name))
}
pub fn is_disabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
!self.is_enabled(source, name)
}
pub fn enable(
&mut self,
source: ToolSource,
tools_to_enable: &[Arc<str>],
cx: &mut Context<Self>,
) {
self.enabled_tools_by_source
.entry(source)
.or_default()
.extend(tools_to_enable.into_iter().cloned());
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
}
pub fn disable(
&mut self,
source: ToolSource,
tools_to_disable: &[Arc<str>],
cx: &mut Context<Self>,
) {
self.enabled_tools_by_source
.entry(source)
.or_default()
.retain(|name| !tools_to_disable.contains(name));
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
}
pub fn remove(&mut self, tool_ids_to_remove: &[ToolId]) {
self.context_server_tools_by_id
.retain(|id, _| !tool_ids_to_remove.contains(id));

View File

@@ -18,6 +18,7 @@ eval = []
agent_settings.workspace = true
anyhow.workspace = true
assistant_tool.workspace = true
async-watch.workspace = true
buffer_diff.workspace = true
chrono.workspace = true
collections.workspace = true
@@ -57,7 +58,6 @@ terminal_view.workspace = true
theme.workspace = true
ui.workspace = true
util.workspace = true
watch.workspace = true
web_search.workspace = true
which.workspace = true
workspace-hack.workspace = true
@@ -80,7 +80,6 @@ rand.workspace = true
pretty_assertions.workspace = true
reqwest_client.workspace = true
settings = { workspace = true, features = ["test-support"] }
smol.workspace = true
task = { workspace = true, features = ["test-support"]}
tempfile.workspace = true
theme.workspace = true

View File

@@ -420,12 +420,12 @@ impl EditAgent {
cx: &mut AsyncApp,
) -> (
Task<Result<(T, Vec<ResolvedOldText>)>>,
watch::Receiver<Option<Range<usize>>>,
async_watch::Receiver<Option<Range<usize>>>,
)
where
T: 'static + Send + Unpin + Stream<Item = Result<EditParserEvent>>,
{
let (mut old_range_tx, old_range_rx) = watch::channel(None);
let (old_range_tx, old_range_rx) = async_watch::channel(None);
let task = cx.background_spawn(async move {
let mut matcher = StreamingFuzzyMatcher::new(snapshot);
while let Some(edit_event) = edit_events.next().await {

View File

@@ -11,7 +11,7 @@ use client::{Client, UserStore};
use collections::HashMap;
use fs::FakeFs;
use futures::{FutureExt, future::LocalBoxFuture};
use gpui::{AppContext, TestAppContext, Timer};
use gpui::{AppContext, TestAppContext};
use indoc::{formatdoc, indoc};
use language_model::{
LanguageModelRegistry, LanguageModelRequestTool, LanguageModelToolResult,
@@ -39,7 +39,7 @@ fn eval_extract_handle_command_output() {
// Model | Pass rate
// ----------------------------|----------
// claude-3.7-sonnet | 0.98
// gemini-2.5-pro-06-05 | 0.77
// gemini-2.5-pro | 0.86
// gemini-2.5-flash | 0.11
// gpt-4.1 | 1.00
@@ -58,7 +58,6 @@ fn eval_extract_handle_command_output() {
eval(
100,
0.7, // Taking the lower bar for Gemini
0.05,
EvalInput::from_conversation(
vec![
message(
@@ -117,7 +116,6 @@ fn eval_delete_run_git_blame() {
eval(
100,
0.95,
0.05,
EvalInput::from_conversation(
vec![
message(
@@ -180,7 +178,6 @@ fn eval_translate_doc_comments() {
eval(
200,
1.,
0.05,
EvalInput::from_conversation(
vec![
message(
@@ -244,7 +241,6 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
eval(
100,
0.95,
0.05,
EvalInput::from_conversation(
vec![
message(
@@ -369,7 +365,6 @@ fn eval_disable_cursor_blinking() {
eval(
100,
0.95,
0.05,
EvalInput::from_conversation(
vec![
message(User, [text("Let's research how to cursor blinking works.")]),
@@ -453,9 +448,6 @@ fn eval_from_pixels_constructor() {
eval(
100,
0.95,
// For whatever reason, this eval produces more mismatched tags.
// Increasing for now, let's see if we can bring this down.
0.2,
EvalInput::from_conversation(
vec![
message(
@@ -656,7 +648,6 @@ fn eval_zode() {
eval(
50,
1.,
0.05,
EvalInput::from_conversation(
vec![
message(User, [text(include_str!("evals/fixtures/zode/prompt.md"))]),
@@ -763,7 +754,6 @@ fn eval_add_overwrite_test() {
eval(
200,
0.5, // TODO: make this eval better
0.05,
EvalInput::from_conversation(
vec![
message(
@@ -1003,7 +993,6 @@ fn eval_create_empty_file() {
eval(
100,
0.99,
0.05,
EvalInput::from_conversation(
vec![
message(User, [text("Create a second empty todo file ")]),
@@ -1255,12 +1244,9 @@ impl EvalAssertion {
}],
..Default::default()
};
let mut response = retry_on_rate_limit(async || {
Ok(judge
.stream_completion_text(request.clone(), &cx.to_async())
.await?)
})
.await?;
let mut response = judge
.stream_completion_text(request, &cx.to_async())
.await?;
let mut output = String::new();
while let Some(chunk) = response.stream.next().await {
let chunk = chunk?;
@@ -1293,12 +1279,7 @@ impl EvalAssertion {
}
}
fn eval(
iterations: usize,
expected_pass_ratio: f32,
mismatched_tag_threshold: f32,
mut eval: EvalInput,
) {
fn eval(iterations: usize, expected_pass_ratio: f32, mut eval: EvalInput) {
let mut evaluated_count = 0;
let mut failed_count = 0;
report_progress(evaluated_count, failed_count, iterations);
@@ -1311,17 +1292,10 @@ fn eval(
run_eval(eval.clone(), tx.clone());
let executor = gpui::background_executor();
let semaphore = Arc::new(smol::lock::Semaphore::new(32));
for _ in 1..iterations {
let eval = eval.clone();
let tx = tx.clone();
let semaphore = semaphore.clone();
executor
.spawn(async move {
let _guard = semaphore.acquire().await;
run_eval(eval, tx)
})
.detach();
executor.spawn(async move { run_eval(eval, tx) }).detach();
}
drop(tx);
@@ -1377,7 +1351,7 @@ fn eval(
let mismatched_tag_ratio =
cumulative_parser_metrics.mismatched_tags as f32 / cumulative_parser_metrics.tags as f32;
if mismatched_tag_ratio > mismatched_tag_threshold {
if mismatched_tag_ratio > 0.10 {
for eval_output in eval_outputs {
println!("{}", eval_output);
}
@@ -1587,31 +1561,21 @@ impl EditAgentTest {
if let Some(input_content) = eval.input_content.as_deref() {
buffer.update(cx, |buffer, cx| buffer.set_text(input_content, cx));
}
retry_on_rate_limit(async || {
self.agent
.edit(
buffer.clone(),
eval.edit_file_input.display_description.clone(),
&conversation,
&mut cx.to_async(),
)
.0
.await
})
.await?
let (edit_output, _) = self.agent.edit(
buffer.clone(),
eval.edit_file_input.display_description,
&conversation,
&mut cx.to_async(),
);
edit_output.await?
} else {
retry_on_rate_limit(async || {
self.agent
.overwrite(
buffer.clone(),
eval.edit_file_input.display_description.clone(),
&conversation,
&mut cx.to_async(),
)
.0
.await
})
.await?
let (edit_output, _) = self.agent.overwrite(
buffer.clone(),
eval.edit_file_input.display_description,
&conversation,
&mut cx.to_async(),
);
edit_output.await?
};
let buffer_text = buffer.read_with(cx, |buffer, _| buffer.text());
@@ -1633,26 +1597,6 @@ impl EditAgentTest {
}
}
async fn retry_on_rate_limit<R>(mut request: impl AsyncFnMut() -> Result<R>) -> Result<R> {
loop {
match request().await {
Ok(result) => return Ok(result),
Err(err) => match err.downcast::<LanguageModelCompletionError>() {
Ok(err) => match err {
LanguageModelCompletionError::RateLimit(duration) => {
// Wait until after we are allowed to try again
eprintln!("Rate limit exceeded. Waiting for {duration:?}...",);
Timer::after(duration).await;
continue;
}
_ => return Err(err.into()),
},
Err(err) => return Err(err),
},
}
}
}
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
struct EvalAssertionOutcome {
score: usize,

View File

@@ -498,7 +498,7 @@ client.with_options(max_retries=5).messages.create(
### Timeouts
By default requests time out after 10 minutes. You can configure this with a `timeout` option,
which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object:
which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration) object:
```python
from anthropic import Anthropic

View File

@@ -638,36 +638,29 @@ impl ToolCard for TerminalToolCard {
.bg(cx.theme().colors().editor_background)
.rounded_b_md()
.text_ui_sm(cx)
.child({
let content_mode = terminal.read(cx).content_mode(window, cx);
if content_mode.is_scrollable() {
div().h_72().child(terminal.clone()).into_any_element()
} else {
ToolOutputPreview::new(
terminal.clone().into_any_element(),
terminal.entity_id(),
)
.with_total_lines(self.content_line_count)
.toggle_state(!content_mode.is_limited())
.on_toggle({
let terminal = terminal.clone();
move |is_expanded, _, cx| {
terminal.update(cx, |terminal, cx| {
terminal.set_embedded_mode(
if is_expanded {
None
} else {
Some(COLLAPSED_LINES)
},
cx,
);
});
}
})
.into_any_element()
}
}),
.child(
ToolOutputPreview::new(
terminal.clone().into_any_element(),
terminal.entity_id(),
)
.with_total_lines(self.content_line_count)
.toggle_state(!terminal.read(cx).is_content_limited(window))
.on_toggle({
let terminal = terminal.clone();
move |is_expanded, _, cx| {
terminal.update(cx, |terminal, cx| {
terminal.set_embedded_mode(
if is_expanded {
None
} else {
Some(COLLAPSED_LINES)
},
cx,
);
});
}
}),
),
)
},
)

View File

@@ -452,10 +452,6 @@ impl Model {
| Model::Claude3_5SonnetV2
| Model::Claude3_7Sonnet
| Model::Claude3_7SonnetThinking
| Model::ClaudeSonnet4
| Model::ClaudeSonnet4Thinking
| Model::ClaudeOpus4
| Model::ClaudeOpus4Thinking
| Model::Claude3Haiku
| Model::Claude3Opus
| Model::Claude3Sonnet

View File

@@ -111,7 +111,7 @@ pub struct ChannelMembership {
pub role: proto::ChannelRole,
}
impl ChannelMembership {
pub fn sort_key(&self) -> MembershipSortKey<'_> {
pub fn sort_key(&self) -> MembershipSortKey {
MembershipSortKey {
role_order: match self.role {
proto::ChannelRole::Admin => 0,

View File

@@ -32,7 +32,7 @@ impl ChannelIndex {
.retain(|channel_id| !channels.contains(channel_id));
}
pub fn bulk_insert(&mut self) -> ChannelPathsInsertGuard<'_> {
pub fn bulk_insert(&mut self) -> ChannelPathsInsertGuard {
ChannelPathsInsertGuard {
channels_ordered: &mut self.channels_ordered,
channels_by_id: &mut self.channels_by_id,

View File

@@ -39,7 +39,7 @@ enum ProxyType<'t> {
HttpProxy(HttpProxyType<'t>),
}
fn parse_proxy_type(proxy: &Url) -> Option<((String, u16), ProxyType<'_>)> {
fn parse_proxy_type(proxy: &Url) -> Option<((String, u16), ProxyType)> {
let scheme = proxy.scheme();
let host = proxy.host()?.to_string();
let port = proxy.port_or_known_default()?;

View File

@@ -80,6 +80,7 @@ zed_llm_client.workspace = true
agent_settings.workspace = true
assistant_context_editor.workspace = true
assistant_slash_command.workspace = true
assistant_tool.workspace = true
async-trait.workspace = true
audio.workspace = true
buffer_diff.workspace = true

View File

@@ -501,10 +501,8 @@ impl Database {
/// Returns all channels for the user with the given ID.
pub async fn get_channels_for_user(&self, user_id: UserId) -> Result<ChannelsForUser> {
self.weak_transaction(
|tx| async move { self.get_user_channels(user_id, None, true, &tx).await },
)
.await
self.transaction(|tx| async move { self.get_user_channels(user_id, None, true, &tx).await })
.await
}
/// Returns all channels for the user with the given ID that are descendants

View File

@@ -15,7 +15,7 @@ impl Database {
user_b_busy: bool,
}
self.weak_transaction(|tx| async move {
self.transaction(|tx| async move {
let user_a_participant = Alias::new("user_a_participant");
let user_b_participant = Alias::new("user_b_participant");
let mut db_contacts = contact::Entity::find()
@@ -91,7 +91,7 @@ impl Database {
/// Returns whether the given user is a busy (on a call).
pub async fn is_user_busy(&self, user_id: UserId) -> Result<bool> {
self.weak_transaction(|tx| async move {
self.transaction(|tx| async move {
let participant = room_participant::Entity::find()
.filter(room_participant::Column::UserId.eq(user_id))
.one(&*tx)

View File

@@ -80,7 +80,7 @@ impl Database {
&self,
user_id: UserId,
) -> Result<Option<proto::IncomingCall>> {
self.weak_transaction(|tx| async move {
self.transaction(|tx| async move {
let pending_participant = room_participant::Entity::find()
.filter(
room_participant::Column::UserId

View File

@@ -7,12 +7,6 @@ pub use token::*;
pub const AGENT_EXTENDED_TRIAL_FEATURE_FLAG: &str = "agent-extended-trial";
/// The name of the feature flag that bypasses the account age check.
pub const BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG: &str = "bypass-account-age-check";
/// The minimum account age an account must have in order to use the LLM service.
pub const MIN_ACCOUNT_AGE_FOR_LLM_USE: chrono::Duration = chrono::Duration::days(30);
/// The default value to use for maximum spend per month if the user did not
/// explicitly set a maximum spend.
///

View File

@@ -1,6 +1,6 @@
use crate::db::billing_subscription::SubscriptionKind;
use crate::db::{billing_customer, billing_subscription, user};
use crate::llm::{AGENT_EXTENDED_TRIAL_FEATURE_FLAG, BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG};
use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG;
use crate::{Config, db::billing_preference};
use anyhow::{Context as _, Result};
use chrono::{NaiveDateTime, Utc};
@@ -84,7 +84,7 @@ impl LlmTokenClaims {
.any(|flag| flag == "llm-closed-beta"),
bypass_account_age_check: feature_flags
.iter()
.any(|flag| flag == BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG),
.any(|flag| flag == "bypass-account-age-check"),
can_use_web_search_tool: true,
use_llm_request_queue: feature_flags.iter().any(|flag| flag == "llm-request-queue"),
plan,

View File

@@ -4,10 +4,7 @@ use crate::api::billing::find_or_create_billing_customer;
use crate::api::{CloudflareIpCountryHeader, SystemIdHeader};
use crate::db::billing_subscription::SubscriptionKind;
use crate::llm::db::LlmDatabase;
use crate::llm::{
AGENT_EXTENDED_TRIAL_FEATURE_FLAG, BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG, LlmTokenClaims,
MIN_ACCOUNT_AGE_FOR_LLM_USE,
};
use crate::llm::{AGENT_EXTENDED_TRIAL_FEATURE_FLAG, LlmTokenClaims};
use crate::stripe_client::StripeCustomerId;
use crate::{
AppState, Error, Result, auth,
@@ -68,7 +65,7 @@ use std::{
rc::Rc,
sync::{
Arc, OnceLock,
atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
atomic::{AtomicBool, Ordering::SeqCst},
},
time::{Duration, Instant},
};
@@ -89,36 +86,10 @@ pub const CLEANUP_TIMEOUT: Duration = Duration::from_secs(15);
const MESSAGE_COUNT_PER_PAGE: usize = 100;
const MAX_MESSAGE_LEN: usize = 1024;
const NOTIFICATION_COUNT_PER_PAGE: usize = 50;
const MAX_CONCURRENT_CONNECTIONS: usize = 512;
static CONCURRENT_CONNECTIONS: AtomicUsize = AtomicUsize::new(0);
type MessageHandler =
Box<dyn Send + Sync + Fn(Box<dyn AnyTypedEnvelope>, Session) -> BoxFuture<'static, ()>>;
pub struct ConnectionGuard;
impl ConnectionGuard {
pub fn try_acquire() -> Result<Self, ()> {
let current_connections = CONCURRENT_CONNECTIONS.fetch_add(1, SeqCst);
if current_connections >= MAX_CONCURRENT_CONNECTIONS {
CONCURRENT_CONNECTIONS.fetch_sub(1, SeqCst);
tracing::error!(
"too many concurrent connections: {}",
current_connections + 1
);
return Err(());
}
Ok(ConnectionGuard)
}
}
impl Drop for ConnectionGuard {
fn drop(&mut self) {
CONCURRENT_CONNECTIONS.fetch_sub(1, SeqCst);
}
}
struct Response<R> {
peer: Arc<Peer>,
receipt: Receipt<R>,
@@ -341,7 +312,6 @@ impl Server {
.add_request_handler(
forward_read_only_project_request::<proto::LanguageServerIdForName>,
)
.add_request_handler(forward_read_only_project_request::<proto::GetDocumentDiagnostics>)
.add_request_handler(
forward_mutating_project_request::<proto::RegisterBufferWithLanguageServers>,
)
@@ -384,9 +354,6 @@ impl Server {
.add_message_handler(broadcast_project_message_from_host::<proto::BufferReloaded>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferSaved>)
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBases>)
.add_message_handler(
broadcast_project_message_from_host::<proto::PullWorkspaceDiagnostics>,
)
.add_request_handler(get_users)
.add_request_handler(fuzzy_search_users)
.add_request_handler(request_contact)
@@ -751,7 +718,6 @@ impl Server {
system_id: Option<String>,
send_connection_id: Option<oneshot::Sender<ConnectionId>>,
executor: Executor,
connection_guard: Option<ConnectionGuard>,
) -> impl Future<Output = ()> + use<> {
let this = self.clone();
let span = info_span!("handle connection", %address,
@@ -772,7 +738,6 @@ impl Server {
tracing::error!("server is tearing down");
return
}
let (connection_id, handle_io, mut incoming_rx) = this
.peer
.add_connection(connection, {
@@ -814,7 +779,6 @@ impl Server {
tracing::error!(?error, "failed to send initial client update");
return;
}
drop(connection_guard);
let handle_io = handle_io.fuse();
futures::pin_mut!(handle_io);
@@ -1186,19 +1150,6 @@ pub async fn handle_websocket_request(
}
let socket_address = socket_address.to_string();
// Acquire connection guard before WebSocket upgrade
let connection_guard = match ConnectionGuard::try_acquire() {
Ok(guard) => guard,
Err(()) => {
return (
StatusCode::SERVICE_UNAVAILABLE,
"Too many concurrent connections",
)
.into_response();
}
};
ws.on_upgrade(move |socket| {
let socket = socket
.map_ok(to_tungstenite_message)
@@ -1216,7 +1167,6 @@ pub async fn handle_websocket_request(
system_id_header.map(|header| header.to_string()),
None,
Executor::Production,
Some(connection_guard),
)
.await;
}
@@ -2819,12 +2769,8 @@ async fn make_update_user_plan_message(
(None, None)
};
let bypass_account_age_check = feature_flags
.iter()
.any(|flag| flag == BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG);
let account_too_young = !matches!(plan, proto::Plan::ZedPro)
&& !bypass_account_age_check
&& user.account_age() < MIN_ACCOUNT_AGE_FOR_LLM_USE;
let account_too_young =
!matches!(plan, proto::Plan::ZedPro) && user.account_age() < MIN_ACCOUNT_AGE_FOR_LLM_USE;
Ok(proto::UpdateUserPlan {
plan: plan.into(),
@@ -4125,6 +4071,9 @@ async fn accept_terms_of_service(
Ok(())
}
/// The minimum account age an account must have in order to use the LLM service.
pub const MIN_ACCOUNT_AGE_FOR_LLM_USE: chrono::Duration = chrono::Duration::days(30);
async fn get_llm_api_token(
_request: proto::GetLlmToken,
response: Response<proto::GetLlmToken>,

View File

@@ -7,7 +7,7 @@ use editor::{
Editor, RowInfo,
actions::{
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst,
ExpandMacroRecursively, Redo, Rename, SelectAll, ToggleCodeActions, Undo,
ExpandMacroRecursively, Redo, Rename, ToggleCodeActions, Undo,
},
test::{
editor_test_context::{AssertionContextManager, EditorTestContext},
@@ -2712,7 +2712,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes
params.text_document.uri,
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(params.position, lsp::Position::new(0, 0));
assert_eq!(params.position, lsp::Position::new(0, 0),);
Ok(Some(ExpandedMacro {
name: "test_macro_name".to_string(),
expansion: "test_macro_expansion on the host".to_string(),
@@ -2747,11 +2747,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes
params.text_document.uri,
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
assert_eq!(
params.position,
lsp::Position::new(0, 12),
"editor_b has selected the entire text and should query for a different position"
);
assert_eq!(params.position, lsp::Position::new(0, 0),);
Ok(Some(ExpandedMacro {
name: "test_macro_name".to_string(),
expansion: "test_macro_expansion on the client".to_string(),
@@ -2760,7 +2756,6 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes
);
editor_b.update_in(cx_b, |editor, window, cx| {
editor.select_all(&SelectAll, window, cx);
expand_macro_recursively(editor, &ExpandMacroRecursively, window, cx)
});
expand_request_b.next().await.unwrap();

View File

@@ -20,8 +20,8 @@ use gpui::{
UpdateGlobal, px, size,
};
use language::{
Diagnostic, DiagnosticEntry, DiagnosticSourceKind, FakeLspAdapter, Language, LanguageConfig,
LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
LineEnding, OffsetRangeExt, Point, Rope,
language_settings::{
AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter,
},
@@ -4237,8 +4237,7 @@ async fn test_collaborating_with_diagnostics(
message: "message 1".to_string(),
severity: lsp::DiagnosticSeverity::ERROR,
is_primary: true,
source_kind: DiagnosticSourceKind::Pushed,
..Diagnostic::default()
..Default::default()
}
},
DiagnosticEntry {
@@ -4248,8 +4247,7 @@ async fn test_collaborating_with_diagnostics(
severity: lsp::DiagnosticSeverity::WARNING,
message: "message 2".to_string(),
is_primary: true,
source_kind: DiagnosticSourceKind::Pushed,
..Diagnostic::default()
..Default::default()
}
}
]
@@ -4261,7 +4259,7 @@ async fn test_collaborating_with_diagnostics(
&lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: Vec::new(),
diagnostics: vec![],
},
);
executor.run_until_parked();

View File

@@ -258,7 +258,6 @@ impl TestServer {
None,
Some(connection_id_tx),
Executor::Deterministic(cx.background_executor().clone()),
None,
))
.detach();
let connection_id = connection_id_rx.await.map_err(|e| {

View File

@@ -15,6 +15,7 @@ use language::{
use project::{Completion, CompletionResponse, CompletionSource, search::SearchQuery};
use settings::Settings;
use std::{
cell::RefCell,
ops::Range,
rc::Rc,
sync::{Arc, LazyLock},
@@ -72,6 +73,16 @@ impl CompletionProvider for MessageEditorCompletionProvider {
})
}
fn resolve_completions(
&self,
_buffer: Entity<Buffer>,
_completion_indices: Vec<usize>,
_completions: Rc<RefCell<Box<[Completion]>>>,
_cx: &mut Context<Editor>,
) -> Task<anyhow::Result<bool>> {
Task::ready(Ok(false))
}
fn is_completion_trigger(
&self,
_buffer: &Entity<Buffer>,
@@ -244,7 +255,7 @@ impl MessageEditor {
{
if !candidates.is_empty() {
return cx.spawn(async move |_, cx| {
let completion_response = Self::completions_for_candidates(
let completion_response = Self::resolve_completions_for_candidates(
&cx,
query.as_str(),
&candidates,
@@ -262,7 +273,7 @@ impl MessageEditor {
{
if !candidates.is_empty() {
return cx.spawn(async move |_, cx| {
let completion_response = Self::completions_for_candidates(
let completion_response = Self::resolve_completions_for_candidates(
&cx,
query.as_str(),
candidates,
@@ -281,7 +292,7 @@ impl MessageEditor {
}]))
}
async fn completions_for_candidates(
async fn resolve_completions_for_candidates(
cx: &AsyncApp,
query: &str,
candidates: &[StringMatchCandidate],

View File

@@ -11,9 +11,6 @@ workspace = true
[lib]
path = "src/context_server.rs"
[features]
test-support = []
[dependencies]
anyhow.workspace = true
async-trait.workspace = true

View File

@@ -1,7 +1,5 @@
pub mod client;
pub mod protocol;
#[cfg(any(test, feature = "test-support"))]
pub mod test;
pub mod transport;
pub mod types;

View File

@@ -6,9 +6,10 @@
//! of messages.
use anyhow::Result;
use collections::HashMap;
use crate::client::Client;
use crate::types::{self, Notification, Request};
use crate::types;
pub struct ModelContextProtocol {
inner: Client,
@@ -20,10 +21,9 @@ impl ModelContextProtocol {
}
fn supported_protocols() -> Vec<types::ProtocolVersion> {
vec![
types::ProtocolVersion(types::LATEST_PROTOCOL_VERSION.to_string()),
types::ProtocolVersion(types::VERSION_2024_11_05.to_string()),
]
vec![types::ProtocolVersion(
types::LATEST_PROTOCOL_VERSION.to_string(),
)]
}
pub async fn initialize(
@@ -43,7 +43,7 @@ impl ModelContextProtocol {
let response: types::InitializeResponse = self
.inner
.request(types::requests::Initialize::METHOD, params)
.request(types::RequestType::Initialize.as_str(), params)
.await?;
anyhow::ensure!(
@@ -54,13 +54,16 @@ impl ModelContextProtocol {
log::trace!("mcp server info {:?}", response.server_info);
self.inner.notify(
types::NotificationType::Initialized.as_str(),
serde_json::json!({}),
)?;
let initialized_protocol = InitializedContextServerProtocol {
inner: self.inner,
initialize: response,
};
initialized_protocol.notify::<types::notifications::Initialized>(())?;
Ok(initialized_protocol)
}
}
@@ -91,11 +94,137 @@ impl InitializedContextServerProtocol {
}
}
pub async fn request<T: Request>(&self, params: T::Params) -> Result<T::Response> {
self.inner.request(T::METHOD, params).await
fn check_capability(&self, capability: ServerCapability) -> Result<()> {
anyhow::ensure!(
self.capable(capability),
"Server does not support {capability:?} capability"
);
Ok(())
}
pub fn notify<T: Notification>(&self, params: T::Params) -> Result<()> {
self.inner.notify(T::METHOD, params)
/// List the MCP prompts.
pub async fn list_prompts(&self) -> Result<Vec<types::Prompt>> {
self.check_capability(ServerCapability::Prompts)?;
let response: types::PromptsListResponse = self
.inner
.request(
types::RequestType::PromptsList.as_str(),
serde_json::json!({}),
)
.await?;
Ok(response.prompts)
}
/// List the MCP resources.
pub async fn list_resources(&self) -> Result<types::ResourcesListResponse> {
self.check_capability(ServerCapability::Resources)?;
let response: types::ResourcesListResponse = self
.inner
.request(
types::RequestType::ResourcesList.as_str(),
serde_json::json!({}),
)
.await?;
Ok(response)
}
/// Executes a prompt with the given arguments and returns the result.
pub async fn run_prompt<P: AsRef<str>>(
&self,
prompt: P,
arguments: HashMap<String, String>,
) -> Result<types::PromptsGetResponse> {
self.check_capability(ServerCapability::Prompts)?;
let params = types::PromptsGetParams {
name: prompt.as_ref().to_string(),
arguments: Some(arguments),
meta: None,
};
let response: types::PromptsGetResponse = self
.inner
.request(types::RequestType::PromptsGet.as_str(), params)
.await?;
Ok(response)
}
pub async fn completion<P: Into<String>>(
&self,
reference: types::CompletionReference,
argument: P,
value: P,
) -> Result<types::Completion> {
let params = types::CompletionCompleteParams {
r#ref: reference,
argument: types::CompletionArgument {
name: argument.into(),
value: value.into(),
},
meta: None,
};
let result: types::CompletionCompleteResponse = self
.inner
.request(types::RequestType::CompletionComplete.as_str(), params)
.await?;
let completion = types::Completion {
values: result.completion.values,
total: types::CompletionTotal::from_options(
result.completion.has_more,
result.completion.total,
),
};
Ok(completion)
}
/// List MCP tools.
pub async fn list_tools(&self) -> Result<types::ListToolsResponse> {
self.check_capability(ServerCapability::Tools)?;
let response = self
.inner
.request::<types::ListToolsResponse>(types::RequestType::ListTools.as_str(), ())
.await?;
Ok(response)
}
/// Executes a tool with the given arguments
pub async fn run_tool<P: AsRef<str>>(
&self,
tool: P,
arguments: Option<HashMap<String, serde_json::Value>>,
) -> Result<types::CallToolResponse> {
self.check_capability(ServerCapability::Tools)?;
let params = types::CallToolParams {
name: tool.as_ref().to_string(),
arguments,
meta: None,
};
let response: types::CallToolResponse = self
.inner
.request(types::RequestType::CallTool.as_str(), params)
.await?;
Ok(response)
}
}
impl InitializedContextServerProtocol {
pub async fn request<R: serde::de::DeserializeOwned>(
&self,
method: &str,
params: impl serde::Serialize,
) -> Result<R> {
self.inner.request(method, params).await
}
}

View File

@@ -1,118 +0,0 @@
use anyhow::Context as _;
use collections::HashMap;
use futures::{Stream, StreamExt as _, lock::Mutex};
use gpui::BackgroundExecutor;
use std::{pin::Pin, sync::Arc};
use crate::{
transport::Transport,
types::{Implementation, InitializeResponse, ProtocolVersion, ServerCapabilities},
};
pub fn create_fake_transport(
name: impl Into<String>,
executor: BackgroundExecutor,
) -> FakeTransport {
let name = name.into();
FakeTransport::new(executor).on_request::<crate::types::requests::Initialize>(move |_params| {
create_initialize_response(name.clone())
})
}
fn create_initialize_response(server_name: String) -> InitializeResponse {
InitializeResponse {
protocol_version: ProtocolVersion(crate::types::LATEST_PROTOCOL_VERSION.to_string()),
server_info: Implementation {
name: server_name,
version: "1.0.0".to_string(),
},
capabilities: ServerCapabilities::default(),
meta: None,
}
}
pub struct FakeTransport {
request_handlers:
HashMap<&'static str, Arc<dyn Fn(serde_json::Value) -> serde_json::Value + Send + Sync>>,
tx: futures::channel::mpsc::UnboundedSender<String>,
rx: Arc<Mutex<futures::channel::mpsc::UnboundedReceiver<String>>>,
executor: BackgroundExecutor,
}
impl FakeTransport {
pub fn new(executor: BackgroundExecutor) -> Self {
let (tx, rx) = futures::channel::mpsc::unbounded();
Self {
request_handlers: Default::default(),
tx,
rx: Arc::new(Mutex::new(rx)),
executor,
}
}
pub fn on_request<T: crate::types::Request>(
mut self,
handler: impl Fn(T::Params) -> T::Response + Send + Sync + 'static,
) -> Self {
self.request_handlers.insert(
T::METHOD,
Arc::new(move |value| {
let params = value.get("params").expect("Missing parameters").clone();
let params: T::Params =
serde_json::from_value(params).expect("Invalid parameters received");
let response = handler(params);
serde_json::to_value(response).unwrap()
}),
);
self
}
}
#[async_trait::async_trait]
impl Transport for FakeTransport {
async fn send(&self, message: String) -> anyhow::Result<()> {
if let Ok(msg) = serde_json::from_str::<serde_json::Value>(&message) {
let id = msg.get("id").and_then(|id| id.as_u64()).unwrap_or(0);
if let Some(method) = msg.get("method") {
let method = method.as_str().expect("Invalid method received");
if let Some(handler) = self.request_handlers.get(method) {
let payload = handler(msg);
let response = serde_json::json!({
"jsonrpc": "2.0",
"id": id,
"result": payload
});
self.tx
.unbounded_send(response.to_string())
.context("sending a message")?;
} else {
log::debug!("No handler registered for MCP request '{method}'");
}
}
}
Ok(())
}
fn receive(&self) -> Pin<Box<dyn Stream<Item = String> + Send>> {
let rx = self.rx.clone();
let executor = self.executor.clone();
Box::pin(futures::stream::unfold(rx, move |rx| {
let executor = executor.clone();
async move {
let mut rx_guard = rx.lock().await;
executor.simulate_random_delay().await;
if let Some(message) = rx_guard.next().await {
drop(rx_guard);
Some((message, rx))
} else {
None
}
}
}))
}
fn receive_err(&self) -> Pin<Box<dyn Stream<Item = String> + Send>> {
Box::pin(futures::stream::empty())
}
}

View File

@@ -1,144 +1,76 @@
use collections::HashMap;
use serde::de::DeserializeOwned;
use serde::{Deserialize, Serialize};
use url::Url;
pub const LATEST_PROTOCOL_VERSION: &str = "2025-03-26";
pub const VERSION_2024_11_05: &str = "2024-11-05";
pub const LATEST_PROTOCOL_VERSION: &str = "2024-11-05";
pub mod requests {
use super::*;
pub enum RequestType {
Initialize,
CallTool,
ResourcesUnsubscribe,
ResourcesSubscribe,
ResourcesRead,
ResourcesList,
LoggingSetLevel,
PromptsGet,
PromptsList,
CompletionComplete,
Ping,
ListTools,
ListResourceTemplates,
ListRoots,
}
macro_rules! request {
($method:expr, $name:ident, $params:ty, $response:ty) => {
pub struct $name;
impl Request for $name {
type Params = $params;
type Response = $response;
const METHOD: &'static str = $method;
}
};
impl RequestType {
pub fn as_str(&self) -> &'static str {
match self {
RequestType::Initialize => "initialize",
RequestType::CallTool => "tools/call",
RequestType::ResourcesUnsubscribe => "resources/unsubscribe",
RequestType::ResourcesSubscribe => "resources/subscribe",
RequestType::ResourcesRead => "resources/read",
RequestType::ResourcesList => "resources/list",
RequestType::LoggingSetLevel => "logging/setLevel",
RequestType::PromptsGet => "prompts/get",
RequestType::PromptsList => "prompts/list",
RequestType::CompletionComplete => "completion/complete",
RequestType::Ping => "ping",
RequestType::ListTools => "tools/list",
RequestType::ListResourceTemplates => "resources/templates/list",
RequestType::ListRoots => "roots/list",
}
}
request!(
"initialize",
Initialize,
InitializeParams,
InitializeResponse
);
request!("tools/call", CallTool, CallToolParams, CallToolResponse);
request!(
"resources/unsubscribe",
ResourcesUnsubscribe,
ResourcesUnsubscribeParams,
()
);
request!(
"resources/subscribe",
ResourcesSubscribe,
ResourcesSubscribeParams,
()
);
request!(
"resources/read",
ResourcesRead,
ResourcesReadParams,
ResourcesReadResponse
);
request!("resources/list", ResourcesList, (), ResourcesListResponse);
request!(
"logging/setLevel",
LoggingSetLevel,
LoggingSetLevelParams,
()
);
request!(
"prompts/get",
PromptsGet,
PromptsGetParams,
PromptsGetResponse
);
request!("prompts/list", PromptsList, (), PromptsListResponse);
request!(
"completion/complete",
CompletionComplete,
CompletionCompleteParams,
CompletionCompleteResponse
);
request!("ping", Ping, (), ());
request!("tools/list", ListTools, (), ListToolsResponse);
request!(
"resources/templates/list",
ListResourceTemplates,
(),
ListResourceTemplatesResponse
);
request!("roots/list", ListRoots, (), ListRootsResponse);
}
pub trait Request {
type Params: DeserializeOwned + Serialize + Send + Sync + 'static;
type Response: DeserializeOwned + Serialize + Send + Sync + 'static;
const METHOD: &'static str;
}
impl TryFrom<&str> for RequestType {
type Error = ();
pub mod notifications {
use super::*;
macro_rules! notification {
($method:expr, $name:ident, $params:ty) => {
pub struct $name;
impl Notification for $name {
type Params = $params;
const METHOD: &'static str = $method;
}
};
fn try_from(s: &str) -> Result<Self, Self::Error> {
match s {
"initialize" => Ok(RequestType::Initialize),
"tools/call" => Ok(RequestType::CallTool),
"resources/unsubscribe" => Ok(RequestType::ResourcesUnsubscribe),
"resources/subscribe" => Ok(RequestType::ResourcesSubscribe),
"resources/read" => Ok(RequestType::ResourcesRead),
"resources/list" => Ok(RequestType::ResourcesList),
"logging/setLevel" => Ok(RequestType::LoggingSetLevel),
"prompts/get" => Ok(RequestType::PromptsGet),
"prompts/list" => Ok(RequestType::PromptsList),
"completion/complete" => Ok(RequestType::CompletionComplete),
"ping" => Ok(RequestType::Ping),
"tools/list" => Ok(RequestType::ListTools),
"resources/templates/list" => Ok(RequestType::ListResourceTemplates),
"roots/list" => Ok(RequestType::ListRoots),
_ => Err(()),
}
}
notification!("notifications/initialized", Initialized, ());
notification!("notifications/progress", Progress, ProgressParams);
notification!("notifications/message", Message, MessageParams);
notification!(
"notifications/resources/updated",
ResourcesUpdated,
ResourcesUpdatedParams
);
notification!(
"notifications/resources/list_changed",
ResourcesListChanged,
()
);
notification!("notifications/tools/list_changed", ToolsListChanged, ());
notification!("notifications/prompts/list_changed", PromptsListChanged, ());
notification!("notifications/roots/list_changed", RootsListChanged, ());
}
pub trait Notification {
type Params: DeserializeOwned + Serialize + Send + Sync + 'static;
const METHOD: &'static str;
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MessageParams {
pub level: LoggingLevel,
pub logger: Option<String>,
pub data: serde_json::Value,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ResourcesUpdatedParams {
pub uri: String,
}
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct ProtocolVersion(pub String);
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct InitializeParams {
pub protocol_version: ProtocolVersion,
@@ -148,7 +80,7 @@ pub struct InitializeParams {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CallToolParams {
pub name: String,
@@ -158,7 +90,7 @@ pub struct CallToolParams {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ResourcesUnsubscribeParams {
pub uri: Url,
@@ -166,7 +98,7 @@ pub struct ResourcesUnsubscribeParams {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ResourcesSubscribeParams {
pub uri: Url,
@@ -174,7 +106,7 @@ pub struct ResourcesSubscribeParams {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ResourcesReadParams {
pub uri: Url,
@@ -182,7 +114,7 @@ pub struct ResourcesReadParams {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct LoggingSetLevelParams {
pub level: LoggingLevel,
@@ -190,7 +122,7 @@ pub struct LoggingSetLevelParams {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct PromptsGetParams {
pub name: String,
@@ -200,40 +132,37 @@ pub struct PromptsGetParams {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CompletionCompleteParams {
#[serde(rename = "ref")]
pub reference: CompletionReference,
pub r#ref: CompletionReference,
pub argument: CompletionArgument,
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(untagged)]
pub enum CompletionReference {
Prompt(PromptReference),
Resource(ResourceReference),
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct PromptReference {
#[serde(rename = "type")]
pub ty: PromptReferenceType,
pub r#type: PromptReferenceType,
pub name: String,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ResourceReference {
#[serde(rename = "type")]
pub ty: PromptReferenceType,
pub r#type: PromptReferenceType,
pub uri: Url,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum PromptReferenceType {
#[serde(rename = "ref/prompt")]
@@ -242,7 +171,7 @@ pub enum PromptReferenceType {
Resource,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CompletionArgument {
pub name: String,
@@ -259,7 +188,7 @@ pub struct InitializeResponse {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ResourcesReadResponse {
pub contents: Vec<ResourceContentsType>,
@@ -267,14 +196,14 @@ pub struct ResourcesReadResponse {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub enum ResourceContentsType {
Text(TextResourceContents),
Blob(BlobResourceContents),
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ResourcesListResponse {
pub resources: Vec<Resource>,
@@ -291,7 +220,7 @@ pub struct SamplingMessage {
pub content: MessageContent,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CreateMessageRequest {
pub messages: Vec<SamplingMessage>,
@@ -343,20 +272,13 @@ pub enum MessageContent {
#[serde(skip_serializing_if = "Option::is_none")]
annotations: Option<MessageAnnotations>,
},
#[serde(rename = "image", rename_all = "camelCase")]
#[serde(rename = "image")]
Image {
data: String,
mime_type: String,
#[serde(skip_serializing_if = "Option::is_none")]
annotations: Option<MessageAnnotations>,
},
#[serde(rename = "audio", rename_all = "camelCase")]
Audio {
data: String,
mime_type: String,
#[serde(skip_serializing_if = "Option::is_none")]
annotations: Option<MessageAnnotations>,
},
#[serde(rename = "resource")]
Resource {
resource: ResourceContents,
@@ -374,7 +296,7 @@ pub struct MessageAnnotations {
pub priority: Option<f64>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PromptsGetResponse {
#[serde(skip_serializing_if = "Option::is_none")]
@@ -384,7 +306,7 @@ pub struct PromptsGetResponse {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PromptsListResponse {
pub prompts: Vec<Prompt>,
@@ -394,7 +316,7 @@ pub struct PromptsListResponse {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CompletionCompleteResponse {
pub completion: CompletionResult,
@@ -402,7 +324,7 @@ pub struct CompletionCompleteResponse {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CompletionResult {
pub values: Vec<String>,
@@ -414,7 +336,7 @@ pub struct CompletionResult {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Prompt {
pub name: String,
@@ -424,7 +346,7 @@ pub struct Prompt {
pub arguments: Option<Vec<PromptArgument>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct PromptArgument {
pub name: String,
@@ -453,8 +375,6 @@ pub struct ServerCapabilities {
#[serde(skip_serializing_if = "Option::is_none")]
pub logging: Option<serde_json::Value>,
#[serde(skip_serializing_if = "Option::is_none")]
pub completions: Option<serde_json::Value>,
#[serde(skip_serializing_if = "Option::is_none")]
pub prompts: Option<PromptsCapabilities>,
#[serde(skip_serializing_if = "Option::is_none")]
pub resources: Option<ResourcesCapabilities>,
@@ -499,28 +419,6 @@ pub struct Tool {
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
pub input_schema: serde_json::Value,
#[serde(skip_serializing_if = "Option::is_none")]
pub annotations: Option<ToolAnnotations>,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ToolAnnotations {
/// A human-readable title for the tool.
#[serde(skip_serializing_if = "Option::is_none")]
pub title: Option<String>,
/// If true, the tool does not modify its environment.
#[serde(skip_serializing_if = "Option::is_none")]
pub read_only_hint: Option<bool>,
/// If true, the tool may perform destructive updates to its environment.
#[serde(skip_serializing_if = "Option::is_none")]
pub destructive_hint: Option<bool>,
/// If true, calling the tool repeatedly with the same arguments will have no additional effect on its environment.
#[serde(skip_serializing_if = "Option::is_none")]
pub idempotent_hint: Option<bool>,
/// If true, this tool may interact with an "open world" of external entities.
#[serde(skip_serializing_if = "Option::is_none")]
pub open_world_hint: Option<bool>,
}
#[derive(Debug, Serialize, Deserialize)]
@@ -611,6 +509,34 @@ pub struct ModelHint {
pub name: Option<String>,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum NotificationType {
Initialized,
Progress,
Message,
ResourcesUpdated,
ResourcesListChanged,
ToolsListChanged,
PromptsListChanged,
RootsListChanged,
}
impl NotificationType {
pub fn as_str(&self) -> &'static str {
match self {
NotificationType::Initialized => "notifications/initialized",
NotificationType::Progress => "notifications/progress",
NotificationType::Message => "notifications/message",
NotificationType::ResourcesUpdated => "notifications/resources/updated",
NotificationType::ResourcesListChanged => "notifications/resources/list_changed",
NotificationType::ToolsListChanged => "notifications/tools/list_changed",
NotificationType::PromptsListChanged => "notifications/prompts/list_changed",
NotificationType::RootsListChanged => "notifications/roots/list_changed",
}
}
}
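// A minimal usage sketch (hypothetical, for illustration only): as_str maps a
// notification variant to its JSON-RPC method string, matching the arms above,
// e.g.
//     assert_eq!(
//         NotificationType::ToolsListChanged.as_str(),
//         "notifications/tools/list_changed",
//     );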
#[derive(Debug, Serialize)]
#[serde(untagged)]
pub enum ClientNotification {
@@ -631,14 +557,12 @@ pub enum ProgressToken {
Number(f64),
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ProgressParams {
pub progress_token: ProgressToken,
pub progress: f64,
#[serde(skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub total: Option<f64>,
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
pub meta: Option<HashMap<String, serde_json::Value>>,
@@ -665,7 +589,7 @@ pub struct Completion {
pub total: CompletionTotal,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CallToolResponse {
pub content: Vec<ToolResponseContent>,
@@ -682,8 +606,6 @@ pub enum ToolResponseContent {
Text { text: String },
#[serde(rename = "image", rename_all = "camelCase")]
Image { data: String, mime_type: String },
#[serde(rename = "audio", rename_all = "camelCase")]
Audio { data: String, mime_type: String },
#[serde(rename = "resource")]
Resource { resource: ResourceContents },
}
@@ -698,7 +620,7 @@ pub struct ListToolsResponse {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ListResourceTemplatesResponse {
pub resource_templates: Vec<ResourceTemplate>,
@@ -708,7 +630,7 @@ pub struct ListResourceTemplatesResponse {
pub meta: Option<HashMap<String, serde_json::Value>>,
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ListRootsResponse {
pub roots: Vec<Root>,

View File

@@ -408,30 +408,24 @@ impl Copilot {
let proxy_url = copilot_settings.proxy.clone()?;
let no_verify = copilot_settings.proxy_no_verify;
let http_or_https_proxy = if proxy_url.starts_with("http:") {
Some("HTTP_PROXY")
"HTTP_PROXY"
} else if proxy_url.starts_with("https:") {
Some("HTTPS_PROXY")
"HTTPS_PROXY"
} else {
log::error!(
"Unsupported protocol scheme for language server proxy (must be http or https)"
);
None
return None;
};
let mut env = HashMap::default();
env.insert(http_or_https_proxy.to_string(), proxy_url);
if let Some(proxy_type) = http_or_https_proxy {
env.insert(proxy_type.to_string(), proxy_url);
if let Some(true) = no_verify {
env.insert("NODE_TLS_REJECT_UNAUTHORIZED".to_string(), "0".to_string());
};
}
if let Some(true) = no_verify {
env.insert("NODE_TLS_REJECT_UNAUTHORIZED".to_string(), "0".to_string());
};
if let Ok(oauth_token) = env::var(copilot_chat::COPILOT_OAUTH_ENV_VAR) {
env.insert(copilot_chat::COPILOT_OAUTH_ENV_VAR.to_string(), oauth_token);
}
if env.is_empty() { None } else { Some(env) }
Some(env)
}
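// A minimal sketch of the result, assuming hypothetical settings
// proxy = "http://127.0.0.1:8080" and proxy_no_verify = Some(true): the map
// returned above would contain
//     HTTP_PROXY=http://127.0.0.1:8080
//     NODE_TLS_REJECT_UNAUTHORIZED=0
// plus GH_COPILOT_TOKEN when that variable is present in Zed's own environment.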
#[cfg(any(test, feature = "test-support"))]
@@ -526,7 +520,7 @@ impl Copilot {
let server = cx
.update(|cx| {
let mut params = server.default_initialize_params(false, cx);
let mut params = server.default_initialize_params(cx);
params.initialization_options = Some(editor_info_json);
server.initialize(params, configuration.into(), cx)
})?

View File

@@ -8,7 +8,6 @@ use chrono::DateTime;
use collections::HashSet;
use fs::Fs;
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use gpui::WeakEntity;
use gpui::{App, AsyncApp, Global, prelude::*};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use itertools::Itertools;
@@ -16,14 +15,9 @@ use paths::home_dir;
use serde::{Deserialize, Serialize};
use settings::watch_config_dir;
pub const COPILOT_OAUTH_ENV_VAR: &str = "GH_COPILOT_TOKEN";
#[derive(Default, Clone, Debug, PartialEq)]
pub struct CopilotChatSettings {
pub api_url: Arc<str>,
pub auth_url: Arc<str>,
pub models_url: Arc<str>,
}
pub const COPILOT_CHAT_COMPLETION_URL: &str = "https://api.githubcopilot.com/chat/completions";
pub const COPILOT_CHAT_AUTH_URL: &str = "https://api.github.com/copilot_internal/v2/token";
pub const COPILOT_CHAT_MODELS_URL: &str = "https://api.githubcopilot.com/models";
// Copilot's base model; defined by Microsoft in the premium requests table
// This will be moved to the front of the Copilot model list, and will be used for
@@ -346,7 +340,6 @@ impl Global for GlobalCopilotChat {}
pub struct CopilotChat {
oauth_token: Option<String>,
api_token: Option<ApiToken>,
settings: CopilotChatSettings,
models: Option<Vec<Model>>,
client: Arc<dyn HttpClient>,
}
@@ -380,77 +373,62 @@ impl CopilotChat {
.map(|model| model.0.clone())
}
fn new(fs: Arc<dyn Fs>, client: Arc<dyn HttpClient>, cx: &mut Context<Self>) -> Self {
pub fn new(fs: Arc<dyn Fs>, client: Arc<dyn HttpClient>, cx: &App) -> Self {
let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
let dir_path = copilot_chat_config_dir();
let settings = CopilotChatSettings::default();
cx.spawn(async move |this, cx| {
let mut parent_watch_rx = watch_config_dir(
cx.background_executor(),
fs.clone(),
dir_path.clone(),
config_paths,
);
while let Some(contents) = parent_watch_rx.next().await {
let oauth_token = extract_oauth_token(contents);
this.update(cx, |this, cx| {
this.oauth_token = oauth_token.clone();
cx.notify();
})?;
cx.spawn({
let client = client.clone();
async move |cx| {
let mut parent_watch_rx = watch_config_dir(
cx.background_executor(),
fs.clone(),
dir_path.clone(),
config_paths,
);
while let Some(contents) = parent_watch_rx.next().await {
let oauth_token = extract_oauth_token(contents);
cx.update(|cx| {
if let Some(this) = Self::global(cx).as_ref() {
this.update(cx, |this, cx| {
this.oauth_token = oauth_token.clone();
cx.notify();
});
}
})?;
if oauth_token.is_some() {
Self::update_models(&this, cx).await?;
if let Some(ref oauth_token) = oauth_token {
let api_token = request_api_token(oauth_token, client.clone()).await?;
cx.update(|cx| {
if let Some(this) = Self::global(cx).as_ref() {
this.update(cx, |this, cx| {
this.api_token = Some(api_token.clone());
cx.notify();
});
}
})?;
let models = get_models(api_token.api_key, client.clone()).await?;
cx.update(|cx| {
if let Some(this) = Self::global(cx).as_ref() {
this.update(cx, |this, cx| {
this.models = Some(models);
cx.notify();
});
}
})?;
}
}
anyhow::Ok(())
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
let this = Self {
oauth_token: std::env::var(COPILOT_OAUTH_ENV_VAR).ok(),
Self {
oauth_token: None,
api_token: None,
models: None,
settings,
client,
};
if this.oauth_token.is_some() {
cx.spawn(async move |this, mut cx| Self::update_models(&this, &mut cx).await)
.detach_and_log_err(cx);
}
this
}
async fn update_models(this: &WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
let (oauth_token, client, auth_url) = this.read_with(cx, |this, _| {
(
this.oauth_token.clone(),
this.client.clone(),
this.settings.auth_url.clone(),
)
})?;
let api_token = request_api_token(
&oauth_token.ok_or_else(|| {
anyhow!("OAuth token is missing while updating Copilot Chat models")
})?,
auth_url,
client.clone(),
)
.await?;
let models_url = this.update(cx, |this, cx| {
this.api_token = Some(api_token.clone());
cx.notify();
this.settings.models_url.clone()
})?;
let models = get_models(models_url, api_token.api_key, client.clone()).await?;
this.update(cx, |this, cx| {
this.models = Some(models);
cx.notify();
})?;
anyhow::Ok(())
}
pub fn is_authenticated(&self) -> bool {
@@ -471,23 +449,20 @@ impl CopilotChat {
.flatten()
.context("Copilot chat is not enabled")?;
let (oauth_token, api_token, client, api_url, auth_url) =
this.read_with(&cx, |this, _| {
(
this.oauth_token.clone(),
this.api_token.clone(),
this.client.clone(),
this.settings.api_url.clone(),
this.settings.auth_url.clone(),
)
})?;
let (oauth_token, api_token, client) = this.read_with(&cx, |this, _| {
(
this.oauth_token.clone(),
this.api_token.clone(),
this.client.clone(),
)
})?;
let oauth_token = oauth_token.context("No OAuth token available")?;
let token = match api_token {
Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
_ => {
let token = request_api_token(&oauth_token, auth_url, client.clone()).await?;
let token = request_api_token(&oauth_token, client.clone()).await?;
this.update(&mut cx, |this, cx| {
this.api_token = Some(token.clone());
cx.notify();
@@ -496,28 +471,12 @@ impl CopilotChat {
}
};
stream_completion(client.clone(), token.api_key, api_url, request).await
}
pub fn set_settings(&mut self, settings: CopilotChatSettings, cx: &mut Context<Self>) {
let same_settings = self.settings == settings;
self.settings = settings;
if !same_settings {
cx.spawn(async move |this, cx| {
Self::update_models(&this, cx).await?;
Ok::<_, anyhow::Error>(())
})
.detach();
}
stream_completion(client.clone(), token.api_key, request).await
}
}
async fn get_models(
models_url: Arc<str>,
api_token: String,
client: Arc<dyn HttpClient>,
) -> Result<Vec<Model>> {
let all_models = request_models(models_url, api_token, client).await?;
async fn get_models(api_token: String, client: Arc<dyn HttpClient>) -> Result<Vec<Model>> {
let all_models = request_models(api_token, client).await?;
let mut models: Vec<Model> = all_models
.into_iter()
@@ -545,14 +504,10 @@ async fn get_models(
Ok(models)
}
async fn request_models(
models_url: Arc<str>,
api_token: String,
client: Arc<dyn HttpClient>,
) -> Result<Vec<Model>> {
async fn request_models(api_token: String, client: Arc<dyn HttpClient>) -> Result<Vec<Model>> {
let request_builder = HttpRequest::builder()
.method(Method::GET)
.uri(models_url.as_ref())
.uri(COPILOT_CHAT_MODELS_URL)
.header("Authorization", format!("Bearer {}", api_token))
.header("Content-Type", "application/json")
.header("Copilot-Integration-Id", "vscode-chat");
@@ -576,14 +531,10 @@ async fn request_models(
Ok(models)
}
async fn request_api_token(
oauth_token: &str,
auth_url: Arc<str>,
client: Arc<dyn HttpClient>,
) -> Result<ApiToken> {
async fn request_api_token(oauth_token: &str, client: Arc<dyn HttpClient>) -> Result<ApiToken> {
let request_builder = HttpRequest::builder()
.method(Method::GET)
.uri(auth_url.as_ref())
.uri(COPILOT_CHAT_AUTH_URL)
.header("Authorization", format!("token {}", oauth_token))
.header("Accept", "application/json");
@@ -628,7 +579,6 @@ fn extract_oauth_token(contents: String) -> Option<String> {
async fn stream_completion(
client: Arc<dyn HttpClient>,
api_key: String,
completion_url: Arc<str>,
request: Request,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
let is_vision_request = request.messages.last().map_or(false, |message| match message {
@@ -642,7 +592,7 @@ async fn stream_completion(
let request_builder = HttpRequest::builder()
.method(Method::POST)
.uri(completion_url.as_ref())
.uri(COPILOT_CHAT_COMPLETION_URL)
.header(
"Editor-Version",
format!(

View File

@@ -23,7 +23,6 @@ doctest = false
[dependencies]
anyhow.workspace = true
async-trait.workspace = true
collections.workspace = true
dap.workspace = true
futures.workspace = true
gpui.workspace = true

View File

@@ -1,18 +1,16 @@
use anyhow::{Result, bail};
use anyhow::Result;
use async_trait::async_trait;
use collections::FxHashMap;
use dap::{
DebugRequest, StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
DebugRequest, StartDebuggingRequestArguments,
adapters::{
DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName, DebugTaskDefinition,
},
};
use gpui::{AsyncApp, SharedString};
use language::LanguageName;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::path::PathBuf;
use std::{ffi::OsStr, sync::Arc};
use std::sync::Arc;
use task::{DebugScenario, ZedDebugConfig};
use util::command::new_smol_command;
@@ -23,18 +21,6 @@ impl RubyDebugAdapter {
const ADAPTER_NAME: &'static str = "Ruby";
}
#[derive(Serialize, Deserialize)]
struct RubyDebugConfig {
script_or_command: Option<String>,
script: Option<String>,
command: Option<String>,
#[serde(default)]
args: Vec<String>,
#[serde(default)]
env: FxHashMap<String, String>,
cwd: Option<PathBuf>,
}
#[async_trait(?Send)]
impl DebugAdapter for RubyDebugAdapter {
fn name(&self) -> DebugAdapterName {
@@ -45,70 +31,185 @@ impl DebugAdapter for RubyDebugAdapter {
Some(SharedString::new_static("Ruby").into())
}
fn request_kind(&self, _: &serde_json::Value) -> Result<StartDebuggingRequestArgumentsRequest> {
Ok(StartDebuggingRequestArgumentsRequest::Launch)
}
async fn dap_schema(&self) -> serde_json::Value {
json!({
"type": "object",
"properties": {
"command": {
"type": "string",
"description": "Command name (ruby, rake, bin/rails, bundle exec ruby, etc)",
"oneOf": [
{
"allOf": [
{
"type": "object",
"required": ["request"],
"properties": {
"request": {
"type": "string",
"enum": ["launch"],
"description": "Request to launch a new process"
}
}
},
{
"type": "object",
"required": ["script"],
"properties": {
"command": {
"type": "string",
"description": "Command name (ruby, rake, bin/rails, bundle exec ruby, etc)",
"default": "ruby"
},
"script": {
"type": "string",
"description": "Absolute path to a Ruby file."
},
"cwd": {
"type": "string",
"description": "Directory to execute the program in",
"default": "${ZED_WORKTREE_ROOT}"
},
"args": {
"type": "array",
"description": "Command line arguments passed to the program",
"items": {
"type": "string"
},
"default": []
},
"env": {
"type": "object",
"description": "Additional environment variables to pass to the debugging (and debugged) process",
"default": {}
},
"showProtocolLog": {
"type": "boolean",
"description": "Show a log of DAP requests, events, and responses",
"default": false
},
"useBundler": {
"type": "boolean",
"description": "Execute Ruby programs with `bundle exec` instead of directly",
"default": false
},
"bundlePath": {
"type": "string",
"description": "Location of the bundle executable"
},
"rdbgPath": {
"type": "string",
"description": "Location of the rdbg executable"
},
"askParameters": {
"type": "boolean",
"description": "Ask parameters at first."
},
"debugPort": {
"type": "string",
"description": "UNIX domain socket name or TPC/IP host:port"
},
"waitLaunchTime": {
"type": "number",
"description": "Wait time before connection in milliseconds"
},
"localfs": {
"type": "boolean",
"description": "true if the VSCode and debugger run on a same machine",
"default": false
},
"useTerminal": {
"type": "boolean",
"description": "Create a new terminal and then execute commands there",
"default": false
}
}
}
]
},
"script": {
"type": "string",
"description": "Absolute path to a Ruby file."
},
"cwd": {
"type": "string",
"description": "Directory to execute the program in",
"default": "${ZED_WORKTREE_ROOT}"
},
"args": {
"type": "array",
"description": "Command line arguments passed to the program",
"items": {
"type": "string"
},
"default": []
},
"env": {
"type": "object",
"description": "Additional environment variables to pass to the debugging (and debugged) process",
"default": {}
},
}
{
"allOf": [
{
"type": "object",
"required": ["request"],
"properties": {
"request": {
"type": "string",
"enum": ["attach"],
"description": "Request to attach to an existing process"
}
}
},
{
"type": "object",
"properties": {
"rdbgPath": {
"type": "string",
"description": "Location of the rdbg executable"
},
"debugPort": {
"type": "string",
"description": "UNIX domain socket name or TPC/IP host:port"
},
"showProtocolLog": {
"type": "boolean",
"description": "Show a log of DAP requests, events, and responses",
"default": false
},
"localfs": {
"type": "boolean",
"description": "true if the VSCode and debugger run on a same machine",
"default": false
},
"localfsMap": {
"type": "string",
"description": "Specify pairs of remote root path and local root path like `/remote_dir:/local_dir`. You can specify multiple pairs like `/rem1:/loc1,/rem2:/loc2` by concatenating with `,`."
},
"env": {
"type": "object",
"description": "Additional environment variables to pass to the rdbg process",
"default": {}
}
}
}
]
}
]
})
}
fn config_from_zed_format(&self, zed_scenario: ZedDebugConfig) -> Result<DebugScenario> {
match zed_scenario.request {
let mut config = serde_json::Map::new();
match &zed_scenario.request {
DebugRequest::Launch(launch) => {
let config = RubyDebugConfig {
script_or_command: Some(launch.program),
script: None,
command: None,
args: launch.args,
env: launch.env,
cwd: launch.cwd.clone(),
};
config.insert("request".to_string(), json!("launch"));
config.insert("script".to_string(), json!(launch.program));
config.insert("command".to_string(), json!("ruby"));
let config = serde_json::to_value(config)?;
if !launch.args.is_empty() {
config.insert("args".to_string(), json!(launch.args));
}
Ok(DebugScenario {
adapter: zed_scenario.adapter,
label: zed_scenario.label,
config,
tcp_connection: None,
build: None,
})
if !launch.env.is_empty() {
config.insert("env".to_string(), json!(launch.env));
}
if let Some(cwd) = &launch.cwd {
config.insert("cwd".to_string(), json!(cwd));
}
// Ruby stops on entry so there's no need to handle that case
}
DebugRequest::Attach(_) => {
anyhow::bail!("Attach requests are unsupported");
DebugRequest::Attach(attach) => {
config.insert("request".to_string(), json!("attach"));
config.insert("processId".to_string(), json!(attach.process_id));
}
}
Ok(DebugScenario {
adapter: zed_scenario.adapter,
label: zed_scenario.label,
config: serde_json::Value::Object(config),
tcp_connection: None,
build: None,
})
}
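// For illustration (hypothetical values, not part of this change): a Zed launch
// config with program "bin/app", args ["--verbose"], and cwd "/work" would be
// converted by config_from_zed_format above into roughly:
//     { "request": "launch", "script": "bin/app", "command": "ruby",
//       "args": ["--verbose"], "cwd": "/work" }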
async fn get_binary(
@@ -146,34 +247,13 @@ impl DebugAdapter for RubyDebugAdapter {
let tcp_connection = definition.tcp_connection.clone().unwrap_or_default();
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
let ruby_config = serde_json::from_value::<RubyDebugConfig>(definition.config.clone())?;
let mut arguments = vec![
let arguments = vec![
"--open".to_string(),
format!("--port={}", port),
format!("--host={}", host),
];
if let Some(script) = &ruby_config.script {
arguments.push(script.clone());
} else if let Some(command) = &ruby_config.command {
arguments.push("--command".to_string());
arguments.push(command.clone());
} else if let Some(command_or_script) = &ruby_config.script_or_command {
if delegate
.which(OsStr::new(&command_or_script))
.await
.is_some()
{
arguments.push("--command".to_string());
}
arguments.push(command_or_script.clone());
} else {
bail!("Ruby debug config must have 'script' or 'command' args");
}
arguments.extend(ruby_config.args);
Ok(DebugAdapterBinary {
command: rdbg_path.to_string_lossy().to_string(),
arguments,
@@ -182,12 +262,8 @@ impl DebugAdapter for RubyDebugAdapter {
port,
timeout,
}),
cwd: Some(
ruby_config
.cwd
.unwrap_or(delegate.worktree_root_path().to_owned()),
),
envs: ruby_config.env.into_iter().collect(),
cwd: None,
envs: std::collections::HashMap::default(),
request_args: StartDebuggingRequestArguments {
request: self.request_kind(&definition.config)?,
configuration: definition.config.clone(),

View File

@@ -39,7 +39,6 @@ file_icons.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
itertools.workspace = true
language.workspace = true
log.workspace = true
menu.workspace = true

View File

@@ -342,7 +342,7 @@ impl DebugPanel {
window.defer(cx, move |window, cx| {
workspace
.update(cx, |workspace, cx| {
NewProcessModal::show(workspace, window, NewProcessMode::Debug, None, cx);
NewProcessModal::show(workspace, window, NewProcessMode::Launch, None, cx);
})
.ok();
});

View File

@@ -19,7 +19,6 @@ use gpui::{
InteractiveText, KeyContext, PromptButton, PromptLevel, Render, StyledText, Subscription,
TextStyle, UnderlineStyle, WeakEntity,
};
use itertools::Itertools as _;
use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch};
use project::{ProjectPath, TaskContexts, TaskSourceKind, task_store::TaskStore};
use settings::{Settings, initial_local_debug_tasks_content};
@@ -50,7 +49,7 @@ pub(super) struct NewProcessModal {
mode: NewProcessMode,
debug_picker: Entity<Picker<DebugDelegate>>,
attach_mode: Entity<AttachMode>,
launch_mode: Entity<ConfigureMode>,
launch_mode: Entity<LaunchMode>,
task_mode: TaskMode,
debugger: Option<DebugAdapterName>,
// save_scenario_state: Option<SaveScenarioState>,
@@ -98,13 +97,13 @@ impl NewProcessModal {
workspace.toggle_modal(window, cx, |window, cx| {
let attach_mode = AttachMode::new(None, workspace_handle.clone(), window, cx);
let debug_picker = cx.new(|cx| {
let launch_picker = cx.new(|cx| {
let delegate =
DebugDelegate::new(debug_panel.downgrade(), task_store.clone());
Picker::uniform_list(delegate, window, cx).modal(false)
});
let configure_mode = ConfigureMode::new(window, cx);
let configure_mode = LaunchMode::new(window, cx);
let task_overrides = Some(TaskOverrides { reveal_target });
@@ -123,7 +122,7 @@ impl NewProcessModal {
};
let _subscriptions = [
cx.subscribe(&debug_picker, |_, _, _, cx| {
cx.subscribe(&launch_picker, |_, _, _, cx| {
cx.emit(DismissEvent);
}),
cx.subscribe(
@@ -138,76 +137,19 @@ impl NewProcessModal {
];
cx.spawn_in(window, {
let debug_picker = debug_picker.downgrade();
let launch_picker = launch_picker.downgrade();
let configure_mode = configure_mode.downgrade();
let task_modal = task_mode.task_modal.downgrade();
let workspace = workspace_handle.clone();
async move |this, cx| {
let task_contexts = task_contexts.await;
let task_contexts = Arc::new(task_contexts);
let lsp_task_sources = task_contexts.lsp_task_sources.clone();
let task_position = task_contexts.latest_selection;
// Get LSP tasks and filter out based on language vs lsp preference
let (lsp_tasks, prefer_lsp) =
workspace.update(cx, |workspace, cx| {
let lsp_tasks = editor::lsp_tasks(
workspace.project().clone(),
&lsp_task_sources,
task_position,
cx,
);
let prefer_lsp = workspace
.active_item(cx)
.and_then(|item| item.downcast::<Editor>())
.map(|editor| {
editor
.read(cx)
.buffer()
.read(cx)
.language_settings(cx)
.tasks
.prefer_lsp
})
.unwrap_or(false);
(lsp_tasks, prefer_lsp)
})?;
let lsp_tasks = lsp_tasks.await;
let add_current_language_tasks = !prefer_lsp || lsp_tasks.is_empty();
let lsp_tasks = lsp_tasks
.into_iter()
.flat_map(|(kind, tasks_with_locations)| {
tasks_with_locations
.into_iter()
.sorted_by_key(|(location, task)| {
(location.is_none(), task.resolved_label.clone())
})
.map(move |(_, task)| (kind.clone(), task))
})
.collect::<Vec<_>>();
let Some(task_inventory) = task_store
.update(cx, |task_store, _| task_store.task_inventory().cloned())?
else {
return Ok(());
};
let (used_tasks, current_resolved_tasks) =
task_inventory.update(cx, |task_inventory, cx| {
task_inventory
.used_and_current_resolved_tasks(&task_contexts, cx)
})?;
debug_picker
launch_picker
.update_in(cx, |picker, window, cx| {
picker.delegate.tasks_loaded(
picker.delegate.task_contexts_loaded(
task_contexts.clone(),
languages,
lsp_tasks.clone(),
current_resolved_tasks.clone(),
add_current_language_tasks,
window,
cx,
);
picker.refresh(window, cx);
@@ -228,15 +170,7 @@ impl NewProcessModal {
task_modal
.update_in(cx, |task_modal, window, cx| {
task_modal.tasks_loaded(
task_contexts,
lsp_tasks,
used_tasks,
current_resolved_tasks,
add_current_language_tasks,
window,
cx,
);
task_modal.task_contexts_loaded(task_contexts, window, cx);
})
.ok();
@@ -244,14 +178,12 @@ impl NewProcessModal {
cx.notify();
})
.ok();
anyhow::Ok(())
}
})
.detach();
Self {
debug_picker,
debug_picker: launch_picker,
attach_mode,
launch_mode: configure_mode,
task_mode,
@@ -888,18 +820,18 @@ impl RenderOnce for AttachMode {
}
#[derive(Clone)]
pub(super) struct ConfigureMode {
pub(super) struct LaunchMode {
program: Entity<Editor>,
cwd: Entity<Editor>,
stop_on_entry: ToggleState,
// save_to_debug_json: ToggleState,
}
impl ConfigureMode {
impl LaunchMode {
pub(super) fn new(window: &mut Window, cx: &mut App) -> Entity<Self> {
let program = cx.new(|cx| Editor::single_line(window, cx));
program.update(cx, |this, cx| {
this.set_placeholder_text("ENV=Zed ~/bin/program --option", cx);
this.set_placeholder_text("ENV=Zed ~/bin/debugger --launch", cx);
});
let cwd = cx.new(|cx| Editor::single_line(window, cx));
@@ -987,7 +919,7 @@ impl ConfigureMode {
.child(adapter_menu),
)
.child(
Label::new("Program")
Label::new("Debugger Program")
.size(ui::LabelSize::Small)
.color(Color::Muted),
)
@@ -1135,29 +1067,21 @@ impl DebugDelegate {
(language, scenario)
}
pub fn tasks_loaded(
pub fn task_contexts_loaded(
&mut self,
task_contexts: Arc<TaskContexts>,
languages: Arc<LanguageRegistry>,
lsp_tasks: Vec<(TaskSourceKind, task::ResolvedTask)>,
current_resolved_tasks: Vec<(TaskSourceKind, task::ResolvedTask)>,
add_current_language_tasks: bool,
_window: &mut Window,
cx: &mut Context<Picker<Self>>,
) {
self.task_contexts = Some(task_contexts.clone());
self.task_contexts = Some(task_contexts);
let (recent, scenarios) = self
.task_store
.update(cx, |task_store, cx| {
task_store.task_inventory().map(|inventory| {
inventory.update(cx, |inventory, cx| {
inventory.list_debug_scenarios(
&task_contexts,
lsp_tasks,
current_resolved_tasks,
add_current_language_tasks,
cx,
)
inventory.list_debug_scenarios(self.task_contexts.as_ref().unwrap(), cx)
})
})
})
@@ -1333,17 +1257,12 @@ impl PickerDelegate for DebugDelegate {
.map(|icon| icon.color(Color::Muted).size(IconSize::Small));
let indicator = if matches!(task_kind, Some(TaskSourceKind::Lsp { .. })) {
Some(Indicator::icon(
Icon::new(IconName::BoltFilled)
.color(Color::Muted)
.size(IconSize::Small),
Icon::new(IconName::BoltFilled).color(Color::Muted),
))
} else {
None
};
let icon = icon.map(|icon| {
IconWithIndicator::new(icon, indicator)
.indicator_border_color(Some(cx.theme().colors().border_transparent))
});
let icon = icon.map(|icon| IconWithIndicator::new(icon, indicator));
Some(
ListItem::new(SharedString::from(format!("debug-scenario-selection-{ix}")))

View File

@@ -282,6 +282,16 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider {
}
}
fn resolve_completions(
&self,
_buffer: Entity<Buffer>,
_completion_indices: Vec<usize>,
_completions: Rc<RefCell<Box<[Completion]>>>,
_cx: &mut Context<Editor>,
) -> gpui::Task<anyhow::Result<bool>> {
Task::ready(Ok(false))
}
fn apply_additional_edits_for_completion(
&self,
_buffer: Entity<Buffer>,

View File

@@ -5,8 +5,8 @@ use std::time::Duration;
use anyhow::{Context as _, Result, anyhow};
use dap::StackFrameId;
use gpui::{
AnyElement, Entity, EventEmitter, FocusHandle, Focusable, ListState, MouseButton, Stateful,
Subscription, Task, WeakEntity, list,
AnyElement, Entity, EventEmitter, FocusHandle, Focusable, MouseButton, ScrollStrategy,
Stateful, Subscription, Task, UniformListScrollHandle, WeakEntity, uniform_list,
};
use crate::StackTraceView;
@@ -35,7 +35,7 @@ pub struct StackFrameList {
selected_ix: Option<usize>,
opened_stack_frame_id: Option<StackFrameId>,
scrollbar_state: ScrollbarState,
list_state: ListState,
scroll_handle: UniformListScrollHandle,
_refresh_task: Task<()>,
}
@@ -54,6 +54,7 @@ impl StackFrameList {
cx: &mut Context<Self>,
) -> Self {
let focus_handle = cx.focus_handle();
let scroll_handle = UniformListScrollHandle::new();
let _subscription =
cx.subscribe_in(&session, window, |this, _, event, window, cx| match event {
@@ -66,16 +67,8 @@ impl StackFrameList {
_ => {}
});
let list_state = ListState::new(0, gpui::ListAlignment::Top, px(1000.), {
let this = cx.weak_entity();
move |ix, _window, cx| {
this.update(cx, |this, cx| this.render_entry(ix, cx))
.unwrap_or(div().into_any())
}
});
let scrollbar_state = ScrollbarState::new(list_state.clone());
let mut this = Self {
scrollbar_state: ScrollbarState::new(scroll_handle.clone()),
session,
workspace,
focus_handle,
@@ -84,8 +77,7 @@ impl StackFrameList {
entries: Default::default(),
selected_ix: None,
opened_stack_frame_id: None,
list_state,
scrollbar_state,
scroll_handle,
_refresh_task: Task::ready(()),
};
this.schedule_refresh(true, window, cx);
@@ -222,7 +214,6 @@ impl StackFrameList {
self.selected_ix = ix;
}
self.list_state.reset(self.entries.len());
cx.emit(StackFrameListEvent::BuiltEntries);
cx.notify();
}
@@ -564,6 +555,10 @@ impl StackFrameList {
fn select_ix(&mut self, ix: Option<usize>, cx: &mut Context<Self>) {
self.selected_ix = ix;
if let Some(ix) = self.selected_ix {
self.scroll_handle
.scroll_to_item(ix, ScrollStrategy::Center);
}
cx.notify();
}
@@ -647,8 +642,15 @@ impl StackFrameList {
self.activate_selected_entry(window, cx);
}
fn render_list(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
list(self.list_state.clone()).size_full()
fn render_list(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
uniform_list(
cx.entity(),
"stack-frame-list",
self.entries.len(),
|this, range, _window, cx| range.map(|ix| this.render_entry(ix, cx)).collect(),
)
.track_scroll(self.scroll_handle.clone())
.size_full()
}
}

View File

@@ -11,7 +11,7 @@ use editor::{
};
use gpui::{TestAppContext, VisualTestContext};
use indoc::indoc;
use language::{DiagnosticSourceKind, Rope};
use language::Rope;
use lsp::LanguageServerId;
use pretty_assertions::assert_eq;
use project::FakeFs;
@@ -105,7 +105,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
}
],
version: None
}, None, DiagnosticSourceKind::Pushed, &[], cx).unwrap();
}, &[], cx).unwrap();
});
// Open the project diagnostics view while there are already diagnostics.
@@ -176,8 +176,6 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
}],
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -263,8 +261,6 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
],
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -372,8 +368,6 @@ async fn test_diagnostics_with_folds(cx: &mut TestAppContext) {
}],
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -471,8 +465,6 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
}],
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -515,8 +507,6 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
}],
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -558,8 +548,6 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
}],
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -572,8 +560,6 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
diagnostics: vec![],
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -614,8 +600,6 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
}],
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -748,8 +732,6 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
diagnostics: diagnostics.clone(),
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -937,8 +919,6 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
diagnostics: diagnostics.clone(),
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -994,8 +974,6 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext)
..Default::default()
}],
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -1029,8 +1007,6 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext)
version: None,
diagnostics: Vec::new(),
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -1112,8 +1088,6 @@ async fn cycle_through_same_place_diagnostics(cx: &mut TestAppContext) {
},
],
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -1252,8 +1226,6 @@ async fn test_diagnostics_with_links(cx: &mut TestAppContext) {
..Default::default()
}],
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -1305,8 +1277,6 @@ async fn test_hover_diagnostic_and_info_popovers(cx: &mut gpui::TestAppContext)
..Default::default()
}],
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -1408,8 +1378,6 @@ async fn test_diagnostics_with_code(cx: &mut TestAppContext) {
],
version: None,
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)

View File

@@ -464,7 +464,7 @@ impl BlockMap {
map
}
pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapReader<'_> {
pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapReader {
self.sync(&wrap_snapshot, edits);
*self.wrap_snapshot.borrow_mut() = wrap_snapshot.clone();
BlockMapReader {
@@ -479,7 +479,7 @@ impl BlockMap {
}
}
pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapWriter<'_> {
pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapWriter {
self.sync(&wrap_snapshot, edits);
*self.wrap_snapshot.borrow_mut() = wrap_snapshot;
BlockMapWriter(self)
@@ -1327,7 +1327,7 @@ impl BlockSnapshot {
}
}
pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows<'_> {
pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&start_row, Bias::Right, &());
let (output_start, input_start) = cursor.start();

View File

@@ -357,7 +357,7 @@ impl FoldMap {
&mut self,
inlay_snapshot: InlaySnapshot,
edits: Vec<InlayEdit>,
) -> (FoldMapWriter<'_>, FoldSnapshot, Vec<FoldEdit>) {
) -> (FoldMapWriter, FoldSnapshot, Vec<FoldEdit>) {
let (snapshot, edits) = self.read(inlay_snapshot, edits);
(FoldMapWriter(self), snapshot, edits)
}
@@ -730,7 +730,7 @@ impl FoldSnapshot {
(line_end - line_start) as u32
}
pub fn row_infos(&self, start_row: u32) -> FoldRows<'_> {
pub fn row_infos(&self, start_row: u32) -> FoldRows {
if start_row > self.transforms.summary().output.lines.row {
panic!("invalid display row {}", start_row);
}

View File

@@ -726,7 +726,7 @@ impl WrapSnapshot {
self.transforms.summary().output.longest_row
}
pub fn row_infos(&self, start_row: u32) -> WrapRows<'_> {
pub fn row_infos(&self, start_row: u32) -> WrapRows {
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
let mut input_row = transforms.start().1.row();

File diff suppressed because it is too large

View File

@@ -49,7 +49,6 @@ pub struct EditorSettings {
#[serde(default)]
pub diagnostics_max_severity: Option<DiagnosticSeverity>,
pub inline_code_actions: bool,
pub drag_and_drop_selection: bool,
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
@@ -423,7 +422,7 @@ pub struct EditorSettingsContent {
/// Default: always
pub seed_search_query_from_cursor: Option<SeedQuerySetting>,
pub use_smartcase_search: Option<bool>,
/// Determines the modifier to be used to add multiple cursors with the mouse. The open hover link mouse gestures will adapt so that they do not conflict with the multi-cursor modifier.
/// The key to use for adding multiple cursors
///
/// Default: alt
pub multi_cursor_modifier: Option<MultiCursorModifier>,
@@ -496,11 +495,6 @@ pub struct EditorSettingsContent {
///
/// Default: true
pub inline_code_actions: Option<bool>,
/// Whether to allow drag and drop text selection in buffer.
///
/// Default: true
pub drag_and_drop_selection: Option<bool>,
}
// Toolbar related settings

View File

@@ -1907,6 +1907,7 @@ fn test_prev_next_word_boundary(cx: &mut TestAppContext) {
DisplayPoint::new(DisplayRow(2), 4)..DisplayPoint::new(DisplayRow(2), 4),
])
});
editor.move_to_previous_word_start(&MoveToPreviousWordStart, window, cx);
assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", editor, cx);
@@ -1926,29 +1927,29 @@ fn test_prev_next_word_boundary(cx: &mut TestAppContext) {
assert_selection_ranges("useˇ std::str::{foo, barˇ}\n\n {baz.qux()}", editor, cx);
editor.move_to_next_word_end(&MoveToNextWordEnd, window, cx);
assert_selection_ranges("use stdˇ::str::{foo, bar}ˇ\n\n {baz.qux()}", editor, cx);
assert_selection_ranges("use stdˇ::str::{foo, bar}\nˇ\n {baz.qux()}", editor, cx);
editor.move_to_next_word_end(&MoveToNextWordEnd, window, cx);
assert_selection_ranges("use std::ˇstr::{foo, bar}\nˇ\n {baz.qux()}", editor, cx);
assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", editor, cx);
editor.move_right(&MoveRight, window, cx);
editor.select_to_previous_word_start(&SelectToPreviousWordStart, window, cx);
assert_selection_ranges(
"use std::«ˇs»tr::{foo, bar}\n«ˇ\n» {baz.qux()}",
"use std::«ˇs»tr::{foo, bar}\n\n {«ˇb»az.qux()}",
editor,
cx,
);
editor.select_to_previous_word_start(&SelectToPreviousWordStart, window, cx);
assert_selection_ranges(
"use std«ˇ::s»tr::{foo, bar«ˇ}\n\n» {baz.qux()}",
"use std«ˇ::s»tr::{foo, bar}\n\n«ˇ {b»az.qux()}",
editor,
cx,
);
editor.select_to_next_word_end(&SelectToNextWordEnd, window, cx);
assert_selection_ranges(
"use std::«ˇs»tr::{foo, bar}«ˇ\n\n» {baz.qux()}",
"use std::«ˇs»tr::{foo, bar}\n\n {«ˇb»az.qux()}",
editor,
cx,
);
@@ -6299,296 +6300,6 @@ async fn test_add_selection_above_below(cx: &mut TestAppContext) {
));
}
#[gpui::test]
async fn test_add_selection_above_below_multi_cursor(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
cx.set_state(indoc!(
r#"line onˇe
liˇne two
line three
line four"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
});
// test multiple cursors expand in the same direction
cx.assert_editor_state(indoc!(
r#"line onˇe
liˇne twˇo
liˇne three
line four"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
});
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
});
// test multiple cursors expand below overflow
cx.assert_editor_state(indoc!(
r#"line onˇe
liˇne twˇo
liˇne thˇree
liˇne foˇur"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_above(&Default::default(), window, cx);
});
// test multiple cursors shrink back correctly
cx.assert_editor_state(indoc!(
r#"line onˇe
liˇne twˇo
liˇne thˇree
line four"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_above(&Default::default(), window, cx);
});
cx.update_editor(|editor, window, cx| {
editor.add_selection_above(&Default::default(), window, cx);
});
// test multiple cursor groups maintain independent direction - first expands up, second shrinks above
cx.assert_editor_state(indoc!(
r#"liˇne onˇe
liˇne two
line three
line four"#
));
cx.update_editor(|editor, window, cx| {
editor.undo_selection(&Default::default(), window, cx);
});
// test undo
cx.assert_editor_state(indoc!(
r#"line onˇe
liˇne twˇo
line three
line four"#
));
cx.update_editor(|editor, window, cx| {
editor.redo_selection(&Default::default(), window, cx);
});
// test redo
cx.assert_editor_state(indoc!(
r#"liˇne onˇe
liˇne two
line three
line four"#
));
cx.set_state(indoc!(
r#"abcd
ef«ghˇ»
ijkl
«mˇ»nop"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_above(&Default::default(), window, cx);
});
// test multiple selections expand in the same direction
cx.assert_editor_state(indoc!(
r#"ab«cdˇ»
ef«ghˇ»
«iˇ»jkl
«mˇ»nop"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_above(&Default::default(), window, cx);
});
// test multiple selection upward overflow
cx.assert_editor_state(indoc!(
r#"ab«cdˇ»
«eˇ»f«ghˇ»
«iˇ»jkl
«mˇ»nop"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
});
// test multiple selections shrink back correctly
cx.assert_editor_state(indoc!(
r#"abcd
ef«ghˇ»
«iˇ»jkl
«mˇ»nop"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
});
// test multiple cursor groups maintain independent direction - first shrinks down, second expands below
cx.assert_editor_state(indoc!(
r#"abcd
ef«ghˇ»
ij«klˇ»
«mˇ»nop"#
));
cx.update_editor(|editor, window, cx| {
editor.undo_selection(&Default::default(), window, cx);
});
// test undo
cx.assert_editor_state(indoc!(
r#"abcd
ef«ghˇ»
«iˇ»jkl
«mˇ»nop"#
));
cx.update_editor(|editor, window, cx| {
editor.redo_selection(&Default::default(), window, cx);
});
// test redo
cx.assert_editor_state(indoc!(
r#"abcd
ef«ghˇ»
ij«klˇ»
«mˇ»nop"#
));
}
#[gpui::test]
async fn test_add_selection_above_below_multi_cursor_existing_state(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
cx.set_state(indoc!(
r#"line onˇe
liˇne two
line three
line four"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
editor.add_selection_below(&Default::default(), window, cx);
editor.add_selection_below(&Default::default(), window, cx);
});
// initial state with two multi cursor groups
cx.assert_editor_state(indoc!(
r#"line onˇe
liˇne twˇo
liˇne thˇree
liˇne foˇur"#
));
// add single cursor in middle - simulate opt click
cx.update_editor(|editor, window, cx| {
let new_cursor_point = DisplayPoint::new(DisplayRow(2), 4);
editor.begin_selection(new_cursor_point, true, 1, window, cx);
editor.end_selection(window, cx);
});
cx.assert_editor_state(indoc!(
r#"line onˇe
liˇne twˇo
liˇneˇ thˇree
liˇne foˇur"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_above(&Default::default(), window, cx);
});
// test newly added selection expands above while existing selections shrink
cx.assert_editor_state(indoc!(
r#"line onˇe
liˇneˇ twˇo
liˇneˇ thˇree
line four"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_above(&Default::default(), window, cx);
});
// test newly added selection expands above while existing selections shrink
cx.assert_editor_state(indoc!(
r#"lineˇ onˇe
liˇneˇ twˇo
lineˇ three
line four"#
));
// initial state with two selection groups
cx.set_state(indoc!(
r#"abcd
ef«ghˇ»
ijkl
«mˇ»nop"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_above(&Default::default(), window, cx);
editor.add_selection_above(&Default::default(), window, cx);
});
cx.assert_editor_state(indoc!(
r#"ab«cdˇ»
«eˇ»f«ghˇ»
«iˇ»jkl
«mˇ»nop"#
));
// add single selection in middle - simulate opt drag
cx.update_editor(|editor, window, cx| {
let new_cursor_point = DisplayPoint::new(DisplayRow(2), 3);
editor.begin_selection(new_cursor_point, true, 1, window, cx);
editor.update_selection(
DisplayPoint::new(DisplayRow(2), 4),
0,
gpui::Point::<f32>::default(),
window,
cx,
);
editor.end_selection(window, cx);
});
cx.assert_editor_state(indoc!(
r#"ab«cdˇ»
«eˇ»f«ghˇ»
«iˇ»jk«lˇ»
«mˇ»nop"#
));
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
});
// test newly added selection expands below, others shrink from above
cx.assert_editor_state(indoc!(
r#"abcd
ef«ghˇ»
«iˇ»jk«lˇ»
«mˇ»no«pˇ»"#
));
}
#[gpui::test]
async fn test_select_next(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -13939,8 +13650,6 @@ async fn go_to_prev_overlapping_diagnostic(executor: BackgroundExecutor, cx: &mu
},
],
},
None,
DiagnosticSourceKind::Pushed,
&[],
cx,
)
@@ -21853,203 +21562,3 @@ fn assert_hunk_revert(
cx.assert_editor_state(expected_reverted_text_with_selections);
assert_eq!(actual_hunk_statuses_before, expected_hunk_statuses_before);
}
#[gpui::test(iterations = 10)]
async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let diagnostic_requests = Arc::new(AtomicUsize::new(0));
let counter = diagnostic_requests.clone();
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
json!({
"first.rs": "fn main() { let a = 5; }",
"second.rs": "// Test file",
}),
)
.await;
let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
let mut fake_servers = language_registry.register_fake_lsp(
"Rust",
FakeLspAdapter {
capabilities: lsp::ServerCapabilities {
diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
lsp::DiagnosticOptions {
identifier: None,
inter_file_dependencies: true,
workspace_diagnostics: true,
work_done_progress_options: Default::default(),
},
)),
..Default::default()
},
..Default::default()
},
);
let editor = workspace
.update(cx, |workspace, window, cx| {
workspace.open_abs_path(
PathBuf::from(path!("/a/first.rs")),
OpenOptions::default(),
window,
cx,
)
})
.unwrap()
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let fake_server = fake_servers.next().await.unwrap();
let mut first_request = fake_server
.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(move |params, _| {
let new_result_id = counter.fetch_add(1, atomic::Ordering::Release) + 1;
let result_id = Some(new_result_id.to_string());
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/a/first.rs")).unwrap()
);
async move {
Ok(lsp::DocumentDiagnosticReportResult::Report(
lsp::DocumentDiagnosticReport::Full(lsp::RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport {
items: Vec::new(),
result_id,
},
}),
))
}
});
let ensure_result_id = |expected: Option<String>, cx: &mut TestAppContext| {
project.update(cx, |project, cx| {
let buffer_id = editor
.read(cx)
.buffer()
.read(cx)
.as_singleton()
.expect("created a singleton buffer")
.read(cx)
.remote_id();
let buffer_result_id = project.lsp_store().read(cx).result_id(buffer_id, cx);
assert_eq!(expected, buffer_result_id);
});
};
ensure_result_id(None, cx);
cx.executor().advance_clock(Duration::from_millis(60));
cx.executor().run_until_parked();
assert_eq!(
diagnostic_requests.load(atomic::Ordering::Acquire),
1,
"Opening file should trigger diagnostic request"
);
first_request
.next()
.await
.expect("should have sent the first diagnostics pull request");
ensure_result_id(Some("1".to_string()), cx);
// Editing should trigger diagnostics
editor.update_in(cx, |editor, window, cx| {
editor.handle_input("2", window, cx)
});
cx.executor().advance_clock(Duration::from_millis(60));
cx.executor().run_until_parked();
assert_eq!(
diagnostic_requests.load(atomic::Ordering::Acquire),
2,
"Editing should trigger diagnostic request"
);
ensure_result_id(Some("2".to_string()), cx);
// Moving cursor should not trigger diagnostic request
editor.update_in(cx, |editor, window, cx| {
editor.change_selections(None, window, cx, |s| {
s.select_ranges([Point::new(0, 0)..Point::new(0, 0)])
});
});
cx.executor().advance_clock(Duration::from_millis(60));
cx.executor().run_until_parked();
assert_eq!(
diagnostic_requests.load(atomic::Ordering::Acquire),
2,
"Cursor movement should not trigger diagnostic request"
);
ensure_result_id(Some("2".to_string()), cx);
// Multiple rapid edits should be debounced
for _ in 0..5 {
editor.update_in(cx, |editor, window, cx| {
editor.handle_input("x", window, cx)
});
}
cx.executor().advance_clock(Duration::from_millis(60));
cx.executor().run_until_parked();
let final_requests = diagnostic_requests.load(atomic::Ordering::Acquire);
assert!(
final_requests <= 4,
"Multiple rapid edits should be debounced (got {final_requests} requests)",
);
ensure_result_id(Some(final_requests.to_string()), cx);
}
#[gpui::test]
async fn test_add_selection_after_moving_with_multiple_cursors(cx: &mut TestAppContext) {
// Regression test for issue #11671
// Previously, adding a cursor after moving multiple cursors would reset
// the cursor count instead of adding to the existing cursors.
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
// Create a simple buffer with cursor at start
cx.set_state(indoc! {"
ˇaaaa
bbbb
cccc
dddd
eeee
ffff
gggg
hhhh"});
// Add 2 cursors below (so we have 3 total)
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
editor.add_selection_below(&Default::default(), window, cx);
});
// Verify we have 3 cursors
let initial_count = cx.update_editor(|editor, _, _| editor.selections.count());
assert_eq!(
initial_count, 3,
"Should have 3 cursors after adding 2 below"
);
// Move down one line
cx.update_editor(|editor, window, cx| {
editor.move_down(&MoveDown, window, cx);
});
// Add another cursor below
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
});
// Should now have 4 cursors (3 original + 1 new)
let final_count = cx.update_editor(|editor, _, _| editor.selections.count());
assert_eq!(
final_count, 4,
"Should have 4 cursors after moving and adding another"
);
}

Some files were not shown because too many files have changed in this diff