Compare commits
1 commit: asdf_copil...github-tok

| Author | SHA1 | Date |
|---|---|---|
| | a6a5f55a05 | |
.github/actions/build_docs/action.yml (vendored, 6 changed lines)

@@ -19,12 +19,6 @@ runs:
shell: bash -euxo pipefail {0}
run: ./script/linux

- name: Check for broken links
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 # v2.4.1
with:
args: --no-progress './docs/src/**/*'
fail: true

- name: Build book
shell: bash -euxo pipefail {0}
run: |
.github/workflows/ci.yml (vendored, 65 changed lines)

@@ -183,9 +183,6 @@ jobs:
- name: Check for todo! and FIXME comments
run: script/check-todos

- name: Check modifier use in keymaps
run: script/check-keymaps

- name: Run style checks
uses: ./.github/actions/check_style

@@ -739,64 +736,6 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

freebsd:
timeout-minutes: 60
runs-on: github-8vcpu-ubuntu-2404
if: |
startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
needs: [linux_tests]
name: Build Zed on FreeBSD
# env:
# MYTOKEN : ${{ secrets.MYTOKEN }}
# MYTOKEN2: "value2"
steps:
- uses: actions/checkout@v4
- name: Build FreeBSD remote-server
id: freebsd-build
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
with:
# envs: "MYTOKEN MYTOKEN2"
usesh: true
release: 13.5
copyback: true
prepare: |
pkg install -y \
bash curl jq git \
rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
run: |
freebsd-version
sysctl hw.model
sysctl hw.ncpu
sysctl hw.physmem
sysctl hw.usermem
git config --global --add safe.directory /home/runner/work/zed/zed
rustup-init --profile minimal --default-toolchain none -y
. "$HOME/.cargo/env"
./script/bundle-freebsd
mkdir -p out/
mv "target/zed-remote-server-freebsd-x86_64.gz" out/
rm -rf target/
cargo clean

- name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-freebsd.gz
path: out/zed-remote-server-freebsd-x86_64.gz

- name: Upload Artifacts to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: |
out/zed-remote-server-freebsd-x86_64.gz
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

nix-build:
name: Build with Nix
uses: ./.github/workflows/nix.yml
@@ -811,12 +750,12 @@ jobs:
if: |
startsWith(github.ref, 'refs/tags/v')
&& endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, freebsd]
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64]
runs-on:
- self-hosted
- bundle
steps:
- name: gh release
run: gh release edit $GITHUB_REF_NAME --draft=false
run: gh release edit $GITHUB_REF_NAME --draft=true
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/release_nightly.yml (vendored, 44 changed lines)

@@ -167,50 +167,6 @@ jobs:
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz

freebsd:
timeout-minutes: 60
if: github.repository_owner == 'zed-industries'
runs-on: github-8vcpu-ubuntu-2404
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
name: Build Zed on FreeBSD
# env:
# MYTOKEN : ${{ secrets.MYTOKEN }}
# MYTOKEN2: "value2"
steps:
- uses: actions/checkout@v4
- name: Build FreeBSD remote-server
id: freebsd-build
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
with:
# envs: "MYTOKEN MYTOKEN2"
usesh: true
release: 13.5
copyback: true
prepare: |
pkg install -y \
bash curl jq git \
rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
run: |
freebsd-version
sysctl hw.model
sysctl hw.ncpu
sysctl hw.physmem
sysctl hw.usermem
git config --global --add safe.directory /home/runner/work/zed/zed
rustup-init --profile minimal --default-toolchain none -y
. "$HOME/.cargo/env"
./script/bundle-freebsd
mkdir -p out/
mv "target/zed-remote-server-freebsd-x86_64.gz" out/
rm -rf target/
cargo clean

- name: Upload Zed Nightly
run: script/upload-nightly freebsd

bundle-nix:
name: Build and cache Nix package
needs: tests
.github/workflows/unit_evals.yml (vendored, 2 changed lines)

@@ -66,7 +66,7 @@ jobs:
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}

- name: Send failure message to Slack channel if needed
- name: Send the pull request link into the Slack channel
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:
@@ -47,7 +47,6 @@
"remove_trailing_whitespace_on_save": true,
"ensure_final_newline_on_save": true,
"file_scan_exclusions": [
"crates/assistant_tools/src/evals/fixtures",
"crates/eval/worktrees/",
"crates/eval/repos/",
"**/.git",
Cargo.lock (generated, 45 changed lines)
@@ -59,7 +59,7 @@ dependencies = [
|
||||
"assistant_slash_command",
|
||||
"assistant_slash_commands",
|
||||
"assistant_tool",
|
||||
"assistant_tools",
|
||||
"async-watch",
|
||||
"audio",
|
||||
"buffer_diff",
|
||||
"chrono",
|
||||
@@ -99,7 +99,6 @@ dependencies = [
|
||||
"paths",
|
||||
"picker",
|
||||
"postage",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"prompt_store",
|
||||
"proto",
|
||||
@@ -131,7 +130,6 @@ dependencies = [
|
||||
"urlencoding",
|
||||
"util",
|
||||
"uuid",
|
||||
"watch",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
"zed_actions",
|
||||
@@ -149,6 +147,7 @@ dependencies = [
|
||||
"deepseek",
|
||||
"fs",
|
||||
"gpui",
|
||||
"indexmap",
|
||||
"language_model",
|
||||
"lmstudio",
|
||||
"log",
|
||||
@@ -632,6 +631,7 @@ name = "assistant_tool"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-watch",
|
||||
"buffer_diff",
|
||||
"clock",
|
||||
"collections",
|
||||
@@ -653,7 +653,6 @@ dependencies = [
|
||||
"settings",
|
||||
"text",
|
||||
"util",
|
||||
"watch",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
"zlog",
|
||||
@@ -666,6 +665,7 @@ dependencies = [
|
||||
"agent_settings",
|
||||
"anyhow",
|
||||
"assistant_tool",
|
||||
"async-watch",
|
||||
"buffer_diff",
|
||||
"chrono",
|
||||
"client",
|
||||
@@ -716,7 +716,6 @@ dependencies = [
|
||||
"ui",
|
||||
"unindent",
|
||||
"util",
|
||||
"watch",
|
||||
"web_search",
|
||||
"which 6.0.3",
|
||||
"workspace",
|
||||
@@ -1075,6 +1074,15 @@ dependencies = [
|
||||
"tungstenite 0.26.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-watch"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a078faf4e27c0c6cc0efb20e5da59dcccc04968ebf2801d8e0b2195124cdcdb2"
|
||||
dependencies = [
|
||||
"event-listener 2.5.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async_zip"
|
||||
version = "0.0.17"
|
||||
@@ -2979,6 +2987,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"assistant_context_editor",
|
||||
"assistant_slash_command",
|
||||
"assistant_tool",
|
||||
"async-stripe",
|
||||
"async-trait",
|
||||
"async-tungstenite",
|
||||
@@ -4225,7 +4234,6 @@ dependencies = [
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"itertools 0.14.0",
|
||||
"language",
|
||||
"log",
|
||||
"menu",
|
||||
@@ -5005,6 +5013,7 @@ dependencies = [
|
||||
"assistant_tool",
|
||||
"assistant_tools",
|
||||
"async-trait",
|
||||
"async-watch",
|
||||
"buffer_diff",
|
||||
"chrono",
|
||||
"clap",
|
||||
@@ -5046,7 +5055,6 @@ dependencies = [
|
||||
"unindent",
|
||||
"util",
|
||||
"uuid",
|
||||
"watch",
|
||||
"workspace-hack",
|
||||
"zed_llm_client",
|
||||
]
|
||||
@@ -8731,6 +8739,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"async-watch",
|
||||
"clock",
|
||||
"collections",
|
||||
"ctor",
|
||||
@@ -8780,7 +8789,6 @@ dependencies = [
|
||||
"unicase",
|
||||
"unindent",
|
||||
"util",
|
||||
"watch",
|
||||
"workspace-hack",
|
||||
"zlog",
|
||||
]
|
||||
@@ -10139,6 +10147,7 @@ dependencies = [
|
||||
"async-std",
|
||||
"async-tar",
|
||||
"async-trait",
|
||||
"async-watch",
|
||||
"futures 0.3.31",
|
||||
"http_client",
|
||||
"log",
|
||||
@@ -10148,7 +10157,6 @@ dependencies = [
|
||||
"serde_json",
|
||||
"smol",
|
||||
"util",
|
||||
"watch",
|
||||
"which 6.0.3",
|
||||
"workspace-hack",
|
||||
]
|
||||
@@ -10197,7 +10205,6 @@ dependencies = [
|
||||
"util",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
"zed_actions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -12999,6 +13006,7 @@ dependencies = [
|
||||
"askpass",
|
||||
"assistant_tool",
|
||||
"assistant_tools",
|
||||
"async-watch",
|
||||
"backtrace",
|
||||
"cargo_toml",
|
||||
"chrono",
|
||||
@@ -13045,7 +13053,6 @@ dependencies = [
|
||||
"toml 0.8.20",
|
||||
"unindent",
|
||||
"util",
|
||||
"watch",
|
||||
"worktree",
|
||||
"zlog",
|
||||
]
|
||||
@@ -15725,7 +15732,6 @@ dependencies = [
|
||||
"task",
|
||||
"theme",
|
||||
"thiserror 2.0.12",
|
||||
"url",
|
||||
"util",
|
||||
"windows 0.61.1",
|
||||
"workspace-hack",
|
||||
@@ -17907,19 +17913,6 @@ dependencies = [
|
||||
"leb128",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "watch"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"ctor",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"parking_lot",
|
||||
"rand 0.8.5",
|
||||
"workspace-hack",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-backend"
|
||||
version = "0.3.8"
|
||||
@@ -19731,6 +19724,7 @@ dependencies = [
|
||||
"assistant_context_editor",
|
||||
"assistant_tool",
|
||||
"assistant_tools",
|
||||
"async-watch",
|
||||
"audio",
|
||||
"auto_update",
|
||||
"auto_update_ui",
|
||||
@@ -19847,7 +19841,6 @@ dependencies = [
|
||||
"uuid",
|
||||
"vim",
|
||||
"vim_mode_setting",
|
||||
"watch",
|
||||
"web_search",
|
||||
"web_search_providers",
|
||||
"welcome",
|
||||
|
||||
@@ -165,7 +165,6 @@ members = [
"crates/util_macros",
"crates/vim",
"crates/vim_mode_setting",
"crates/watch",
"crates/web_search",
"crates/web_search_providers",
"crates/welcome",
@@ -374,7 +373,6 @@ util = { path = "crates/util" }
util_macros = { path = "crates/util_macros" }
vim = { path = "crates/vim" }
vim_mode_setting = { path = "crates/vim_mode_setting" }
watch = { path = "crates/watch" }
web_search = { path = "crates/web_search" }
web_search_providers = { path = "crates/web_search_providers" }
welcome = { path = "crates/welcome" }
@@ -405,6 +403,7 @@ async-recursion = "1.0.0"
async-tar = "0.5.0"
async-trait = "0.1"
async-tungstenite = "0.29.1"
async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
aws-config = { version = "1.6.1", features = ["behavior-version-latest"] }
aws-credential-types = { version = "1.2.2", features = [
|
||||
@@ -35,7 +35,7 @@
|
||||
"ctrl-shift-f5": "debugger::Restart",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepOver",
|
||||
"ctrl-f11": "debugger::StepInto",
|
||||
"cmd-f11": "debugger::StepInto",
|
||||
"shift-f11": "debugger::StepOut",
|
||||
"f11": "zed::ToggleFullScreen",
|
||||
"ctrl-alt-z": "edit_prediction::RateCompletions",
|
||||
@@ -59,6 +59,7 @@
|
||||
"tab": "editor::Tab",
|
||||
"shift-tab": "editor::Backtab",
|
||||
"ctrl-k": "editor::CutToEndOfLine",
|
||||
// "ctrl-t": "editor::Transpose",
|
||||
"ctrl-k ctrl-q": "editor::Rewrap",
|
||||
"ctrl-k q": "editor::Rewrap",
|
||||
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
|
||||
@@ -99,16 +100,21 @@
|
||||
"shift-down": "editor::SelectDown",
|
||||
"shift-left": "editor::SelectLeft",
|
||||
"shift-right": "editor::SelectRight",
|
||||
"ctrl-shift-left": "editor::SelectToPreviousWordStart",
|
||||
"ctrl-shift-right": "editor::SelectToNextWordEnd",
|
||||
"ctrl-shift-left": "editor::SelectToPreviousWordStart", // cursorWordLeftSelect
|
||||
"ctrl-shift-right": "editor::SelectToNextWordEnd", // cursorWordRightSelect
|
||||
"ctrl-shift-home": "editor::SelectToBeginning",
|
||||
"ctrl-shift-end": "editor::SelectToEnd",
|
||||
"ctrl-a": "editor::SelectAll",
|
||||
"ctrl-l": "editor::SelectLine",
|
||||
"ctrl-shift-i": "editor::Format",
|
||||
"alt-shift-o": "editor::OrganizeImports",
|
||||
// "cmd-shift-left": ["editor::SelectToBeginningOfLine", {"stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
// "ctrl-shift-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
"shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
// "cmd-shift-right": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": true }],
|
||||
// "ctrl-shift-e": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": true }],
|
||||
"shift-end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": true }],
|
||||
// "alt-v": ["editor::MovePageUp", { "center_cursor": true }],
|
||||
"ctrl-alt-space": "editor::ShowCharacterPalette",
|
||||
"ctrl-;": "editor::ToggleLineNumbers",
|
||||
"ctrl-'": "editor::ToggleSelectedDiffHunks",
|
||||
@@ -134,6 +140,7 @@
|
||||
"find": "buffer_search::Deploy",
|
||||
"ctrl-f": "buffer_search::Deploy",
|
||||
"ctrl-h": "buffer_search::DeployReplace",
|
||||
// "cmd-e": ["buffer_search::Deploy", { "focus": false }],
|
||||
"ctrl->": "assistant::QuoteSelection",
|
||||
"ctrl-<": "assistant::InsertIntoEditor",
|
||||
"ctrl-alt-e": "editor::SelectEnclosingSymbol",
|
||||
@@ -146,7 +153,8 @@
|
||||
"context": "Editor && mode == full && edit_prediction",
|
||||
"bindings": {
|
||||
"alt-]": "editor::NextEditPrediction",
|
||||
"alt-[": "editor::PreviousEditPrediction"
|
||||
"alt-[": "editor::PreviousEditPrediction",
|
||||
"alt-right": "editor::AcceptPartialEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -211,6 +219,7 @@
|
||||
"ctrl-enter": "assistant::Assist",
|
||||
"ctrl-s": "workspace::Save",
|
||||
"save": "workspace::Save",
|
||||
"ctrl->": "assistant::QuoteSelection",
|
||||
"ctrl-<": "assistant::InsertIntoEditor",
|
||||
"shift-enter": "assistant::Split",
|
||||
"ctrl-r": "assistant::CycleMessageRole",
|
||||
@@ -236,7 +245,6 @@
|
||||
"ctrl-shift-j": "agent::ToggleNavigationMenu",
|
||||
"ctrl-shift-i": "agent::ToggleOptionsMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"ctrl->": "assistant::QuoteSelection",
|
||||
"ctrl-alt-e": "agent::RemoveAllContext",
|
||||
"ctrl-shift-e": "project_panel::ToggleFocus",
|
||||
"ctrl-shift-enter": "agent::ContinueThread",
|
||||
@@ -260,8 +268,8 @@
|
||||
{
|
||||
"context": "AgentPanel && prompt_editor",
|
||||
"bindings": {
|
||||
"ctrl-n": "agent::NewTextThread",
|
||||
"ctrl-alt-t": "agent::NewThread"
|
||||
"cmd-n": "agent::NewTextThread",
|
||||
"cmd-alt-t": "agent::NewThread"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -654,16 +662,14 @@
|
||||
"bindings": {
|
||||
"alt-tab": "editor::AcceptEditPrediction",
|
||||
"alt-l": "editor::AcceptEditPrediction",
|
||||
"tab": "editor::AcceptEditPrediction",
|
||||
"alt-right": "editor::AcceptPartialEditPrediction"
|
||||
"tab": "editor::AcceptEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && edit_prediction_conflict",
|
||||
"bindings": {
|
||||
"alt-tab": "editor::AcceptEditPrediction",
|
||||
"alt-l": "editor::AcceptEditPrediction",
|
||||
"alt-right": "editor::AcceptPartialEditPrediction"
|
||||
"alt-l": "editor::AcceptEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -181,7 +181,8 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"alt-tab": "editor::NextEditPrediction",
|
||||
"alt-shift-tab": "editor::PreviousEditPrediction"
|
||||
"alt-shift-tab": "editor::PreviousEditPrediction",
|
||||
"ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -252,6 +253,7 @@
|
||||
"bindings": {
|
||||
"cmd-enter": "assistant::Assist",
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd->": "assistant::QuoteSelection",
|
||||
"cmd-<": "assistant::InsertIntoEditor",
|
||||
"shift-enter": "assistant::Split",
|
||||
"ctrl-r": "assistant::CycleMessageRole",
|
||||
@@ -278,7 +280,6 @@
|
||||
"cmd-shift-j": "agent::ToggleNavigationMenu",
|
||||
"cmd-shift-i": "agent::ToggleOptionsMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"cmd->": "assistant::QuoteSelection",
|
||||
"cmd-alt-e": "agent::RemoveAllContext",
|
||||
"cmd-shift-e": "project_panel::ToggleFocus",
|
||||
"cmd-shift-enter": "agent::ContinueThread",
|
||||
@@ -718,16 +719,14 @@
|
||||
"context": "Editor && edit_prediction",
|
||||
"bindings": {
|
||||
"alt-tab": "editor::AcceptEditPrediction",
|
||||
"tab": "editor::AcceptEditPrediction",
|
||||
"ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
|
||||
"tab": "editor::AcceptEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && edit_prediction_conflict",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"alt-tab": "editor::AcceptEditPrediction",
|
||||
"ctrl-cmd-right": "editor::AcceptPartialEditPrediction"
|
||||
"alt-tab": "editor::AcceptEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -13,9 +13,9 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && vim_mode == insert && !menu",
|
||||
"context": "Editor",
|
||||
"bindings": {
|
||||
// "j k": "vim::SwitchToNormalMode"
|
||||
// "j k": ["workspace::SendKeystrokes", "escape"]
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -38,7 +38,7 @@
|
||||
"ctrl-shift-d": "editor::DuplicateSelection",
|
||||
"alt-f3": "editor::SelectAllMatches", // find_all_under
|
||||
// "ctrl-f3": "", // find_under (cancels any selections)
|
||||
// "ctrl-alt-shift-g": "" // find_under_prev (cancels any selections)
|
||||
// "cmd-alt-shift-g": "" // find_under_prev (cancels any selections)
|
||||
"f9": "editor::SortLinesCaseSensitive",
|
||||
"ctrl-f9": "editor::SortLinesCaseInsensitive",
|
||||
"f12": "editor::GoToDefinition",
|
||||
|
||||
@@ -28,8 +28,7 @@
|
||||
"context": "InlineAssistEditor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-shift-backspace": "editor::Cancel",
|
||||
"cmd-enter": "menu::Confirm"
|
||||
"cmd-shift-backspace": "editor::Cancel"
|
||||
// "alt-enter": // Quick Question
|
||||
// "cmd-shift-enter": // Full File Context
|
||||
// "cmd-shift-k": // Toggle input focus (editor <> inline assist)
|
||||
|
||||
@@ -711,7 +711,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "AgentPanel || GitPanel || ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || DebugPanel",
|
||||
"context": "GitPanel || ProjectPanel || CollabPanel || OutlinePanel || ChatPanel || VimControl || EmptyPane || SharedScreen || MarkdownPreview || KeyContextView || DebugPanel",
|
||||
"bindings": {
|
||||
// window related commands (ctrl-w X)
|
||||
"ctrl-w": null,
|
||||
|
||||
@@ -101,12 +101,9 @@
|
||||
// The second option is decimal.
|
||||
"unit": "binary"
|
||||
},
|
||||
// Determines the modifier to be used to add multiple cursors with the mouse. The open hover link mouse gestures will adapt such that it do not conflict with the multicursor modifier.
|
||||
//
|
||||
// 1. Maps to `Alt` on Linux and Windows and to `Option` on MacOS:
|
||||
// "alt"
|
||||
// 2. Maps `Control` on Linux and Windows and to `Command` on MacOS:
|
||||
// "cmd_or_ctrl" (alias: "cmd", "ctrl")
|
||||
// The key to use for adding multiple cursors
|
||||
// Currently "alt" or "cmd_or_ctrl" (also aliased as
|
||||
// "cmd" and "ctrl") are supported.
|
||||
"multi_cursor_modifier": "alt",
|
||||
// Whether to enable vim modes and key bindings.
|
||||
"vim_mode": false,
|
||||
@@ -217,8 +214,6 @@
|
||||
"show_signature_help_after_edits": false,
|
||||
// Whether to show code action button at start of buffer line.
|
||||
"inline_code_actions": true,
|
||||
// Whether to allow drag and drop text selection in buffer.
|
||||
"drag_and_drop_selection": true,
|
||||
// What to do when go to definition yields no results.
|
||||
//
|
||||
// 1. Do nothing: `none`
|
||||
@@ -604,9 +599,7 @@
|
||||
// 2. Never show indent guides:
|
||||
// "never"
|
||||
"show": "always"
|
||||
},
|
||||
// Whether to hide the root entry when only one folder is open in the window.
|
||||
"hide_root": false
|
||||
}
|
||||
},
|
||||
"outline_panel": {
|
||||
// Whether to show the outline panel button in the status bar
|
||||
@@ -778,6 +771,7 @@
|
||||
"tools": {
|
||||
"copy_path": true,
|
||||
"create_directory": true,
|
||||
"create_file": true,
|
||||
"delete_path": true,
|
||||
"diagnostics": true,
|
||||
"edit_file": true,
|
||||
@@ -1040,14 +1034,6 @@
|
||||
"button": true,
|
||||
// Whether to show warnings or not by default.
|
||||
"include_warnings": true,
|
||||
// Settings for using LSP pull diagnostics mechanism in Zed.
|
||||
"lsp_pull_diagnostics": {
|
||||
// Whether to pull for diagnostics or not.
|
||||
"enabled": true,
|
||||
// Minimum time to wait before pulling diagnostics from the language server(s).
|
||||
// 0 turns the debounce off.
|
||||
"debounce_ms": 50
|
||||
},
|
||||
// Settings for inline diagnostics
|
||||
"inline": {
|
||||
// Whether to show diagnostics inline or not
|
||||
@@ -1471,9 +1457,7 @@
|
||||
"language_servers": ["erlang-ls", "!elp", "..."]
|
||||
},
|
||||
"Git Commit": {
|
||||
"allow_rewrap": "anywhere",
|
||||
"soft_wrap": "editor_width",
|
||||
"preferred_line_length": 72
|
||||
"allow_rewrap": "anywhere"
|
||||
},
|
||||
"Go": {
|
||||
"code_actions_on_format": {
|
||||
@@ -1551,6 +1535,12 @@
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"SQL": {
|
||||
"prettier": {
|
||||
"allowed": true,
|
||||
"plugins": ["prettier-plugin-sql"]
|
||||
}
|
||||
},
|
||||
"Starlark": {
|
||||
"language_servers": ["starpls", "!buck2-lsp", "..."]
|
||||
},
|
||||
|
||||
@@ -261,11 +261,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#bfbdb6ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#d2a6ffff",
|
||||
"font_style": null,
|
||||
@@ -321,16 +316,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#d2a6ffff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#5ac1feff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#a9d94bff",
|
||||
"font_style": null,
|
||||
@@ -457,9 +442,9 @@
|
||||
"terminal.foreground": "#5c6166ff",
|
||||
"terminal.bright_foreground": "#5c6166ff",
|
||||
"terminal.dim_foreground": "#fcfcfcff",
|
||||
"terminal.ansi.black": "#5c6166ff",
|
||||
"terminal.ansi.bright_black": "#3b9ee5ff",
|
||||
"terminal.ansi.dim_black": "#9c9fa2ff",
|
||||
"terminal.ansi.black": "#fcfcfcff",
|
||||
"terminal.ansi.bright_black": "#bcbec0ff",
|
||||
"terminal.ansi.dim_black": "#5c6166ff",
|
||||
"terminal.ansi.red": "#ef7271ff",
|
||||
"terminal.ansi.bright_red": "#febab6ff",
|
||||
"terminal.ansi.dim_red": "#833538ff",
|
||||
@@ -478,9 +463,9 @@
|
||||
"terminal.ansi.cyan": "#4dbf99ff",
|
||||
"terminal.ansi.bright_cyan": "#ace0cbff",
|
||||
"terminal.ansi.dim_cyan": "#2a5f4aff",
|
||||
"terminal.ansi.white": "#fcfcfcff",
|
||||
"terminal.ansi.bright_white": "#fcfcfcff",
|
||||
"terminal.ansi.dim_white": "#bcbec0ff",
|
||||
"terminal.ansi.white": "#5c6166ff",
|
||||
"terminal.ansi.bright_white": "#5c6166ff",
|
||||
"terminal.ansi.dim_white": "#9c9fa2ff",
|
||||
"link_text.hover": "#3b9ee5ff",
|
||||
"conflict": "#f1ad49ff",
|
||||
"conflict.background": "#ffeedaff",
|
||||
@@ -647,11 +632,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#5c6166ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#a37accff",
|
||||
"font_style": null,
|
||||
@@ -707,16 +687,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#a37accff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#3b9ee5ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#86b300ff",
|
||||
"font_style": null,
|
||||
@@ -1033,11 +1003,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#cccac2ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#dfbfffff",
|
||||
"font_style": null,
|
||||
@@ -1093,16 +1058,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#dfbfffff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#72cffeff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#d4fe7fff",
|
||||
"font_style": null,
|
||||
|
||||
@@ -270,11 +270,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#d3869bff",
|
||||
"font_style": null,
|
||||
@@ -330,16 +325,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#fabd2eff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#b8bb25ff",
|
||||
"font_style": null,
|
||||
@@ -670,11 +655,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#d3869bff",
|
||||
"font_style": null,
|
||||
@@ -730,16 +710,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#fabd2eff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#b8bb25ff",
|
||||
"font_style": null,
|
||||
@@ -1070,11 +1040,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#d3869bff",
|
||||
"font_style": null,
|
||||
@@ -1130,16 +1095,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#fabd2eff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#b8bb25ff",
|
||||
"font_style": null,
|
||||
@@ -1272,9 +1227,9 @@
|
||||
"terminal.foreground": "#282828ff",
|
||||
"terminal.bright_foreground": "#282828ff",
|
||||
"terminal.dim_foreground": "#fbf1c7ff",
|
||||
"terminal.ansi.black": "#282828ff",
|
||||
"terminal.ansi.bright_black": "#0b6678ff",
|
||||
"terminal.ansi.dim_black": "#5f5650ff",
|
||||
"terminal.ansi.black": "#fbf1c7ff",
|
||||
"terminal.ansi.bright_black": "#b0a189ff",
|
||||
"terminal.ansi.dim_black": "#282828ff",
|
||||
"terminal.ansi.red": "#9d0308ff",
|
||||
"terminal.ansi.bright_red": "#db8b7aff",
|
||||
"terminal.ansi.dim_red": "#4e1207ff",
|
||||
@@ -1293,9 +1248,9 @@
|
||||
"terminal.ansi.cyan": "#437b59ff",
|
||||
"terminal.ansi.bright_cyan": "#9fbca8ff",
|
||||
"terminal.ansi.dim_cyan": "#253e2eff",
|
||||
"terminal.ansi.white": "#fbf1c7ff",
|
||||
"terminal.ansi.bright_white": "#fbf1c7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"terminal.ansi.white": "#282828ff",
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
@@ -1470,11 +1425,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#8f3e71ff",
|
||||
"font_style": null,
|
||||
@@ -1530,16 +1480,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#b57613ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#79740eff",
|
||||
"font_style": null,
|
||||
@@ -1672,9 +1612,9 @@
|
||||
"terminal.foreground": "#282828ff",
|
||||
"terminal.bright_foreground": "#282828ff",
|
||||
"terminal.dim_foreground": "#f9f5d7ff",
|
||||
"terminal.ansi.black": "#282828ff",
|
||||
"terminal.ansi.bright_black": "#73675eff",
|
||||
"terminal.ansi.dim_black": "#f9f5d7ff",
|
||||
"terminal.ansi.black": "#f9f5d7ff",
|
||||
"terminal.ansi.bright_black": "#b0a189ff",
|
||||
"terminal.ansi.dim_black": "#282828ff",
|
||||
"terminal.ansi.red": "#9d0308ff",
|
||||
"terminal.ansi.bright_red": "#db8b7aff",
|
||||
"terminal.ansi.dim_red": "#4e1207ff",
|
||||
@@ -1693,9 +1633,9 @@
|
||||
"terminal.ansi.cyan": "#437b59ff",
|
||||
"terminal.ansi.bright_cyan": "#9fbca8ff",
|
||||
"terminal.ansi.dim_cyan": "#253e2eff",
|
||||
"terminal.ansi.white": "#f9f5d7ff",
|
||||
"terminal.ansi.bright_white": "#f9f5d7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"terminal.ansi.white": "#282828ff",
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
@@ -1870,11 +1810,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#8f3e71ff",
|
||||
"font_style": null,
|
||||
@@ -1930,16 +1865,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#b57613ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#79740eff",
|
||||
"font_style": null,
|
||||
@@ -2072,9 +1997,9 @@
|
||||
"terminal.foreground": "#282828ff",
|
||||
"terminal.bright_foreground": "#282828ff",
|
||||
"terminal.dim_foreground": "#f2e5bcff",
|
||||
"terminal.ansi.black": "#282828ff",
|
||||
"terminal.ansi.bright_black": "#73675eff",
|
||||
"terminal.ansi.dim_black": "#f2e5bcff",
|
||||
"terminal.ansi.black": "#f2e5bcff",
|
||||
"terminal.ansi.bright_black": "#b0a189ff",
|
||||
"terminal.ansi.dim_black": "#282828ff",
|
||||
"terminal.ansi.red": "#9d0308ff",
|
||||
"terminal.ansi.bright_red": "#db8b7aff",
|
||||
"terminal.ansi.dim_red": "#4e1207ff",
|
||||
@@ -2093,9 +2018,9 @@
|
||||
"terminal.ansi.cyan": "#437b59ff",
|
||||
"terminal.ansi.bright_cyan": "#9fbca8ff",
|
||||
"terminal.ansi.dim_cyan": "#253e2eff",
|
||||
"terminal.ansi.white": "#f2e5bcff",
|
||||
"terminal.ansi.bright_white": "#f2e5bcff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"terminal.ansi.white": "#282828ff",
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
@@ -2270,11 +2195,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#8f3e71ff",
|
||||
"font_style": null,
|
||||
@@ -2330,16 +2250,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#b57613ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#79740eff",
|
||||
"font_style": null,
|
||||
|
||||
@@ -264,11 +264,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#dce0e5ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#bf956aff",
|
||||
"font_style": null,
|
||||
@@ -324,16 +319,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#dfc184ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#74ade8ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#a1c181ff",
|
||||
"font_style": null,
|
||||
@@ -465,9 +450,9 @@
|
||||
"terminal.foreground": "#242529ff",
|
||||
"terminal.bright_foreground": "#242529ff",
|
||||
"terminal.dim_foreground": "#fafafaff",
|
||||
"terminal.ansi.black": "#242529ff",
|
||||
"terminal.ansi.bright_black": "#242529ff",
|
||||
"terminal.ansi.dim_black": "#97979aff",
|
||||
"terminal.ansi.black": "#fafafaff",
|
||||
"terminal.ansi.bright_black": "#aaaaaaff",
|
||||
"terminal.ansi.dim_black": "#242529ff",
|
||||
"terminal.ansi.red": "#d36151ff",
|
||||
"terminal.ansi.bright_red": "#f0b0a4ff",
|
||||
"terminal.ansi.dim_red": "#6f312aff",
|
||||
@@ -486,9 +471,9 @@
|
||||
"terminal.ansi.cyan": "#3a82b7ff",
|
||||
"terminal.ansi.bright_cyan": "#a3bedaff",
|
||||
"terminal.ansi.dim_cyan": "#254058ff",
|
||||
"terminal.ansi.white": "#fafafaff",
|
||||
"terminal.ansi.bright_white": "#fafafaff",
|
||||
"terminal.ansi.dim_white": "#aaaaaaff",
|
||||
"terminal.ansi.white": "#242529ff",
|
||||
"terminal.ansi.bright_white": "#242529ff",
|
||||
"terminal.ansi.dim_white": "#97979aff",
|
||||
"link_text.hover": "#5c78e2ff",
|
||||
"version_control.added": "#27a657ff",
|
||||
"version_control.modified": "#d3b020ff",
|
||||
@@ -658,11 +643,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"namespace": {
|
||||
"color": "#242529ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"number": {
|
||||
"color": "#ad6e25ff",
|
||||
"font_style": null,
|
||||
@@ -718,16 +698,6 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector": {
|
||||
"color": "#669f59ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"selector.pseudo": {
|
||||
"color": "#5c78e2ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"string": {
|
||||
"color": "#649f57ff",
|
||||
"font_style": null,
|
||||
|
||||
@@ -25,6 +25,7 @@ assistant_context_editor.workspace = true
|
||||
assistant_slash_command.workspace = true
|
||||
assistant_slash_commands.workspace = true
|
||||
assistant_tool.workspace = true
|
||||
async-watch.workspace = true
|
||||
audio.workspace = true
|
||||
buffer_diff.workspace = true
|
||||
chrono.workspace = true
|
||||
@@ -94,7 +95,6 @@ ui_input.workspace = true
|
||||
urlencoding.workspace = true
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
watch.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
@@ -102,13 +102,11 @@ zed_llm_client.workspace = true
|
||||
zstd.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
assistant_tools.workspace = true
|
||||
buffer_diff = { workspace = true, features = ["test-support"] }
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
gpui = { workspace = true, "features" = ["test-support"] }
|
||||
indoc.workspace = true
|
||||
language = { workspace = true, "features" = ["test-support"] }
|
||||
language_model = { workspace = true, "features" = ["test-support"] }
|
||||
pretty_assertions.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
rand.workspace = true
|
||||
|
||||
@@ -1144,10 +1144,6 @@ impl ActiveThread {
|
||||
cx,
|
||||
);
|
||||
}
|
||||
ThreadEvent::ProfileChanged => {
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,6 @@ mod agent_configuration;
|
||||
mod agent_diff;
|
||||
mod agent_model_selector;
|
||||
mod agent_panel;
|
||||
mod agent_profile;
|
||||
mod buffer_codegen;
|
||||
mod context;
|
||||
mod context_picker;
|
||||
|
||||
@@ -2,21 +2,25 @@ mod profile_modal_header;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use agent_settings::{AgentProfileId, AgentSettings, builtin_profiles};
|
||||
use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profiles};
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use convert_case::{Case, Casing as _};
|
||||
use editor::Editor;
|
||||
use fs::Fs;
|
||||
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*};
|
||||
use settings::Settings as _;
|
||||
use gpui::{
|
||||
DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, WeakEntity,
|
||||
prelude::*,
|
||||
};
|
||||
use settings::{Settings as _, update_settings_file};
|
||||
use ui::{
|
||||
KeyBinding, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{ModalView, Workspace};
|
||||
|
||||
use crate::agent_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
|
||||
use crate::agent_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
|
||||
use crate::agent_profile::AgentProfile;
|
||||
use crate::{AgentPanel, ManageProfiles};
|
||||
use crate::{AgentPanel, ManageProfiles, ThreadStore};
|
||||
|
||||
use super::tool_picker::ToolPickerMode;
|
||||
|
||||
@@ -99,6 +103,7 @@ pub struct NewProfileMode {
|
||||
pub struct ManageProfilesModal {
|
||||
fs: Arc<dyn Fs>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
focus_handle: FocusHandle,
|
||||
mode: Mode,
|
||||
}
|
||||
@@ -114,8 +119,9 @@ impl ManageProfilesModal {
|
||||
let fs = workspace.app_state().fs.clone();
|
||||
let thread_store = panel.read(cx).thread_store();
|
||||
let tools = thread_store.read(cx).tools();
|
||||
let thread_store = thread_store.downgrade();
|
||||
workspace.toggle_modal(window, cx, |window, cx| {
|
||||
let mut this = Self::new(fs, tools, window, cx);
|
||||
let mut this = Self::new(fs, tools, thread_store, window, cx);
|
||||
|
||||
if let Some(profile_id) = action.customize_tools.clone() {
|
||||
this.configure_builtin_tools(profile_id, window, cx);
|
||||
@@ -130,6 +136,7 @@ impl ManageProfilesModal {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -138,6 +145,7 @@ impl ManageProfilesModal {
|
||||
Self {
|
||||
fs,
|
||||
tools,
|
||||
thread_store,
|
||||
focus_handle,
|
||||
mode: Mode::choose_profile(window, cx),
|
||||
}
|
||||
@@ -198,6 +206,7 @@ impl ManageProfilesModal {
|
||||
ToolPickerMode::McpTools,
|
||||
self.fs.clone(),
|
||||
self.tools.clone(),
|
||||
self.thread_store.clone(),
|
||||
profile_id.clone(),
|
||||
profile,
|
||||
cx,
|
||||
@@ -235,6 +244,7 @@ impl ManageProfilesModal {
|
||||
ToolPickerMode::BuiltinTools,
|
||||
self.fs.clone(),
|
||||
self.tools.clone(),
|
||||
self.thread_store.clone(),
|
||||
profile_id.clone(),
|
||||
profile,
|
||||
cx,
|
||||
@@ -260,10 +270,32 @@ impl ManageProfilesModal {
|
||||
match &self.mode {
|
||||
Mode::ChooseProfile { .. } => {}
|
||||
Mode::NewProfile(mode) => {
|
||||
let name = mode.name_editor.read(cx).text(cx);
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
|
||||
let profile_id =
|
||||
AgentProfile::create(name, mode.base_profile_id.clone(), self.fs.clone(), cx);
|
||||
let base_profile = mode
|
||||
.base_profile_id
|
||||
.as_ref()
|
||||
.and_then(|profile_id| settings.profiles.get(profile_id).cloned());
|
||||
|
||||
let name = mode.name_editor.read(cx).text(cx);
|
||||
let profile_id = AgentProfileId(name.to_case(Case::Kebab).into());
|
||||
|
||||
let profile = AgentProfile {
|
||||
name: name.into(),
|
||||
tools: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.tools.clone())
|
||||
.unwrap_or_default(),
|
||||
enable_all_context_servers: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.enable_all_context_servers)
|
||||
.unwrap_or_default(),
|
||||
context_servers: base_profile
|
||||
.map(|profile| profile.context_servers)
|
||||
.unwrap_or_default(),
|
||||
};
|
||||
|
||||
self.create_profile(profile_id.clone(), profile, cx);
|
||||
self.view_profile(profile_id, window, cx);
|
||||
}
|
||||
Mode::ViewProfile(_) => {}
|
||||
@@ -293,6 +325,19 @@ impl ManageProfilesModal {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn create_profile(
|
||||
&self,
|
||||
profile_id: AgentProfileId,
|
||||
profile: AgentProfile,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
update_settings_file::<AgentSettings>(self.fs.clone(), cx, {
|
||||
move |settings, _cx| {
|
||||
settings.create_profile(profile_id, profile).log_err();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl ModalView for ManageProfilesModal {}
|
||||
@@ -475,13 +520,14 @@ impl ManageProfilesModal {
|
||||
) -> impl IntoElement {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
|
||||
let profile_id = &settings.default_profile;
|
||||
let profile_name = settings
|
||||
.profiles
|
||||
.get(&mode.profile_id)
|
||||
.map(|profile| profile.name.clone())
|
||||
.unwrap_or_else(|| "Unknown".into());
|
||||
|
||||
let icon = match mode.profile_id.as_str() {
|
||||
let icon = match profile_id.as_str() {
|
||||
"write" => IconName::Pencil,
|
||||
"ask" => IconName::MessageBubbles,
|
||||
_ => IconName::UserRoundPen,
|
||||
|
||||
@@ -1,17 +1,19 @@
|
||||
use std::{collections::BTreeMap, sync::Arc};
|
||||
|
||||
use agent_settings::{
|
||||
AgentProfileContent, AgentProfileId, AgentProfileSettings, AgentSettings, AgentSettingsContent,
|
||||
AgentProfile, AgentProfileContent, AgentProfileId, AgentSettings, AgentSettingsContent,
|
||||
ContextServerPresetContent,
|
||||
};
|
||||
use assistant_tool::{ToolSource, ToolWorkingSet};
|
||||
use fs::Fs;
|
||||
use gpui::{App, Context, DismissEvent, Entity, EventEmitter, Focusable, Task, WeakEntity, Window};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use settings::update_settings_file;
|
||||
use settings::{Settings as _, update_settings_file};
|
||||
use ui::{ListItem, ListItemSpacing, prelude::*};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::ThreadStore;
|
||||
|
||||
pub struct ToolPicker {
|
||||
picker: Entity<Picker<ToolPickerDelegate>>,
|
||||
}
|
||||
@@ -69,10 +71,11 @@ pub enum PickerItem {
|
||||
|
||||
pub struct ToolPickerDelegate {
|
||||
tool_picker: WeakEntity<ToolPicker>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
fs: Arc<dyn Fs>,
|
||||
items: Arc<Vec<PickerItem>>,
|
||||
profile_id: AgentProfileId,
|
||||
profile_settings: AgentProfileSettings,
|
||||
profile: AgentProfile,
|
||||
filtered_items: Vec<PickerItem>,
|
||||
selected_index: usize,
|
||||
mode: ToolPickerMode,
|
||||
@@ -83,18 +86,20 @@ impl ToolPickerDelegate {
|
||||
mode: ToolPickerMode,
|
||||
fs: Arc<dyn Fs>,
|
||||
tool_set: Entity<ToolWorkingSet>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
profile_id: AgentProfileId,
|
||||
profile_settings: AgentProfileSettings,
|
||||
profile: AgentProfile,
|
||||
cx: &mut Context<ToolPicker>,
|
||||
) -> Self {
|
||||
let items = Arc::new(Self::resolve_items(mode, &tool_set, cx));
|
||||
|
||||
Self {
|
||||
tool_picker: cx.entity().downgrade(),
|
||||
thread_store,
|
||||
fs,
|
||||
items,
|
||||
profile_id,
|
||||
profile_settings,
|
||||
profile,
|
||||
filtered_items: Vec::new(),
|
||||
selected_index: 0,
|
||||
mode,
|
||||
@@ -244,31 +249,28 @@ impl PickerDelegate for ToolPickerDelegate {
|
||||
};
|
||||
|
||||
let is_currently_enabled = if let Some(server_id) = server_id.clone() {
|
||||
let preset = self
|
||||
.profile_settings
|
||||
.context_servers
|
||||
.entry(server_id)
|
||||
.or_default();
|
||||
let preset = self.profile.context_servers.entry(server_id).or_default();
|
||||
let is_enabled = *preset.tools.entry(tool_name.clone()).or_default();
|
||||
*preset.tools.entry(tool_name.clone()).or_default() = !is_enabled;
|
||||
is_enabled
|
||||
} else {
|
||||
let is_enabled = *self
|
||||
.profile_settings
|
||||
.tools
|
||||
.entry(tool_name.clone())
|
||||
.or_default();
|
||||
*self
|
||||
.profile_settings
|
||||
.tools
|
||||
.entry(tool_name.clone())
|
||||
.or_default() = !is_enabled;
|
||||
let is_enabled = *self.profile.tools.entry(tool_name.clone()).or_default();
|
||||
*self.profile.tools.entry(tool_name.clone()).or_default() = !is_enabled;
|
||||
is_enabled
|
||||
};
|
||||
|
||||
let active_profile_id = &AgentSettings::get_global(cx).default_profile;
|
||||
if active_profile_id == &self.profile_id {
|
||||
self.thread_store
|
||||
.update(cx, |this, cx| {
|
||||
this.load_profile(self.profile.clone(), cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
|
||||
update_settings_file::<AgentSettings>(self.fs.clone(), cx, {
|
||||
let profile_id = self.profile_id.clone();
|
||||
let default_profile = self.profile_settings.clone();
|
||||
let default_profile = self.profile.clone();
|
||||
let server_id = server_id.clone();
|
||||
let tool_name = tool_name.clone();
|
||||
move |settings: &mut AgentSettingsContent, _cx| {
|
||||
@@ -346,18 +348,14 @@ impl PickerDelegate for ToolPickerDelegate {
|
||||
),
|
||||
PickerItem::Tool { name, server_id } => {
|
||||
let is_enabled = if let Some(server_id) = server_id {
|
||||
self.profile_settings
|
||||
self.profile
|
||||
.context_servers
|
||||
.get(server_id.as_ref())
|
||||
.and_then(|preset| preset.tools.get(name))
|
||||
.copied()
|
||||
.unwrap_or(self.profile_settings.enable_all_context_servers)
|
||||
.unwrap_or(self.profile.enable_all_context_servers)
|
||||
} else {
|
||||
self.profile_settings
|
||||
.tools
|
||||
.get(name)
|
||||
.copied()
|
||||
.unwrap_or(false)
|
||||
self.profile.tools.get(name).copied().unwrap_or(false)
|
||||
};
|
||||
|
||||
Some(
|
||||
|
||||
@@ -1378,8 +1378,7 @@ impl AgentDiff {
|
||||
| ThreadEvent::CheckpointChanged
|
||||
| ThreadEvent::ToolConfirmationNeeded
|
||||
| ThreadEvent::ToolUseLimitReached
|
||||
| ThreadEvent::CancelEditing
|
||||
| ThreadEvent::ProfileChanged => {}
|
||||
| ThreadEvent::CancelEditing => {}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -57,7 +57,7 @@ use zed_llm_client::{CompletionIntent, UsageLimit};
|
||||
use crate::active_thread::{self, ActiveThread, ActiveThreadEvent};
|
||||
use crate::agent_configuration::{AgentConfiguration, AssistantConfigurationEvent};
|
||||
use crate::agent_diff::AgentDiff;
|
||||
use crate::history_store::{HistoryEntryId, HistoryStore};
|
||||
use crate::history_store::{HistoryStore, RecentEntry};
|
||||
use crate::message_editor::{MessageEditor, MessageEditorEvent};
|
||||
use crate::thread::{Thread, ThreadError, ThreadId, ThreadSummary, TokenUsageRatio};
|
||||
use crate::thread_history::{HistoryEntryElement, ThreadHistory};
|
||||
@@ -257,7 +257,6 @@ impl ActiveView {
|
||||
|
||||
pub fn prompt_editor(
|
||||
context_editor: Entity<ContextEditor>,
|
||||
history_store: Entity<HistoryStore>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
@@ -323,19 +322,6 @@ impl ActiveView {
|
||||
editor.set_text(summary, window, cx);
|
||||
})
|
||||
}
|
||||
ContextEvent::PathChanged { old_path, new_path } => {
|
||||
history_store.update(cx, |history_store, cx| {
|
||||
if let Some(old_path) = old_path {
|
||||
history_store
|
||||
.replace_recently_opened_text_thread(old_path, new_path, cx);
|
||||
} else {
|
||||
history_store.push_recently_opened_entry(
|
||||
HistoryEntryId::Context(new_path.clone()),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}),
|
||||
@@ -530,7 +516,8 @@ impl AgentPanel {
|
||||
HistoryStore::new(
|
||||
thread_store.clone(),
|
||||
context_store.clone(),
|
||||
[HistoryEntryId::Thread(thread_id)],
|
||||
[RecentEntry::Thread(thread_id, thread.clone())],
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -557,13 +544,7 @@ impl AgentPanel {
|
||||
editor.insert_default_prompt(window, cx);
|
||||
editor
|
||||
});
|
||||
ActiveView::prompt_editor(
|
||||
context_editor,
|
||||
history_store.clone(),
|
||||
language_registry.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
ActiveView::prompt_editor(context_editor, language_registry.clone(), window, cx)
|
||||
}
|
||||
};
|
||||
|
||||
@@ -600,9 +581,86 @@ impl AgentPanel {
|
||||
let panel = weak_panel.clone();
|
||||
let assistant_navigation_menu =
|
||||
ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| {
|
||||
if let Some(panel) = panel.upgrade() {
|
||||
menu = Self::populate_recently_opened_menu_section(menu, panel, cx);
|
||||
let recently_opened = panel
|
||||
.update(cx, |this, cx| {
|
||||
this.history_store.update(cx, |history_store, cx| {
|
||||
history_store.recently_opened_entries(cx)
|
||||
})
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
if !recently_opened.is_empty() {
|
||||
menu = menu.header("Recently Opened");
|
||||
|
||||
for entry in recently_opened.iter() {
|
||||
if let RecentEntry::Context(context) = entry {
|
||||
if context.read(cx).path().is_none() {
|
||||
log::error!(
|
||||
"bug: text thread in recent history list was never saved"
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let summary = entry.summary(cx);
|
||||
|
||||
menu = menu.entry_with_end_slot_on_hover(
|
||||
summary,
|
||||
None,
|
||||
{
|
||||
let panel = panel.clone();
|
||||
let entry = entry.clone();
|
||||
move |window, cx| {
|
||||
panel
|
||||
.update(cx, {
|
||||
let entry = entry.clone();
|
||||
move |this, cx| match entry {
|
||||
RecentEntry::Thread(_, thread) => {
|
||||
this.open_thread(thread, window, cx)
|
||||
}
|
||||
RecentEntry::Context(context) => {
|
||||
let Some(path) = context.read(cx).path()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
this.open_saved_prompt_editor(
|
||||
path.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx)
|
||||
}
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
},
|
||||
IconName::Close,
|
||||
"Close Entry".into(),
|
||||
{
|
||||
let panel = panel.clone();
|
||||
let entry = entry.clone();
|
||||
move |_window, cx| {
|
||||
panel
|
||||
.update(cx, |this, cx| {
|
||||
this.history_store.update(
|
||||
cx,
|
||||
|history_store, cx| {
|
||||
history_store.remove_recently_opened_entry(
|
||||
&entry, cx,
|
||||
);
|
||||
},
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
menu = menu.separator();
|
||||
}
|
||||
|
||||
menu.action("View All", Box::new(OpenHistory))
|
||||
.end_slot_action(DeleteRecentlyOpenThread.boxed_clone())
|
||||
.fixed_width(px(320.).into())
|
||||
@@ -840,7 +898,6 @@ impl AgentPanel {
|
||||
self.set_active_view(
|
||||
ActiveView::prompt_editor(
context_editor.clone(),
self.history_store.clone(),
self.language_registry.clone(),
window,
cx,
@@ -927,13 +984,7 @@ impl AgentPanel {
)
});
self.set_active_view(
ActiveView::prompt_editor(
editor.clone(),
self.history_store.clone(),
self.language_registry.clone(),
window,
cx,
),
ActiveView::prompt_editor(editor.clone(), self.language_registry.clone(), window, cx),
window,
cx,
);
@@ -1332,6 +1383,16 @@ impl AgentPanel {
}
}
}
ActiveView::TextThread { context_editor, .. } => {
let context = context_editor.read(cx).context();
// When switching away from an unsaved text thread, delete its entry.
if context.read(cx).path().is_none() {
let context = context.clone();
self.history_store.update(cx, |store, cx| {
store.remove_recently_opened_entry(&RecentEntry::Context(context), cx);
});
}
}
_ => {}
}

@@ -1339,14 +1400,13 @@ impl AgentPanel {
ActiveView::Thread { thread, .. } => self.history_store.update(cx, |store, cx| {
if let Some(thread) = thread.upgrade() {
let id = thread.read(cx).id().clone();
store.push_recently_opened_entry(HistoryEntryId::Thread(id), cx);
store.push_recently_opened_entry(RecentEntry::Thread(id, thread), cx);
}
}),
ActiveView::TextThread { context_editor, .. } => {
self.history_store.update(cx, |store, cx| {
if let Some(path) = context_editor.read(cx).context().read(cx).path() {
store.push_recently_opened_entry(HistoryEntryId::Context(path.clone()), cx)
}
let context = context_editor.read(cx).context().clone();
store.push_recently_opened_entry(RecentEntry::Context(context), cx)
})
}
_ => {}
@@ -1365,70 +1425,6 @@ impl AgentPanel {

self.focus_handle(cx).focus(window);
}

fn populate_recently_opened_menu_section(
mut menu: ContextMenu,
panel: Entity<Self>,
cx: &mut Context<ContextMenu>,
) -> ContextMenu {
let entries = panel
.read(cx)
.history_store
.read(cx)
.recently_opened_entries(cx);

if entries.is_empty() {
return menu;
}

menu = menu.header("Recently Opened");

for entry in entries {
let title = entry.title().clone();
let id = entry.id();

menu = menu.entry_with_end_slot_on_hover(
title,
None,
{
let panel = panel.downgrade();
let id = id.clone();
move |window, cx| {
let id = id.clone();
panel
.update(cx, move |this, cx| match id {
HistoryEntryId::Thread(id) => this
.open_thread_by_id(&id, window, cx)
.detach_and_log_err(cx),
HistoryEntryId::Context(path) => this
.open_saved_prompt_editor(path.clone(), window, cx)
.detach_and_log_err(cx),
})
.ok();
}
},
IconName::Close,
"Close Entry".into(),
{
let panel = panel.downgrade();
let id = id.clone();
move |_window, cx| {
panel
.update(cx, |this, cx| {
this.history_store.update(cx, |history_store, cx| {
history_store.remove_recently_opened_entry(&id, cx);
});
})
.ok();
}
},
);
}

menu = menu.separator();

menu
}
}

impl Focusable for AgentPanel {

@@ -1,334 +0,0 @@
use std::sync::Arc;

use agent_settings::{AgentProfileId, AgentProfileSettings, AgentSettings};
use assistant_tool::{Tool, ToolSource, ToolWorkingSet};
use collections::IndexMap;
use convert_case::{Case, Casing};
use fs::Fs;
use gpui::{App, Entity};
use settings::{Settings, update_settings_file};
use ui::SharedString;
use util::ResultExt;

#[derive(Clone, Debug, Eq, PartialEq)]
pub struct AgentProfile {
id: AgentProfileId,
tool_set: Entity<ToolWorkingSet>,
}

pub type AvailableProfiles = IndexMap<AgentProfileId, SharedString>;

impl AgentProfile {
pub fn new(id: AgentProfileId, tool_set: Entity<ToolWorkingSet>) -> Self {
Self { id, tool_set }
}

/// Saves a new profile to the settings.
pub fn create(
name: String,
base_profile_id: Option<AgentProfileId>,
fs: Arc<dyn Fs>,
cx: &App,
) -> AgentProfileId {
let id = AgentProfileId(name.to_case(Case::Kebab).into());

let base_profile =
base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned());

let profile_settings = AgentProfileSettings {
name: name.into(),
tools: base_profile
.as_ref()
.map(|profile| profile.tools.clone())
.unwrap_or_default(),
enable_all_context_servers: base_profile
.as_ref()
.map(|profile| profile.enable_all_context_servers)
.unwrap_or_default(),
context_servers: base_profile
.map(|profile| profile.context_servers)
.unwrap_or_default(),
};

update_settings_file::<AgentSettings>(fs, cx, {
let id = id.clone();
move |settings, _cx| {
settings.create_profile(id, profile_settings).log_err();
}
});

id
}

/// Returns a map of AgentProfileIds to their names
pub fn available_profiles(cx: &App) -> AvailableProfiles {
let mut profiles = AvailableProfiles::default();
for (id, profile) in AgentSettings::get_global(cx).profiles.iter() {
profiles.insert(id.clone(), profile.name.clone());
}
profiles
}

pub fn id(&self) -> &AgentProfileId {
&self.id
}

pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else {
return Vec::new();
};

self.tool_set
.read(cx)
.tools(cx)
.into_iter()
.filter(|tool| Self::is_enabled(settings, tool.source(), tool.name()))
.collect()
}

fn is_enabled(settings: &AgentProfileSettings, source: ToolSource, name: String) -> bool {
match source {
ToolSource::Native => *settings.tools.get(name.as_str()).unwrap_or(&false),
ToolSource::ContextServer { id } => {
if settings.enable_all_context_servers {
return true;
}

let Some(preset) = settings.context_servers.get(id.as_ref()) else {
return false;
};
*preset.tools.get(name.as_str()).unwrap_or(&false)
}
}
}
}

#[cfg(test)]
|
||||
mod tests {
|
||||
use agent_settings::ContextServerPreset;
|
||||
use assistant_tool::ToolRegistry;
|
||||
use collections::IndexMap;
|
||||
use gpui::{AppContext, TestAppContext};
|
||||
use http_client::FakeHttpClient;
|
||||
use project::Project;
|
||||
use settings::{Settings, SettingsStore};
|
||||
use ui::SharedString;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_enabled_built_in_tools_for_profile(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let id = AgentProfileId::default();
|
||||
let profile_settings = cx.read(|cx| {
|
||||
AgentSettings::get_global(cx)
|
||||
.profiles
|
||||
.get(&id)
|
||||
.unwrap()
|
||||
.clone()
|
||||
});
|
||||
let tool_set = default_tool_set(cx);
|
||||
|
||||
let profile = AgentProfile::new(id.clone(), tool_set);
|
||||
|
||||
let mut enabled_tools = cx
|
||||
.read(|cx| profile.enabled_tools(cx))
|
||||
.into_iter()
|
||||
.map(|tool| tool.name())
|
||||
.collect::<Vec<_>>();
|
||||
enabled_tools.sort();
|
||||
|
||||
let mut expected_tools = profile_settings
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then_some(tool.to_string()))
|
||||
// Provider dependent
|
||||
.filter(|tool| tool != "web_search")
|
||||
.collect::<Vec<_>>();
|
||||
// Plus all registered MCP tools
|
||||
expected_tools.extend(["enabled_mcp_tool".into(), "disabled_mcp_tool".into()]);
|
||||
expected_tools.sort();
|
||||
|
||||
assert_eq!(enabled_tools, expected_tools);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_custom_mcp_settings(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let id = AgentProfileId("custom_mcp".into());
|
||||
let profile_settings = cx.read(|cx| {
|
||||
AgentSettings::get_global(cx)
|
||||
.profiles
|
||||
.get(&id)
|
||||
.unwrap()
|
||||
.clone()
|
||||
});
|
||||
let tool_set = default_tool_set(cx);
|
||||
|
||||
let profile = AgentProfile::new(id.clone(), tool_set);
|
||||
|
||||
let mut enabled_tools = cx
|
||||
.read(|cx| profile.enabled_tools(cx))
|
||||
.into_iter()
|
||||
.map(|tool| tool.name())
|
||||
.collect::<Vec<_>>();
|
||||
enabled_tools.sort();
|
||||
|
||||
let mut expected_tools = profile_settings.context_servers["mcp"]
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|(key, enabled)| enabled.then(|| key.to_string()))
|
||||
.collect::<Vec<_>>();
|
||||
expected_tools.sort();
|
||||
|
||||
assert_eq!(enabled_tools, expected_tools);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_only_built_in(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let id = AgentProfileId("write_minus_mcp".into());
|
||||
let profile_settings = cx.read(|cx| {
|
||||
AgentSettings::get_global(cx)
|
||||
.profiles
|
||||
.get(&id)
|
||||
.unwrap()
|
||||
.clone()
|
||||
});
|
||||
let tool_set = default_tool_set(cx);
|
||||
|
||||
let profile = AgentProfile::new(id.clone(), tool_set);
|
||||
|
||||
let mut enabled_tools = cx
|
||||
.read(|cx| profile.enabled_tools(cx))
|
||||
.into_iter()
|
||||
.map(|tool| tool.name())
|
||||
.collect::<Vec<_>>();
|
||||
enabled_tools.sort();
|
||||
|
||||
let mut expected_tools = profile_settings
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then_some(tool.to_string()))
|
||||
// Provider dependent
|
||||
.filter(|tool| tool != "web_search")
|
||||
.collect::<Vec<_>>();
|
||||
expected_tools.sort();
|
||||
|
||||
assert_eq!(enabled_tools, expected_tools);
|
||||
}
|
||||
|
||||
fn init_test_settings(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
language_model::init_settings(cx);
|
||||
ToolRegistry::default_global(cx);
|
||||
assistant_tools::init(FakeHttpClient::with_404_response(), cx);
|
||||
});
|
||||
|
||||
cx.update(|cx| {
|
||||
let mut agent_settings = AgentSettings::get_global(cx).clone();
|
||||
agent_settings.profiles.insert(
|
||||
AgentProfileId("write_minus_mcp".into()),
|
||||
AgentProfileSettings {
|
||||
name: "write_minus_mcp".into(),
|
||||
enable_all_context_servers: false,
|
||||
..agent_settings.profiles[&AgentProfileId::default()].clone()
|
||||
},
|
||||
);
|
||||
agent_settings.profiles.insert(
|
||||
AgentProfileId("custom_mcp".into()),
|
||||
AgentProfileSettings {
|
||||
name: "mcp".into(),
|
||||
tools: IndexMap::default(),
|
||||
enable_all_context_servers: false,
|
||||
context_servers: IndexMap::from_iter([("mcp".into(), context_server_preset())]),
|
||||
},
|
||||
);
|
||||
AgentSettings::override_global(agent_settings, cx);
|
||||
})
|
||||
}
|
||||
|
||||
fn context_server_preset() -> ContextServerPreset {
|
||||
ContextServerPreset {
|
||||
tools: IndexMap::from_iter([
|
||||
("enabled_mcp_tool".into(), true),
|
||||
("disabled_mcp_tool".into(), false),
|
||||
]),
|
||||
}
|
||||
}
|
||||
|
||||
fn default_tool_set(cx: &mut TestAppContext) -> Entity<ToolWorkingSet> {
|
||||
cx.new(|_| {
|
||||
let mut tool_set = ToolWorkingSet::default();
|
||||
tool_set.insert(Arc::new(FakeTool::new("enabled_mcp_tool", "mcp")));
|
||||
tool_set.insert(Arc::new(FakeTool::new("disabled_mcp_tool", "mcp")));
|
||||
tool_set
|
||||
})
|
||||
}
|
||||
|
||||
struct FakeTool {
|
||||
name: String,
|
||||
source: SharedString,
|
||||
}
|
||||
|
||||
impl FakeTool {
|
||||
fn new(name: impl Into<String>, source: impl Into<SharedString>) -> Self {
|
||||
Self {
|
||||
name: name.into(),
|
||||
source: source.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Tool for FakeTool {
|
||||
fn name(&self) -> String {
|
||||
self.name.clone()
|
||||
}
|
||||
|
||||
fn source(&self) -> ToolSource {
|
||||
ToolSource::ContextServer {
|
||||
id: self.source.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn icon(&self) -> ui::IconName {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn needs_confirmation(&self, _input: &serde_json::Value, _cx: &App) -> bool {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn ui_text(&self, _input: &serde_json::Value) -> String {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_input: serde_json::Value,
|
||||
_request: Arc<language_model::LanguageModelRequest>,
|
||||
_project: Entity<Project>,
|
||||
_action_log: Entity<assistant_tool::ActionLog>,
|
||||
_model: Arc<dyn language_model::LanguageModel>,
|
||||
_window: Option<gpui::AnyWindowHandle>,
|
||||
_cx: &mut App,
|
||||
) -> assistant_tool::ToolResult {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn may_perform_edits(&self) -> bool {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,7 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::Range;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
@@ -765,7 +767,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let source_range = snapshot.anchor_before(state.source_range.start)
|
||||
..snapshot.anchor_after(state.source_range.end);
|
||||
..snapshot.anchor_before(state.source_range.end);
|
||||
|
||||
let thread_store = self.thread_store.clone();
|
||||
let text_thread_store = self.text_thread_store.clone();
|
||||
@@ -910,6 +912,16 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
})
|
||||
}
|
||||
|
||||
fn resolve_completions(
|
||||
&self,
|
||||
_buffer: Entity<Buffer>,
|
||||
_completion_indices: Vec<usize>,
|
||||
_completions: Rc<RefCell<Box<[Completion]>>>,
|
||||
_cx: &mut Context<Editor>,
|
||||
) -> Task<Result<bool>> {
|
||||
Task::ready(Ok(true))
|
||||
}
|
||||
|
||||
fn is_completion_trigger(
|
||||
&self,
|
||||
buffer: &Entity<language::Buffer>,
|
||||
@@ -1065,7 +1077,7 @@ mod tests {
|
||||
use project::{Project, ProjectPath};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::{ops::Deref, rc::Rc};
|
||||
use std::ops::Deref;
|
||||
use util::{path, separator};
|
||||
use workspace::{AppState, Item};
|
||||
|
||||
|
||||
@@ -282,18 +282,15 @@ pub fn unordered_thread_entries(
|
||||
text_thread_store: Entity<TextThreadStore>,
|
||||
cx: &App,
|
||||
) -> impl Iterator<Item = (DateTime<Utc>, ThreadContextEntry)> {
|
||||
let threads = thread_store
|
||||
.read(cx)
|
||||
.reverse_chronological_threads()
|
||||
.map(|thread| {
|
||||
(
|
||||
thread.updated_at,
|
||||
ThreadContextEntry::Thread {
|
||||
id: thread.id.clone(),
|
||||
title: thread.summary.clone(),
|
||||
},
|
||||
)
|
||||
});
|
||||
let threads = thread_store.read(cx).unordered_threads().map(|thread| {
|
||||
(
|
||||
thread.updated_at,
|
||||
ThreadContextEntry::Thread {
|
||||
id: thread.id.clone(),
|
||||
title: thread.summary.clone(),
|
||||
},
|
||||
)
|
||||
});
|
||||
|
||||
let text_threads = text_thread_store
|
||||
.read(cx)
|
||||
@@ -303,7 +300,7 @@ pub fn unordered_thread_entries(
|
||||
context.mtime.to_utc(),
|
||||
ThreadContextEntry::Context {
|
||||
path: context.path.clone(),
|
||||
title: context.title.clone(),
|
||||
title: context.title.clone().into(),
|
||||
},
|
||||
)
|
||||
});
|
||||
|
||||
@@ -104,15 +104,7 @@ impl Tool for ContextServerTool {
|
||||
tool_name,
|
||||
arguments
|
||||
);
|
||||
let response = protocol
|
||||
.request::<context_server::types::requests::CallTool>(
|
||||
context_server::types::CallToolParams {
|
||||
name: tool_name,
|
||||
arguments,
|
||||
meta: None,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
let response = protocol.run_tool(tool_name, arguments).await?;
|
||||
|
||||
let mut result = String::new();
|
||||
for content in response.content {
|
||||
@@ -123,9 +115,6 @@ impl Tool for ContextServerTool {
|
||||
types::ToolResponseContent::Image { .. } => {
|
||||
log::warn!("Ignoring image content from tool response");
|
||||
}
|
||||
types::ToolResponseContent::Audio { .. } => {
|
||||
log::warn!("Ignoring audio content from tool response");
|
||||
}
|
||||
types::ToolResponseContent::Resource { .. } => {
|
||||
log::warn!("Ignoring resource content from tool response");
|
||||
}
|
||||
|
||||
@@ -1,17 +1,18 @@
use std::{collections::VecDeque, path::Path, sync::Arc};

use anyhow::{Context as _, Result};
use assistant_context_editor::SavedContextMetadata;
use anyhow::Context as _;
use assistant_context_editor::{AssistantContext, SavedContextMetadata};
use chrono::{DateTime, Utc};
use gpui::{AsyncApp, Entity, SharedString, Task, prelude::*};
use itertools::Itertools;
use paths::contexts_dir;
use futures::future::{TryFutureExt as _, join_all};
use gpui::{Entity, Task, prelude::*};
use serde::{Deserialize, Serialize};
use smol::future::FutureExt;
use std::time::Duration;
use ui::App;
use ui::{App, SharedString, Window};
use util::ResultExt as _;

use crate::{
Thread,
thread::ThreadId,
thread_store::{SerializedThreadMetadata, ThreadStore},
};
@@ -40,34 +41,52 @@ impl HistoryEntry {
HistoryEntry::Context(context) => HistoryEntryId::Context(context.path.clone()),
}
}

pub fn title(&self) -> &SharedString {
match self {
HistoryEntry::Thread(thread) => &thread.summary,
HistoryEntry::Context(context) => &context.title,
}
}
}

/// Generic identifier for a history entry.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Eq)]
pub enum HistoryEntryId {
Thread(ThreadId),
Context(Arc<Path>),
}

#[derive(Clone, Debug)]
pub(crate) enum RecentEntry {
Thread(ThreadId, Entity<Thread>),
Context(Entity<AssistantContext>),
}

impl PartialEq for RecentEntry {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Self::Thread(l0, _), Self::Thread(r0, _)) => l0 == r0,
(Self::Context(l0), Self::Context(r0)) => l0 == r0,
_ => false,
}
}
}

impl Eq for RecentEntry {}

impl RecentEntry {
pub(crate) fn summary(&self, cx: &App) -> SharedString {
match self {
RecentEntry::Thread(_, thread) => thread.read(cx).summary().or_default(),
RecentEntry::Context(context) => context.read(cx).summary().or_default(),
}
}
}

#[derive(Serialize, Deserialize)]
enum SerializedRecentOpen {
enum SerializedRecentEntry {
Thread(String),
ContextName(String),
/// Old format which stores the full path
Context(String),
}
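For reference, with serde's default externally tagged enum representation the persisted navigation-history entries above round-trip as a JSON array of single-key objects. A minimal standalone sketch under that assumption (the sample values and the `main` harness are illustrative, not part of the change; it assumes the `serde` derive feature and `serde_json`):

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
enum SerializedRecentEntry {
    Thread(String),
    Context(String),
}

fn main() -> serde_json::Result<()> {
    let entries = vec![
        SerializedRecentEntry::Thread("thread-id-123".into()),
        SerializedRecentEntry::Context("/path/to/contexts/example.json".into()),
    ];
    // Externally tagged: [{"Thread":"thread-id-123"},{"Context":"/path/to/contexts/example.json"}]
    let json = serde_json::to_string(&entries)?;
    let restored: Vec<SerializedRecentEntry> = serde_json::from_str(&json)?;
    assert_eq!(restored, entries);
    println!("{json}");
    Ok(())
}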

pub struct HistoryStore {
thread_store: Entity<ThreadStore>,
context_store: Entity<assistant_context_editor::ContextStore>,
recently_opened_entries: VecDeque<HistoryEntryId>,
recently_opened_entries: VecDeque<RecentEntry>,
_subscriptions: Vec<gpui::Subscription>,
_save_recently_opened_entries_task: Task<()>,
}
@@ -76,7 +95,8 @@ impl HistoryStore {
|
||||
pub fn new(
|
||||
thread_store: Entity<ThreadStore>,
|
||||
context_store: Entity<assistant_context_editor::ContextStore>,
|
||||
initial_recent_entries: impl IntoIterator<Item = HistoryEntryId>,
|
||||
initial_recent_entries: impl IntoIterator<Item = RecentEntry>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let subscriptions = vec![
|
||||
@@ -84,20 +104,68 @@ impl HistoryStore {
|
||||
cx.observe(&context_store, |_, _, cx| cx.notify()),
|
||||
];
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
let entries = Self::load_recently_opened_entries(cx).await.log_err()?;
|
||||
this.update(cx, |this, _| {
|
||||
this.recently_opened_entries
|
||||
.extend(
|
||||
entries.into_iter().take(
|
||||
MAX_RECENTLY_OPENED_ENTRIES
|
||||
.saturating_sub(this.recently_opened_entries.len()),
|
||||
),
|
||||
);
|
||||
window
|
||||
.spawn(cx, {
|
||||
let thread_store = thread_store.downgrade();
|
||||
let context_store = context_store.downgrade();
|
||||
let this = cx.weak_entity();
|
||||
async move |cx| {
|
||||
let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
|
||||
let contents = cx
|
||||
.background_spawn(async move { std::fs::read_to_string(path) })
|
||||
.await
|
||||
.ok()?;
|
||||
let entries = serde_json::from_str::<Vec<SerializedRecentEntry>>(&contents)
|
||||
.context("deserializing persisted agent panel navigation history")
|
||||
.log_err()?
|
||||
.into_iter()
|
||||
.take(MAX_RECENTLY_OPENED_ENTRIES)
|
||||
.map(|serialized| match serialized {
|
||||
SerializedRecentEntry::Thread(id) => thread_store
|
||||
.update_in(cx, |thread_store, window, cx| {
|
||||
let thread_id = ThreadId::from(id.as_str());
|
||||
thread_store
|
||||
.open_thread(&thread_id, window, cx)
|
||||
.map_ok(|thread| RecentEntry::Thread(thread_id, thread))
|
||||
.boxed()
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
async {
|
||||
anyhow::bail!("no thread store");
|
||||
}
|
||||
.boxed()
|
||||
}),
|
||||
SerializedRecentEntry::Context(id) => context_store
|
||||
.update(cx, |context_store, cx| {
|
||||
context_store
|
||||
.open_local_context(Path::new(&id).into(), cx)
|
||||
.map_ok(RecentEntry::Context)
|
||||
.boxed()
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
async {
|
||||
anyhow::bail!("no context store");
|
||||
}
|
||||
.boxed()
|
||||
}),
|
||||
});
|
||||
let entries = join_all(entries)
|
||||
.await
|
||||
.into_iter()
|
||||
.filter_map(|result| result.log_with_level(log::Level::Debug))
|
||||
.collect::<VecDeque<_>>();
|
||||
|
||||
this.update(cx, |this, _| {
|
||||
this.recently_opened_entries.extend(entries);
|
||||
this.recently_opened_entries
|
||||
.truncate(MAX_RECENTLY_OPENED_ENTRIES);
|
||||
})
|
||||
.ok();
|
||||
|
||||
Some(())
|
||||
}
|
||||
})
|
||||
.ok()
|
||||
})
|
||||
.detach();
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
thread_store,
|
||||
@@ -116,20 +184,19 @@ impl HistoryStore {
|
||||
return history_entries;
|
||||
}
|
||||
|
||||
history_entries.extend(
|
||||
self.thread_store
|
||||
.read(cx)
|
||||
.reverse_chronological_threads()
|
||||
.cloned()
|
||||
.map(HistoryEntry::Thread),
|
||||
);
|
||||
history_entries.extend(
|
||||
self.context_store
|
||||
.read(cx)
|
||||
.unordered_contexts()
|
||||
.cloned()
|
||||
.map(HistoryEntry::Context),
|
||||
);
|
||||
for thread in self
|
||||
.thread_store
|
||||
.update(cx, |this, _cx| this.reverse_chronological_threads())
|
||||
{
|
||||
history_entries.push(HistoryEntry::Thread(thread));
|
||||
}
|
||||
|
||||
for context in self
|
||||
.context_store
|
||||
.update(cx, |this, _cx| this.reverse_chronological_contexts())
|
||||
{
|
||||
history_entries.push(HistoryEntry::Context(context));
|
||||
}
|
||||
|
||||
history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at()));
|
||||
history_entries
|
||||
@@ -139,62 +206,15 @@ impl HistoryStore {
|
||||
self.entries(cx).into_iter().take(limit).collect()
|
||||
}
|
||||
|
||||
pub fn recently_opened_entries(&self, cx: &App) -> Vec<HistoryEntry> {
|
||||
#[cfg(debug_assertions)]
|
||||
if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let thread_entries = self
|
||||
.thread_store
|
||||
.read(cx)
|
||||
.reverse_chronological_threads()
|
||||
.flat_map(|thread| {
|
||||
self.recently_opened_entries
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(index, entry)| match entry {
|
||||
HistoryEntryId::Thread(id) if &thread.id == id => {
|
||||
Some((index, HistoryEntry::Thread(thread.clone())))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
});
|
||||
|
||||
let context_entries =
|
||||
self.context_store
|
||||
.read(cx)
|
||||
.unordered_contexts()
|
||||
.flat_map(|context| {
|
||||
self.recently_opened_entries
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(index, entry)| match entry {
|
||||
HistoryEntryId::Context(path) if &context.path == path => {
|
||||
Some((index, HistoryEntry::Context(context.clone())))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
});
|
||||
|
||||
thread_entries
|
||||
.chain(context_entries)
|
||||
// optimization to halt iteration early
|
||||
.take(self.recently_opened_entries.len())
|
||||
.sorted_unstable_by_key(|(index, _)| *index)
|
||||
.map(|(_, entry)| entry)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn save_recently_opened_entries(&mut self, cx: &mut Context<Self>) {
|
||||
let serialized_entries = self
|
||||
.recently_opened_entries
|
||||
.iter()
|
||||
.filter_map(|entry| match entry {
|
||||
HistoryEntryId::Context(path) => path.file_name().map(|file| {
|
||||
SerializedRecentOpen::ContextName(file.to_string_lossy().to_string())
|
||||
}),
|
||||
HistoryEntryId::Thread(id) => Some(SerializedRecentOpen::Thread(id.to_string())),
|
||||
RecentEntry::Context(context) => Some(SerializedRecentEntry::Context(
|
||||
context.read(cx).path()?.to_str()?.to_owned(),
|
||||
)),
|
||||
RecentEntry::Thread(id, _) => Some(SerializedRecentEntry::Thread(id.to_string())),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
@@ -213,33 +233,7 @@ impl HistoryStore {
|
||||
});
|
||||
}
|
||||
|
||||
fn load_recently_opened_entries(cx: &AsyncApp) -> Task<Result<Vec<HistoryEntryId>>> {
|
||||
cx.background_spawn(async move {
|
||||
let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
|
||||
let contents = smol::fs::read_to_string(path).await?;
|
||||
let entries = serde_json::from_str::<Vec<SerializedRecentOpen>>(&contents)
|
||||
.context("deserializing persisted agent panel navigation history")?
|
||||
.into_iter()
|
||||
.take(MAX_RECENTLY_OPENED_ENTRIES)
|
||||
.flat_map(|entry| match entry {
|
||||
SerializedRecentOpen::Thread(id) => {
|
||||
Some(HistoryEntryId::Thread(id.as_str().into()))
|
||||
}
|
||||
SerializedRecentOpen::ContextName(file_name) => Some(HistoryEntryId::Context(
|
||||
contexts_dir().join(file_name).into(),
|
||||
)),
|
||||
SerializedRecentOpen::Context(path) => {
|
||||
Path::new(&path).file_name().map(|file_name| {
|
||||
HistoryEntryId::Context(contexts_dir().join(file_name).into())
|
||||
})
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
Ok(entries)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn push_recently_opened_entry(&mut self, entry: HistoryEntryId, cx: &mut Context<Self>) {
|
||||
pub fn push_recently_opened_entry(&mut self, entry: RecentEntry, cx: &mut Context<Self>) {
|
||||
self.recently_opened_entries
|
||||
.retain(|old_entry| old_entry != &entry);
|
||||
self.recently_opened_entries.push_front(entry);
|
||||
@@ -250,33 +244,24 @@ impl HistoryStore {
|
||||
|
||||
pub fn remove_recently_opened_thread(&mut self, id: ThreadId, cx: &mut Context<Self>) {
|
||||
self.recently_opened_entries.retain(|entry| match entry {
|
||||
HistoryEntryId::Thread(thread_id) if thread_id == &id => false,
|
||||
RecentEntry::Thread(thread_id, _) if thread_id == &id => false,
|
||||
_ => true,
|
||||
});
|
||||
self.save_recently_opened_entries(cx);
|
||||
}
|
||||
|
||||
pub fn replace_recently_opened_text_thread(
|
||||
&mut self,
|
||||
old_path: &Path,
|
||||
new_path: &Arc<Path>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
for entry in &mut self.recently_opened_entries {
|
||||
match entry {
|
||||
HistoryEntryId::Context(path) if path.as_ref() == old_path => {
|
||||
*entry = HistoryEntryId::Context(new_path.clone());
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
self.save_recently_opened_entries(cx);
|
||||
}
|
||||
|
||||
pub fn remove_recently_opened_entry(&mut self, entry: &HistoryEntryId, cx: &mut Context<Self>) {
|
||||
pub fn remove_recently_opened_entry(&mut self, entry: &RecentEntry, cx: &mut Context<Self>) {
|
||||
self.recently_opened_entries
|
||||
.retain(|old_entry| old_entry != entry);
|
||||
self.save_recently_opened_entries(cx);
|
||||
}
|
||||
|
||||
pub fn recently_opened_entries(&self, _cx: &mut Context<Self>) -> VecDeque<RecentEntry> {
|
||||
#[cfg(debug_assertions)]
|
||||
if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
|
||||
return VecDeque::new();
|
||||
}
|
||||
|
||||
self.recently_opened_entries.clone()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1011,7 +1011,7 @@ impl InlineAssistant {
|
||||
self.update_editor_highlights(&editor, cx);
|
||||
}
|
||||
} else {
|
||||
entry.get_mut().highlight_updates.send(()).ok();
|
||||
entry.get().highlight_updates.send(()).ok();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1519,7 +1519,7 @@ impl InlineAssistant {
|
||||
struct EditorInlineAssists {
|
||||
assist_ids: Vec<InlineAssistId>,
|
||||
scroll_lock: Option<InlineAssistScrollLock>,
|
||||
highlight_updates: watch::Sender<()>,
|
||||
highlight_updates: async_watch::Sender<()>,
|
||||
_update_highlights: Task<Result<()>>,
|
||||
_subscriptions: Vec<gpui::Subscription>,
|
||||
}
|
||||
@@ -1531,7 +1531,7 @@ struct InlineAssistScrollLock {
|
||||
|
||||
impl EditorInlineAssists {
|
||||
fn new(editor: &Entity<Editor>, window: &mut Window, cx: &mut App) -> Self {
|
||||
let (highlight_updates_tx, mut highlight_updates_rx) = watch::channel(());
|
||||
let (highlight_updates_tx, mut highlight_updates_rx) = async_watch::channel(());
|
||||
Self {
|
||||
assist_ids: Vec::new(),
|
||||
scroll_lock: None,
|
||||
@@ -1689,7 +1689,7 @@ impl InlineAssist {
|
||||
if let Some(editor) = editor.upgrade() {
|
||||
InlineAssistant::update_global(cx, |this, cx| {
|
||||
if let Some(editor_assists) =
|
||||
this.assists_by_editor.get_mut(&editor.downgrade())
|
||||
this.assists_by_editor.get(&editor.downgrade())
|
||||
{
|
||||
editor_assists.highlight_updates.send(()).ok();
|
||||
}
|
||||
|
||||
@@ -175,7 +175,8 @@ impl MessageEditor {
|
||||
)
|
||||
});
|
||||
|
||||
let incompatible_tools = cx.new(|cx| IncompatibleToolsState::new(thread.clone(), cx));
|
||||
let incompatible_tools =
|
||||
cx.new(|cx| IncompatibleToolsState::new(thread.read(cx).tools().clone(), cx));
|
||||
|
||||
let subscriptions = vec![
|
||||
cx.subscribe_in(&context_strip, window, Self::handle_context_strip_event),
|
||||
@@ -203,8 +204,15 @@ impl MessageEditor {
|
||||
)
|
||||
});
|
||||
|
||||
let profile_selector =
|
||||
cx.new(|cx| ProfileSelector::new(fs, thread.clone(), editor.focus_handle(cx), cx));
|
||||
let profile_selector = cx.new(|cx| {
|
||||
ProfileSelector::new(
|
||||
fs,
|
||||
thread.clone(),
|
||||
thread_store,
|
||||
editor.focus_handle(cx),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
Self {
|
||||
editor: editor.clone(),
|
||||
|
||||
@@ -1,24 +1,26 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use agent_settings::{AgentDockPosition, AgentProfileId, AgentSettings, builtin_profiles};
|
||||
use agent_settings::{
|
||||
AgentDockPosition, AgentProfile, AgentProfileId, AgentSettings, GroupedAgentProfiles,
|
||||
builtin_profiles,
|
||||
};
|
||||
use fs::Fs;
|
||||
use gpui::{Action, Empty, Entity, FocusHandle, Subscription, prelude::*};
|
||||
use gpui::{Action, Empty, Entity, FocusHandle, Subscription, WeakEntity, prelude::*};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use settings::{Settings as _, SettingsStore, update_settings_file};
|
||||
use ui::{
|
||||
ContextMenu, ContextMenuEntry, DocumentationSide, PopoverMenu, PopoverMenuHandle, Tooltip,
|
||||
prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::{
|
||||
ManageProfiles, Thread, ToggleProfileSelector,
|
||||
agent_profile::{AgentProfile, AvailableProfiles},
|
||||
};
|
||||
use crate::{ManageProfiles, Thread, ThreadStore, ToggleProfileSelector};
|
||||
|
||||
pub struct ProfileSelector {
|
||||
profiles: AvailableProfiles,
|
||||
profiles: GroupedAgentProfiles,
|
||||
fs: Arc<dyn Fs>,
|
||||
thread: Entity<Thread>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
focus_handle: FocusHandle,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
@@ -28,6 +30,7 @@ impl ProfileSelector {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
thread: Entity<Thread>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
focus_handle: FocusHandle,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -36,9 +39,10 @@ impl ProfileSelector {
|
||||
});
|
||||
|
||||
Self {
|
||||
profiles: AgentProfile::available_profiles(cx),
|
||||
profiles: GroupedAgentProfiles::from_settings(AgentSettings::get_global(cx)),
|
||||
fs,
|
||||
thread,
|
||||
thread_store,
|
||||
menu_handle: PopoverMenuHandle::default(),
|
||||
focus_handle,
|
||||
_subscriptions: vec![settings_subscription],
|
||||
@@ -50,7 +54,7 @@ impl ProfileSelector {
|
||||
}
|
||||
|
||||
fn refresh_profiles(&mut self, cx: &mut Context<Self>) {
|
||||
self.profiles = AgentProfile::available_profiles(cx);
|
||||
self.profiles = GroupedAgentProfiles::from_settings(AgentSettings::get_global(cx));
|
||||
}
|
||||
|
||||
fn build_context_menu(
|
||||
@@ -60,30 +64,21 @@ impl ProfileSelector {
|
||||
) -> Entity<ContextMenu> {
|
||||
ContextMenu::build(window, cx, |mut menu, _window, cx| {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
|
||||
let mut found_non_builtin = false;
|
||||
for (profile_id, profile_name) in self.profiles.iter() {
|
||||
if !builtin_profiles::is_builtin(profile_id) {
|
||||
found_non_builtin = true;
|
||||
continue;
|
||||
}
|
||||
for (profile_id, profile) in self.profiles.builtin.iter() {
|
||||
menu = menu.item(self.menu_entry_for_profile(
|
||||
profile_id.clone(),
|
||||
profile_name,
|
||||
profile,
|
||||
settings,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
|
||||
if found_non_builtin {
|
||||
if !self.profiles.custom.is_empty() {
|
||||
menu = menu.separator().header("Custom Profiles");
|
||||
for (profile_id, profile_name) in self.profiles.iter() {
|
||||
if builtin_profiles::is_builtin(profile_id) {
|
||||
continue;
|
||||
}
|
||||
for (profile_id, profile) in self.profiles.custom.iter() {
|
||||
menu = menu.item(self.menu_entry_for_profile(
|
||||
profile_id.clone(),
|
||||
profile_name,
|
||||
profile,
|
||||
settings,
|
||||
cx,
|
||||
));
|
||||
@@ -104,20 +99,19 @@ impl ProfileSelector {
|
||||
fn menu_entry_for_profile(
|
||||
&self,
|
||||
profile_id: AgentProfileId,
|
||||
profile_name: &SharedString,
|
||||
profile: &AgentProfile,
|
||||
settings: &AgentSettings,
|
||||
cx: &App,
|
||||
_cx: &App,
|
||||
) -> ContextMenuEntry {
|
||||
let documentation = match profile_name.to_lowercase().as_str() {
|
||||
let documentation = match profile.name.to_lowercase().as_str() {
|
||||
builtin_profiles::WRITE => Some("Get help to write anything."),
|
||||
builtin_profiles::ASK => Some("Chat about your codebase."),
|
||||
builtin_profiles::MINIMAL => Some("Chat about anything with no tools."),
|
||||
_ => None,
|
||||
};
|
||||
let thread_profile_id = self.thread.read(cx).profile().id();
|
||||
|
||||
let entry = ContextMenuEntry::new(profile_name.clone())
|
||||
.toggleable(IconPosition::End, &profile_id == thread_profile_id);
|
||||
let entry = ContextMenuEntry::new(profile.name.clone())
|
||||
.toggleable(IconPosition::End, profile_id == settings.default_profile);
|
||||
|
||||
let entry = if let Some(doc_text) = documentation {
|
||||
entry.documentation_aside(documentation_side(settings.dock), move |_| {
|
||||
@@ -129,7 +123,7 @@ impl ProfileSelector {
|
||||
|
||||
entry.handler({
|
||||
let fs = self.fs.clone();
|
||||
let thread = self.thread.clone();
|
||||
let thread_store = self.thread_store.clone();
|
||||
let profile_id = profile_id.clone();
|
||||
move |_window, cx| {
|
||||
update_settings_file::<AgentSettings>(fs.clone(), cx, {
|
||||
@@ -139,9 +133,11 @@ impl ProfileSelector {
|
||||
}
|
||||
});
|
||||
|
||||
thread.update(cx, |this, cx| {
|
||||
this.set_profile(profile_id.clone(), cx);
|
||||
});
|
||||
thread_store
|
||||
.update(cx, |this, cx| {
|
||||
this.load_profile_by_id(profile_id.clone(), cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -150,7 +146,7 @@ impl ProfileSelector {
|
||||
impl Render for ProfileSelector {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
let profile_id = self.thread.read(cx).profile().id();
|
||||
let profile_id = &settings.default_profile;
|
||||
let profile = settings.profiles.get(profile_id);
|
||||
|
||||
let selected_profile = profile
|
||||
|
||||
@@ -4,7 +4,7 @@ use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
|
||||
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
|
||||
use agent_settings::{AgentSettings, CompletionMode};
|
||||
use anyhow::{Result, anyhow};
|
||||
use assistant_tool::{ActionLog, AnyToolCard, Tool, ToolWorkingSet};
|
||||
use chrono::{DateTime, Utc};
|
||||
@@ -41,7 +41,6 @@ use uuid::Uuid;
|
||||
use zed_llm_client::{CompletionIntent, CompletionRequestStatus};
|
||||
|
||||
use crate::ThreadStore;
|
||||
use crate::agent_profile::AgentProfile;
|
||||
use crate::context::{AgentContext, AgentContextHandle, ContextLoadResult, LoadedContext};
|
||||
use crate::thread_store::{
|
||||
SerializedCrease, SerializedLanguageModel, SerializedMessage, SerializedMessageSegment,
|
||||
@@ -195,20 +194,20 @@ impl MessageSegment {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProjectSnapshot {
|
||||
pub worktree_snapshots: Vec<WorktreeSnapshot>,
|
||||
pub unsaved_buffer_paths: Vec<String>,
|
||||
pub timestamp: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct WorktreeSnapshot {
|
||||
pub worktree_path: String,
|
||||
pub git_state: Option<GitState>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct GitState {
|
||||
pub remote_url: Option<String>,
|
||||
pub head_sha: Option<String>,
|
||||
@@ -247,7 +246,7 @@ impl LastRestoreCheckpoint {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
|
||||
pub enum DetailedSummaryState {
|
||||
#[default]
|
||||
NotGenerated,
|
||||
@@ -361,7 +360,6 @@ pub struct Thread {
|
||||
>,
|
||||
remaining_turns: u32,
|
||||
configured_model: Option<ConfiguredModel>,
|
||||
profile: AgentProfile,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
@@ -391,7 +389,7 @@ impl ThreadSummary {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ExceededWindowError {
|
||||
/// Model used when last message exceeded context window
|
||||
model_id: LanguageModelId,
|
||||
@@ -409,7 +407,6 @@ impl Thread {
|
||||
) -> Self {
|
||||
let (detailed_summary_tx, detailed_summary_rx) = postage::watch::channel();
|
||||
let configured_model = LanguageModelRegistry::read_global(cx).default_model();
|
||||
let profile_id = AgentSettings::get_global(cx).default_profile.clone();
|
||||
|
||||
Self {
|
||||
id: ThreadId::new(),
|
||||
@@ -452,7 +449,6 @@ impl Thread {
|
||||
request_callback: None,
|
||||
remaining_turns: u32::MAX,
|
||||
configured_model,
|
||||
profile: AgentProfile::new(profile_id, tools),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -499,9 +495,6 @@ impl Thread {
|
||||
let completion_mode = serialized
|
||||
.completion_mode
|
||||
.unwrap_or_else(|| AgentSettings::get_global(cx).preferred_completion_mode);
|
||||
let profile_id = serialized
|
||||
.profile
|
||||
.unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone());
|
||||
|
||||
Self {
|
||||
id,
|
||||
@@ -561,7 +554,7 @@ impl Thread {
|
||||
pending_checkpoint: None,
|
||||
project: project.clone(),
|
||||
prompt_builder,
|
||||
tools: tools.clone(),
|
||||
tools,
|
||||
tool_use,
|
||||
action_log: cx.new(|_| ActionLog::new(project)),
|
||||
initial_project_snapshot: Task::ready(serialized.initial_project_snapshot).shared(),
|
||||
@@ -577,7 +570,6 @@ impl Thread {
|
||||
request_callback: None,
|
||||
remaining_turns: u32::MAX,
|
||||
configured_model,
|
||||
profile: AgentProfile::new(profile_id, tools),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -593,17 +585,6 @@ impl Thread {
|
||||
&self.id
|
||||
}
|
||||
|
||||
pub fn profile(&self) -> &AgentProfile {
|
||||
&self.profile
|
||||
}
|
||||
|
||||
pub fn set_profile(&mut self, id: AgentProfileId, cx: &mut Context<Self>) {
|
||||
if &id != self.profile.id() {
|
||||
self.profile = AgentProfile::new(id, self.tools.clone());
|
||||
cx.emit(ThreadEvent::ProfileChanged);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.messages.is_empty()
|
||||
}
|
||||
@@ -938,7 +919,8 @@ impl Thread {
|
||||
model: Arc<dyn LanguageModel>,
|
||||
) -> Vec<LanguageModelRequestTool> {
|
||||
if model.supports_tools() {
|
||||
self.profile
|
||||
self.tools()
|
||||
.read(cx)
|
||||
.enabled_tools(cx)
|
||||
.into_iter()
|
||||
.filter_map(|tool| {
|
||||
@@ -1198,7 +1180,6 @@ impl Thread {
|
||||
}),
|
||||
completion_mode: Some(this.completion_mode),
|
||||
tool_use_limit_reached: this.tool_use_limit_reached,
|
||||
profile: Some(this.profile.id().clone()),
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -2140,7 +2121,7 @@ impl Thread {
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Thread>,
|
||||
) {
|
||||
let available_tools = self.profile.enabled_tools(cx);
|
||||
let available_tools = self.tools.read(cx).enabled_tools(cx);
|
||||
|
||||
let tool_list = available_tools
|
||||
.iter()
|
||||
@@ -2232,15 +2213,19 @@ impl Thread {
|
||||
) -> Task<()> {
|
||||
let tool_name: Arc<str> = tool.name().into();
|
||||
|
||||
let tool_result = tool.run(
|
||||
input,
|
||||
request,
|
||||
self.project.clone(),
|
||||
self.action_log.clone(),
|
||||
model,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
let tool_result = if self.tools.read(cx).is_disabled(&tool.source(), &tool_name) {
|
||||
Task::ready(Err(anyhow!("tool is disabled: {tool_name}"))).into()
|
||||
} else {
|
||||
tool.run(
|
||||
input,
|
||||
request,
|
||||
self.project.clone(),
|
||||
self.action_log.clone(),
|
||||
model,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
};
|
||||
|
||||
// Store the card separately if it exists
|
||||
if let Some(card) = tool_result.card.clone() {
|
||||
@@ -2359,7 +2344,8 @@ impl Thread {
|
||||
let client = self.project.read(cx).client();
|
||||
|
||||
let enabled_tool_names: Vec<String> = self
|
||||
.profile
|
||||
.tools()
|
||||
.read(cx)
|
||||
.enabled_tools(cx)
|
||||
.iter()
|
||||
.map(|tool| tool.name())
|
||||
@@ -2872,7 +2858,6 @@ pub enum ThreadEvent {
|
||||
ToolUseLimitReached,
|
||||
CancelEditing,
|
||||
CompletionCanceled,
|
||||
ProfileChanged,
|
||||
}
|
||||
|
||||
impl EventEmitter<ThreadEvent> for Thread {}
|
||||
@@ -2887,7 +2872,7 @@ struct PendingCompletion {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{ThreadStore, context::load_context, context_store::ContextStore, thread_store};
|
||||
use agent_settings::{AgentProfileId, AgentSettings, LanguageModelParameters};
|
||||
use agent_settings::{AgentSettings, LanguageModelParameters};
|
||||
use assistant_tool::ToolRegistry;
|
||||
use editor::EditorSettings;
|
||||
use gpui::TestAppContext;
|
||||
@@ -3300,71 +3285,6 @@ fn main() {{
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_storing_profile_setting_per_thread(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(
|
||||
cx,
|
||||
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let (_workspace, thread_store, thread, _context_store, _model) =
|
||||
setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Check that we are starting with the default profile
|
||||
let profile = cx.read(|cx| thread.read(cx).profile.clone());
|
||||
let tool_set = cx.read(|cx| thread_store.read(cx).tools());
|
||||
assert_eq!(
|
||||
profile,
|
||||
AgentProfile::new(AgentProfileId::default(), tool_set)
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_serializing_thread_profile(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(
|
||||
cx,
|
||||
json!({"code.rs": "fn main() {\n println!(\"Hello, world!\");\n}"}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let (_workspace, thread_store, thread, _context_store, _model) =
|
||||
setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Profile gets serialized with default values
|
||||
let serialized = thread
|
||||
.update(cx, |thread, cx| thread.serialize(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(serialized.profile, Some(AgentProfileId::default()));
|
||||
|
||||
let deserialized = cx.update(|cx| {
|
||||
thread.update(cx, |thread, cx| {
|
||||
Thread::deserialize(
|
||||
thread.id.clone(),
|
||||
serialized,
|
||||
thread.project.clone(),
|
||||
thread.tools.clone(),
|
||||
thread.prompt_builder.clone(),
|
||||
thread.project_context.clone(),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
});
|
||||
let tool_set = cx.read(|cx| thread_store.read(cx).tools());
|
||||
|
||||
assert_eq!(
|
||||
deserialized.profile,
|
||||
AgentProfile::new(AgentProfileId::default(), tool_set)
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_temperature_setting(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
@@ -671,7 +671,7 @@ impl RenderOnce for HistoryEntryElement {
|
||||
),
|
||||
HistoryEntry::Context(context) => (
|
||||
context.path.to_string_lossy().to_string(),
|
||||
context.title.clone(),
|
||||
context.title.clone().into(),
|
||||
context.mtime.timestamp(),
|
||||
),
|
||||
};
|
||||
|
||||
@@ -3,9 +3,9 @@ use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use agent_settings::{AgentProfileId, CompletionMode};
|
||||
use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, CompletionMode};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ToolId, ToolWorkingSet};
|
||||
use assistant_tool::{ToolId, ToolSource, ToolWorkingSet};
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::HashMap;
|
||||
use context_server::ContextServerId;
|
||||
@@ -25,6 +25,7 @@ use prompt_store::{
|
||||
UserRulesContext, WorktreeContext,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use ui::Window;
|
||||
use util::ResultExt as _;
|
||||
|
||||
@@ -89,7 +90,7 @@ pub fn init(cx: &mut App) {
|
||||
pub struct SharedProjectContext(Rc<RefCell<Option<ProjectContext>>>);
|
||||
|
||||
impl SharedProjectContext {
|
||||
pub fn borrow(&self) -> Ref<'_, Option<ProjectContext>> {
|
||||
pub fn borrow(&self) -> Ref<Option<ProjectContext>> {
|
||||
self.0.borrow()
|
||||
}
|
||||
}
|
||||
@@ -146,7 +147,12 @@ impl ThreadStore {
|
||||
prompt_store: Option<Entity<PromptStore>>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> (Self, oneshot::Receiver<()>) {
|
||||
let mut subscriptions = vec![cx.subscribe(&project, Self::handle_project_event)];
|
||||
let mut subscriptions = vec![
|
||||
cx.observe_global::<SettingsStore>(move |this: &mut Self, cx| {
|
||||
this.load_default_profile(cx);
|
||||
}),
|
||||
cx.subscribe(&project, Self::handle_project_event),
|
||||
];
|
||||
|
||||
if let Some(prompt_store) = prompt_store.as_ref() {
|
||||
subscriptions.push(cx.subscribe(
|
||||
@@ -194,6 +200,7 @@ impl ThreadStore {
|
||||
_reload_system_prompt_task: reload_system_prompt_task,
|
||||
_subscriptions: subscriptions,
|
||||
};
|
||||
this.load_default_profile(cx);
|
||||
this.register_context_server_handlers(cx);
|
||||
this.reload(cx).detach_and_log_err(cx);
|
||||
(this, ready_rx)
|
||||
@@ -393,11 +400,16 @@ impl ThreadStore {
self.threads.len()
}

pub fn reverse_chronological_threads(&self) -> impl Iterator<Item = &SerializedThreadMetadata> {
// ordering is from "ORDER BY" in `list_threads`
pub fn unordered_threads(&self) -> impl Iterator<Item = &SerializedThreadMetadata> {
self.threads.iter()
}

pub fn reverse_chronological_threads(&self) -> Vec<SerializedThreadMetadata> {
let mut threads = self.threads.iter().cloned().collect::<Vec<_>>();
threads.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.updated_at));
threads
}
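A tiny standalone illustration of the newest-first ordering used by `reverse_chronological_threads` above, with a simplified stand-in for the thread metadata type (the names and values here are illustrative, not taken from the crate):

#[derive(Debug)]
struct ThreadMeta {
    title: &'static str,
    updated_at: u64, // stand-in for a DateTime<Utc> timestamp
}

fn reverse_chronological(mut threads: Vec<ThreadMeta>) -> Vec<ThreadMeta> {
    // `Reverse` flips the sort key, so the largest (newest) timestamps come first.
    threads.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.updated_at));
    threads
}

fn main() {
    let sorted = reverse_chronological(vec![
        ThreadMeta { title: "old", updated_at: 10 },
        ThreadMeta { title: "new", updated_at: 30 },
        ThreadMeta { title: "mid", updated_at: 20 },
    ]);
    let titles: Vec<_> = sorted.iter().map(|thread| thread.title).collect();
    assert_eq!(titles, ["new", "mid", "old"]);
}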

pub fn create_thread(&mut self, cx: &mut Context<Self>) -> Entity<Thread> {
cx.new(|cx| {
Thread::new(
@@ -508,17 +520,94 @@ impl ThreadStore {
})
}

fn register_context_server_handlers(&self, cx: &mut Context<Self>) {
let context_server_store = self.project.read(cx).context_server_store();
cx.subscribe(&context_server_store, Self::handle_context_server_event)
.detach();
fn load_default_profile(&self, cx: &mut Context<Self>) {
let assistant_settings = AgentSettings::get_global(cx);

// Check for any servers that were already running before the handler was registered
for server in context_server_store.read(cx).running_servers() {
self.load_context_server_tools(server.id(), context_server_store.clone(), cx);
self.load_profile_by_id(assistant_settings.default_profile.clone(), cx);
}

pub fn load_profile_by_id(&self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
let assistant_settings = AgentSettings::get_global(cx);

if let Some(profile) = assistant_settings.profiles.get(&profile_id) {
self.load_profile(profile.clone(), cx);
}
}

pub fn load_profile(&self, profile: AgentProfile, cx: &mut Context<Self>) {
self.tools.update(cx, |tools, cx| {
tools.disable_all_tools(cx);
tools.enable(
ToolSource::Native,
&profile
.tools
.into_iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool))
.collect::<Vec<_>>(),
cx,
);
});

if profile.enable_all_context_servers {
for context_server_id in self
.project
.read(cx)
.context_server_store()
.read(cx)
.all_server_ids()
{
self.tools.update(cx, |tools, cx| {
tools.enable_source(
ToolSource::ContextServer {
id: context_server_id.0.into(),
},
cx,
);
});
}
// Enable all the tools from all context servers, but disable the ones that are explicitly disabled
for (context_server_id, preset) in profile.context_servers {
self.tools.update(cx, |tools, cx| {
tools.disable(
ToolSource::ContextServer {
id: context_server_id.into(),
},
&preset
.tools
.into_iter()
.filter_map(|(tool, enabled)| (!enabled).then(|| tool))
.collect::<Vec<_>>(),
cx,
)
})
}
} else {
for (context_server_id, preset) in profile.context_servers {
self.tools.update(cx, |tools, cx| {
tools.enable(
ToolSource::ContextServer {
id: context_server_id.into(),
},
&preset
.tools
.into_iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool))
.collect::<Vec<_>>(),
cx,
)
})
}
}
}
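The enable/disable decisions in `load_profile` above come down to partitioning a `tool name -> enabled` map into the names to turn on and the names to turn off. A minimal standalone sketch of that pattern using plain std types (not the crate's actual settings structs; the tool names are illustrative):

use std::collections::BTreeMap;

// Split a `tool name -> enabled` map into (enabled, disabled) name lists.
fn partition_tools(tools: &BTreeMap<String, bool>) -> (Vec<String>, Vec<String>) {
    let enabled = tools
        .iter()
        .filter_map(|(name, enabled)| enabled.then(|| name.clone()))
        .collect();
    let disabled = tools
        .iter()
        .filter_map(|(name, enabled)| (!enabled).then(|| name.clone()))
        .collect();
    (enabled, disabled)
}

fn main() {
    let mut tools = BTreeMap::new();
    tools.insert("enabled_mcp_tool".to_string(), true);
    tools.insert("disabled_mcp_tool".to_string(), false);
    let (enabled, disabled) = partition_tools(&tools);
    assert_eq!(enabled, ["enabled_mcp_tool"]);
    assert_eq!(disabled, ["disabled_mcp_tool"]);
}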
|
||||
|
||||
fn register_context_server_handlers(&self, cx: &mut Context<Self>) {
|
||||
cx.subscribe(
|
||||
&self.project.read(cx).context_server_store(),
|
||||
Self::handle_context_server_event,
|
||||
)
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn handle_context_server_event(
|
||||
&mut self,
|
||||
context_server_store: Entity<ContextServerStore>,
|
||||
@@ -529,71 +618,71 @@ impl ThreadStore {
|
||||
match event {
|
||||
project::context_server_store::Event::ServerStatusChanged { server_id, status } => {
|
||||
match status {
|
||||
ContextServerStatus::Starting => {}
|
||||
ContextServerStatus::Running => {
|
||||
self.load_context_server_tools(server_id.clone(), context_server_store, cx);
|
||||
if let Some(server) =
|
||||
context_server_store.read(cx).get_running_server(server_id)
|
||||
{
|
||||
let context_server_manager = context_server_store.clone();
|
||||
cx.spawn({
|
||||
let server = server.clone();
|
||||
let server_id = server_id.clone();
|
||||
async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_server::protocol::ServerCapability::Tools) {
|
||||
if let Some(tools) = protocol.list_tools().await.log_err() {
|
||||
let tool_ids = tool_working_set
|
||||
.update(cx, |tool_working_set, _| {
|
||||
tools
|
||||
.tools
|
||||
.into_iter()
|
||||
.map(|tool| {
|
||||
log::info!(
|
||||
"registering context server tool: {:?}",
|
||||
tool.name
|
||||
);
|
||||
tool_working_set.insert(Arc::new(
|
||||
ContextServerTool::new(
|
||||
context_server_manager.clone(),
|
||||
server.id(),
|
||||
tool,
|
||||
),
|
||||
))
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.log_err();
|
||||
|
||||
if let Some(tool_ids) = tool_ids {
|
||||
this.update(cx, |this, cx| {
|
||||
this.context_server_tool_ids
|
||||
.insert(server_id, tool_ids);
|
||||
this.load_default_profile(cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
ContextServerStatus::Stopped | ContextServerStatus::Error(_) => {
|
||||
if let Some(tool_ids) = self.context_server_tool_ids.remove(server_id) {
|
||||
tool_working_set.update(cx, |tool_working_set, _| {
|
||||
tool_working_set.remove(&tool_ids);
|
||||
});
|
||||
self.load_default_profile(cx);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load_context_server_tools(
|
||||
&self,
|
||||
server_id: ContextServerId,
|
||||
context_server_store: Entity<ContextServerStore>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(server) = context_server_store.read(cx).get_running_server(&server_id) else {
|
||||
return;
|
||||
};
|
||||
let tool_working_set = self.tools.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_server::protocol::ServerCapability::Tools) {
|
||||
if let Some(response) = protocol
|
||||
.request::<context_server::types::requests::ListTools>(())
|
||||
.await
|
||||
.log_err()
|
||||
{
|
||||
let tool_ids = tool_working_set
|
||||
.update(cx, |tool_working_set, _| {
|
||||
response
|
||||
.tools
|
||||
.into_iter()
|
||||
.map(|tool| {
|
||||
log::info!("registering context server tool: {:?}", tool.name);
|
||||
tool_working_set.insert(Arc::new(ContextServerTool::new(
|
||||
context_server_store.clone(),
|
||||
server.id(),
|
||||
tool,
|
||||
)))
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.log_err();
|
||||
|
||||
if let Some(tool_ids) = tool_ids {
|
||||
this.update(cx, |this, _| {
|
||||
this.context_server_tool_ids.insert(server_id, tool_ids);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
@@ -603,7 +692,7 @@ pub struct SerializedThreadMetadata {
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct SerializedThread {
|
||||
pub version: String,
|
||||
pub summary: SharedString,
|
||||
@@ -625,11 +714,9 @@ pub struct SerializedThread {
|
||||
pub completion_mode: Option<CompletionMode>,
|
||||
#[serde(default)]
|
||||
pub tool_use_limit_reached: bool,
|
||||
#[serde(default)]
|
||||
pub profile: Option<AgentProfileId>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct SerializedLanguageModel {
|
||||
pub provider: String,
|
||||
pub model: String,
|
||||
@@ -690,15 +777,11 @@ impl SerializedThreadV0_1_0 {
|
||||
messages.push(message);
|
||||
}
|
||||
|
||||
SerializedThread {
|
||||
messages,
|
||||
version: SerializedThread::VERSION.to_string(),
|
||||
..self.0
|
||||
}
|
||||
SerializedThread { messages, ..self.0 }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SerializedMessage {
|
||||
pub id: MessageId,
|
||||
pub role: Role,
|
||||
@@ -716,7 +799,7 @@ pub struct SerializedMessage {
|
||||
pub is_hidden: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum SerializedMessageSegment {
|
||||
#[serde(rename = "text")]
|
||||
@@ -734,14 +817,14 @@ pub enum SerializedMessageSegment {
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SerializedToolUse {
|
||||
pub id: LanguageModelToolUseId,
|
||||
pub name: SharedString,
|
||||
pub input: serde_json::Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SerializedToolResult {
|
||||
pub tool_use_id: LanguageModelToolUseId,
|
||||
pub is_error: bool,
|
||||
@@ -773,7 +856,6 @@ impl LegacySerializedThread {
|
||||
model: None,
|
||||
completion_mode: None,
|
||||
tool_use_limit_reached: false,
|
||||
profile: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -804,7 +886,7 @@ impl LegacySerializedMessage {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SerializedCrease {
|
||||
pub start: usize,
|
||||
pub end: usize,
|
||||
@@ -923,7 +1005,7 @@ impl ThreadsDatabase {
|
||||
|
||||
fn bytes_encode(
|
||||
item: &Self::EItem,
|
||||
) -> Result<std::borrow::Cow<'_, [u8]>, heed::BoxedError> {
|
||||
) -> Result<std::borrow::Cow<[u8]>, heed::BoxedError> {
|
||||
serde_json::to_vec(&item.0)
|
||||
.map(std::borrow::Cow::Owned)
|
||||
.map_err(Into::into)
|
||||
@@ -1061,181 +1143,3 @@ impl ThreadsDatabase {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::thread::{DetailedSummaryState, MessageId};
|
||||
use chrono::Utc;
|
||||
use language_model::{Role, TokenUsage};
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn test_legacy_serialized_thread_upgrade() {
|
||||
let updated_at = Utc::now();
|
||||
let legacy_thread = LegacySerializedThread {
|
||||
summary: "Test conversation".into(),
|
||||
updated_at,
|
||||
messages: vec![LegacySerializedMessage {
|
||||
id: MessageId(1),
|
||||
role: Role::User,
|
||||
text: "Hello, world!".to_string(),
|
||||
tool_uses: vec![],
|
||||
tool_results: vec![],
|
||||
}],
|
||||
initial_project_snapshot: None,
|
||||
};
|
||||
|
||||
let upgraded = legacy_thread.upgrade();
|
||||
|
||||
assert_eq!(
|
||||
upgraded,
|
||||
SerializedThread {
|
||||
summary: "Test conversation".into(),
|
||||
updated_at,
|
||||
messages: vec![SerializedMessage {
|
||||
id: MessageId(1),
|
||||
role: Role::User,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "Hello, world!".to_string()
|
||||
}],
|
||||
tool_uses: vec![],
|
||||
tool_results: vec![],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false
|
||||
}],
|
||||
version: SerializedThread::VERSION.to_string(),
|
||||
initial_project_snapshot: None,
|
||||
cumulative_token_usage: TokenUsage::default(),
|
||||
request_token_usage: vec![],
|
||||
detailed_summary_state: DetailedSummaryState::default(),
|
||||
exceeded_window_error: None,
|
||||
model: None,
|
||||
completion_mode: None,
|
||||
tool_use_limit_reached: false,
|
||||
profile: None
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_serialized_threadv0_1_0_upgrade() {
|
||||
let updated_at = Utc::now();
|
||||
let thread_v0_1_0 = SerializedThreadV0_1_0(SerializedThread {
|
||||
summary: "Test conversation".into(),
|
||||
updated_at,
|
||||
messages: vec![
|
||||
SerializedMessage {
|
||||
id: MessageId(1),
|
||||
role: Role::User,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "Use tool_1".to_string(),
|
||||
}],
|
||||
tool_uses: vec![],
|
||||
tool_results: vec![],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false,
|
||||
},
|
||||
SerializedMessage {
|
||||
id: MessageId(2),
|
||||
role: Role::Assistant,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "I want to use a tool".to_string(),
|
||||
}],
|
||||
tool_uses: vec![SerializedToolUse {
|
||||
id: "abc".into(),
|
||||
name: "tool_1".into(),
|
||||
input: serde_json::Value::Null,
|
||||
}],
|
||||
tool_results: vec![],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false,
|
||||
},
|
||||
SerializedMessage {
|
||||
id: MessageId(1),
|
||||
role: Role::User,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "Here is the tool result".to_string(),
|
||||
}],
|
||||
tool_uses: vec![],
|
||||
tool_results: vec![SerializedToolResult {
|
||||
tool_use_id: "abc".into(),
|
||||
is_error: false,
|
||||
content: LanguageModelToolResultContent::Text("abcdef".into()),
|
||||
output: Some(serde_json::Value::Null),
|
||||
}],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false,
|
||||
},
|
||||
],
|
||||
version: SerializedThreadV0_1_0::VERSION.to_string(),
|
||||
initial_project_snapshot: None,
|
||||
cumulative_token_usage: TokenUsage::default(),
|
||||
request_token_usage: vec![],
|
||||
detailed_summary_state: DetailedSummaryState::default(),
|
||||
exceeded_window_error: None,
|
||||
model: None,
|
||||
completion_mode: None,
|
||||
tool_use_limit_reached: false,
|
||||
profile: None,
|
||||
});
|
||||
let upgraded = thread_v0_1_0.upgrade();
|
||||
|
||||
assert_eq!(
|
||||
upgraded,
|
||||
SerializedThread {
|
||||
summary: "Test conversation".into(),
|
||||
updated_at,
|
||||
messages: vec![
|
||||
SerializedMessage {
|
||||
id: MessageId(1),
|
||||
role: Role::User,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "Use tool_1".to_string()
|
||||
}],
|
||||
tool_uses: vec![],
|
||||
tool_results: vec![],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false
|
||||
},
|
||||
SerializedMessage {
|
||||
id: MessageId(2),
|
||||
role: Role::Assistant,
|
||||
segments: vec![SerializedMessageSegment::Text {
|
||||
text: "I want to use a tool".to_string(),
|
||||
}],
|
||||
tool_uses: vec![SerializedToolUse {
|
||||
id: "abc".into(),
|
||||
name: "tool_1".into(),
|
||||
input: serde_json::Value::Null,
|
||||
}],
|
||||
tool_results: vec![SerializedToolResult {
|
||||
tool_use_id: "abc".into(),
|
||||
is_error: false,
|
||||
content: LanguageModelToolResultContent::Text("abcdef".into()),
|
||||
output: Some(serde_json::Value::Null),
|
||||
}],
|
||||
context: "".to_string(),
|
||||
creases: vec![],
|
||||
is_hidden: false,
|
||||
},
|
||||
],
|
||||
version: SerializedThread::VERSION.to_string(),
|
||||
initial_project_snapshot: None,
|
||||
cumulative_token_usage: TokenUsage::default(),
|
||||
request_token_usage: vec![],
|
||||
detailed_summary_state: DetailedSummaryState::default(),
|
||||
exceeded_window_error: None,
|
||||
model: None,
|
||||
completion_mode: None,
|
||||
tool_use_limit_reached: false,
|
||||
profile: None
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,33 +1,30 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_tool::{Tool, ToolSource};
|
||||
use assistant_tool::{Tool, ToolSource, ToolWorkingSet, ToolWorkingSetEvent};
|
||||
use collections::HashMap;
|
||||
use gpui::{App, Context, Entity, IntoElement, Render, Subscription, Window};
|
||||
use language_model::{LanguageModel, LanguageModelToolSchemaFormat};
|
||||
use ui::prelude::*;
|
||||
|
||||
use crate::{Thread, ThreadEvent};
|
||||
|
||||
pub struct IncompatibleToolsState {
|
||||
cache: HashMap<LanguageModelToolSchemaFormat, Vec<Arc<dyn Tool>>>,
|
||||
thread: Entity<Thread>,
|
||||
_thread_subscription: Subscription,
|
||||
tool_working_set: Entity<ToolWorkingSet>,
|
||||
_tool_working_set_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl IncompatibleToolsState {
|
||||
pub fn new(thread: Entity<Thread>, cx: &mut Context<Self>) -> Self {
|
||||
pub fn new(tool_working_set: Entity<ToolWorkingSet>, cx: &mut Context<Self>) -> Self {
|
||||
let _tool_working_set_subscription =
|
||||
cx.subscribe(&thread, |this, _, event, _| match event {
|
||||
ThreadEvent::ProfileChanged => {
|
||||
cx.subscribe(&tool_working_set, |this, _, event, _| match event {
|
||||
ToolWorkingSetEvent::EnabledToolsChanged => {
|
||||
this.cache.clear();
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
|
||||
Self {
|
||||
cache: HashMap::default(),
|
||||
thread,
|
||||
_thread_subscription: _tool_working_set_subscription,
|
||||
tool_working_set,
|
||||
_tool_working_set_subscription,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,9 +36,8 @@ impl IncompatibleToolsState {
|
||||
self.cache
|
||||
.entry(model.tool_input_format())
|
||||
.or_insert_with(|| {
|
||||
self.thread
|
||||
self.tool_working_set
|
||||
.read(cx)
|
||||
.profile()
|
||||
.enabled_tools(cx)
|
||||
.iter()
|
||||
.filter(|tool| tool.input_schema(model.tool_input_format()).is_err())
|
||||
|
||||
@@ -16,6 +16,7 @@ anthropic = { workspace = true, features = ["schemars"] }
|
||||
anyhow.workspace = true
|
||||
collections.workspace = true
|
||||
gpui.workspace = true
|
||||
indexmap.workspace = true
|
||||
language_model.workspace = true
|
||||
lmstudio = { workspace = true, features = ["schemars"] }
|
||||
log.workspace = true
|
||||
|
||||
@@ -17,6 +17,29 @@ pub mod builtin_profiles {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct GroupedAgentProfiles {
|
||||
pub builtin: IndexMap<AgentProfileId, AgentProfile>,
|
||||
pub custom: IndexMap<AgentProfileId, AgentProfile>,
|
||||
}
|
||||
|
||||
impl GroupedAgentProfiles {
|
||||
pub fn from_settings(settings: &crate::AgentSettings) -> Self {
|
||||
let mut builtin = IndexMap::default();
|
||||
let mut custom = IndexMap::default();
|
||||
|
||||
for (profile_id, profile) in settings.profiles.clone() {
|
||||
if builtin_profiles::is_builtin(&profile_id) {
|
||||
builtin.insert(profile_id, profile);
|
||||
} else {
|
||||
custom.insert(profile_id, profile);
|
||||
}
|
||||
}
|
||||
|
||||
Self { builtin, custom }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct AgentProfileId(pub Arc<str>);
|
||||
|
||||
@@ -40,7 +63,7 @@ impl Default for AgentProfileId {
|
||||
|
||||
/// A profile for the Zed Agent that controls its behavior.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AgentProfileSettings {
|
||||
pub struct AgentProfile {
|
||||
/// The name of the profile.
|
||||
pub name: SharedString,
|
||||
pub tools: IndexMap<Arc<str>, bool>,
|
||||
|
||||
@@ -102,7 +102,7 @@ pub struct AgentSettings {
|
||||
pub using_outdated_settings_version: bool,
|
||||
pub default_profile: AgentProfileId,
|
||||
pub default_view: DefaultView,
|
||||
pub profiles: IndexMap<AgentProfileId, AgentProfileSettings>,
|
||||
pub profiles: IndexMap<AgentProfileId, AgentProfile>,
|
||||
pub always_allow_tool_actions: bool,
|
||||
pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
|
||||
pub play_sound_when_agent_done: bool,
|
||||
@@ -531,7 +531,7 @@ impl AgentSettingsContent {
|
||||
pub fn create_profile(
|
||||
&mut self,
|
||||
profile_id: AgentProfileId,
|
||||
profile_settings: AgentProfileSettings,
|
||||
profile: AgentProfile,
|
||||
) -> Result<()> {
|
||||
self.v2_setting(|settings| {
|
||||
let profiles = settings.profiles.get_or_insert_default();
|
||||
@@ -542,10 +542,10 @@ impl AgentSettingsContent {
|
||||
profiles.insert(
|
||||
profile_id,
|
||||
AgentProfileContent {
|
||||
name: profile_settings.name.into(),
|
||||
tools: profile_settings.tools,
|
||||
enable_all_context_servers: Some(profile_settings.enable_all_context_servers),
|
||||
context_servers: profile_settings
|
||||
name: profile.name.into(),
|
||||
tools: profile.tools,
|
||||
enable_all_context_servers: Some(profile.enable_all_context_servers),
|
||||
context_servers: profile
|
||||
.context_servers
|
||||
.into_iter()
|
||||
.map(|(server_id, preset)| {
|
||||
@@ -910,7 +910,7 @@ impl Settings for AgentSettings {
|
||||
.extend(profiles.into_iter().map(|(id, profile)| {
|
||||
(
|
||||
id,
|
||||
AgentProfileSettings {
|
||||
AgentProfile {
|
||||
name: profile.name.into(),
|
||||
tools: profile.tools,
|
||||
enable_all_context_servers: profile
|
||||
|
||||
@@ -11,7 +11,7 @@ use assistant_slash_commands::FileCommandMetadata;
|
||||
use client::{self, proto, telemetry::Telemetry};
|
||||
use clock::ReplicaId;
|
||||
use collections::{HashMap, HashSet};
|
||||
use fs::{Fs, RenameOptions};
|
||||
use fs::{Fs, RemoveOptions};
|
||||
use futures::{FutureExt, StreamExt, future::Shared};
|
||||
use gpui::{
|
||||
App, AppContext as _, Context, Entity, EventEmitter, RenderImage, SharedString, Subscription,
|
||||
@@ -452,10 +452,6 @@ pub enum ContextEvent {
|
||||
MessagesEdited,
|
||||
SummaryChanged,
|
||||
SummaryGenerated,
|
||||
PathChanged {
|
||||
old_path: Option<Arc<Path>>,
|
||||
new_path: Arc<Path>,
|
||||
},
|
||||
StreamedCompletion,
|
||||
StartedThoughtProcess(Range<language::Anchor>),
|
||||
EndedThoughtProcess(language::Anchor),
|
||||
@@ -2898,34 +2894,22 @@ impl AssistantContext {
|
||||
}
|
||||
|
||||
fs.create_dir(contexts_dir().as_ref()).await?;
|
||||
|
||||
// rename before write ensures that only one file exists
|
||||
if let Some(old_path) = old_path.as_ref() {
|
||||
fs.atomic_write(new_path.clone(), serde_json::to_string(&context).unwrap())
|
||||
.await?;
|
||||
if let Some(old_path) = old_path {
|
||||
if new_path.as_path() != old_path.as_ref() {
|
||||
fs.rename(
|
||||
fs.remove_file(
|
||||
&old_path,
|
||||
&new_path,
|
||||
RenameOptions {
|
||||
overwrite: true,
|
||||
ignore_if_exists: true,
|
||||
RemoveOptions {
|
||||
recursive: false,
|
||||
ignore_if_not_exists: true,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
// update path before write in case it fails
|
||||
this.update(cx, {
|
||||
let new_path: Arc<Path> = new_path.clone().into();
|
||||
move |this, cx| {
|
||||
this.path = Some(new_path.clone());
|
||||
cx.emit(ContextEvent::PathChanged { old_path, new_path });
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
|
||||
fs.atomic_write(new_path, serde_json::to_string(&context).unwrap())
|
||||
.await?;
|
||||
this.update(cx, |this, _| this.path = Some(new_path.into()))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -3293,7 +3277,7 @@ impl SavedContextV0_1_0 {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SavedContextMetadata {
|
||||
pub title: SharedString,
|
||||
pub title: String,
|
||||
pub path: Arc<Path>,
|
||||
pub mtime: chrono::DateTime<chrono::Local>,
|
||||
}
|
||||
|
||||
@@ -580,7 +580,6 @@ impl ContextEditor {
|
||||
});
|
||||
}
|
||||
ContextEvent::SummaryGenerated => {}
|
||||
ContextEvent::PathChanged { .. } => {}
|
||||
ContextEvent::StartedThoughtProcess(range) => {
|
||||
let creases = self.insert_thought_process_output_sections(
|
||||
[(
|
||||
|
||||
@@ -347,6 +347,12 @@ impl ContextStore {
|
||||
self.contexts_metadata.iter()
|
||||
}
|
||||
|
||||
pub fn reverse_chronological_contexts(&self) -> Vec<SavedContextMetadata> {
|
||||
let mut contexts = self.contexts_metadata.iter().cloned().collect::<Vec<_>>();
|
||||
contexts.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.mtime));
|
||||
contexts
|
||||
}
|
||||
|
||||
pub fn create(&mut self, cx: &mut Context<Self>) -> Entity<AssistantContext> {
|
||||
let context = cx.new(|cx| {
|
||||
AssistantContext::local(
|
||||
@@ -612,16 +618,6 @@ impl ContextStore {
|
||||
ContextEvent::SummaryChanged => {
|
||||
self.advertise_contexts(cx);
|
||||
}
|
||||
ContextEvent::PathChanged { old_path, new_path } => {
|
||||
if let Some(old_path) = old_path.as_ref() {
|
||||
for metadata in &mut self.contexts_metadata {
|
||||
if &metadata.path == old_path {
|
||||
metadata.path = new_path.clone();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ContextEvent::Operation(operation) => {
|
||||
let context_id = context.read(cx).id().to_proto();
|
||||
let operation = operation.to_proto();
|
||||
@@ -796,7 +792,7 @@ impl ContextStore {
|
||||
.next()
|
||||
{
|
||||
contexts.push(SavedContextMetadata {
|
||||
title: title.to_string().into(),
|
||||
title: title.to_string(),
|
||||
path: path.into(),
|
||||
mtime: metadata.mtime.timestamp_for_user().into(),
|
||||
});
|
||||
@@ -813,37 +809,74 @@ impl ContextStore {
|
||||
}
|
||||
|
||||
fn register_context_server_handlers(&self, cx: &mut Context<Self>) {
|
||||
let context_server_store = self.project.read(cx).context_server_store();
|
||||
cx.subscribe(&context_server_store, Self::handle_context_server_event)
|
||||
.detach();
|
||||
|
||||
// Check for any servers that were already running before the handler was registered
|
||||
for server in context_server_store.read(cx).running_servers() {
|
||||
self.load_context_server_slash_commands(server.id(), context_server_store.clone(), cx);
|
||||
}
|
||||
cx.subscribe(
|
||||
&self.project.read(cx).context_server_store(),
|
||||
Self::handle_context_server_event,
|
||||
)
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn handle_context_server_event(
|
||||
&mut self,
|
||||
context_server_store: Entity<ContextServerStore>,
|
||||
context_server_manager: Entity<ContextServerStore>,
|
||||
event: &project::context_server_store::Event,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let slash_command_working_set = self.slash_commands.clone();
|
||||
match event {
|
||||
project::context_server_store::Event::ServerStatusChanged { server_id, status } => {
|
||||
match status {
|
||||
ContextServerStatus::Running => {
|
||||
self.load_context_server_slash_commands(
|
||||
server_id.clone(),
|
||||
context_server_store.clone(),
|
||||
cx,
|
||||
);
|
||||
if let Some(server) = context_server_manager
|
||||
.read(cx)
|
||||
.get_running_server(server_id)
|
||||
{
|
||||
let context_server_manager = context_server_manager.clone();
|
||||
cx.spawn({
|
||||
let server = server.clone();
|
||||
let server_id = server_id.clone();
|
||||
async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_server::protocol::ServerCapability::Prompts) {
|
||||
if let Some(prompts) = protocol.list_prompts().await.log_err() {
|
||||
let slash_command_ids = prompts
|
||||
.into_iter()
|
||||
.filter(assistant_slash_commands::acceptable_prompt)
|
||||
.map(|prompt| {
|
||||
log::info!(
|
||||
"registering context server command: {:?}",
|
||||
prompt.name
|
||||
);
|
||||
slash_command_working_set.insert(Arc::new(
|
||||
assistant_slash_commands::ContextServerSlashCommand::new(
|
||||
context_server_manager.clone(),
|
||||
server.id(),
|
||||
prompt,
|
||||
),
|
||||
))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
this.update( cx, |this, _cx| {
|
||||
this.context_server_slash_command_ids
|
||||
.insert(server_id.clone(), slash_command_ids);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
ContextServerStatus::Stopped | ContextServerStatus::Error(_) => {
|
||||
if let Some(slash_command_ids) =
|
||||
self.context_server_slash_command_ids.remove(server_id)
|
||||
{
|
||||
self.slash_commands.remove(&slash_command_ids);
|
||||
slash_command_working_set.remove(&slash_command_ids);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -851,52 +884,4 @@ impl ContextStore {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load_context_server_slash_commands(
|
||||
&self,
|
||||
server_id: ContextServerId,
|
||||
context_server_store: Entity<ContextServerStore>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(server) = context_server_store.read(cx).get_running_server(&server_id) else {
|
||||
return;
|
||||
};
|
||||
let slash_command_working_set = self.slash_commands.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_server::protocol::ServerCapability::Prompts) {
|
||||
if let Some(response) = protocol
|
||||
.request::<context_server::types::requests::PromptsList>(())
|
||||
.await
|
||||
.log_err()
|
||||
{
|
||||
let slash_command_ids = response
|
||||
.prompts
|
||||
.into_iter()
|
||||
.filter(assistant_slash_commands::acceptable_prompt)
|
||||
.map(|prompt| {
|
||||
log::info!("registering context server command: {:?}", prompt.name);
|
||||
slash_command_working_set.insert(Arc::new(
|
||||
assistant_slash_commands::ContextServerSlashCommand::new(
|
||||
context_server_store.clone(),
|
||||
server.id(),
|
||||
prompt,
|
||||
),
|
||||
))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
this.update(cx, |this, _cx| {
|
||||
this.context_server_slash_command_ids
|
||||
.insert(server_id.clone(), slash_command_ids);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,7 +10,9 @@ use parking_lot::Mutex;
|
||||
use project::{CompletionIntent, CompletionSource, lsp_store::CompletionDocumentation};
|
||||
use rope::Point;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
ops::Range,
|
||||
rc::Rc,
|
||||
sync::{
|
||||
Arc,
|
||||
atomic::{AtomicBool, Ordering::SeqCst},
|
||||
@@ -238,14 +240,13 @@ impl SlashCommandCompletionProvider {
|
||||
|
||||
Ok(vec![project::CompletionResponse {
|
||||
completions,
|
||||
// TODO: Could have slash commands indicate whether their completions are incomplete.
|
||||
is_incomplete: true,
|
||||
is_incomplete: false,
|
||||
}])
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(vec![project::CompletionResponse {
|
||||
completions: Vec::new(),
|
||||
is_incomplete: true,
|
||||
is_incomplete: false,
|
||||
}]))
|
||||
}
|
||||
}
|
||||
@@ -274,17 +275,17 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
position.row,
|
||||
call.arguments.last().map_or(call.name.end, |arg| arg.end) as u32,
|
||||
);
|
||||
let command_range = buffer.anchor_before(command_range_start)
|
||||
let command_range = buffer.anchor_after(command_range_start)
|
||||
..buffer.anchor_after(command_range_end);
|
||||
|
||||
let name = line[call.name.clone()].to_string();
|
||||
let (arguments, last_argument_range) = if let Some(argument) = call.arguments.last()
|
||||
{
|
||||
let last_arg_start =
|
||||
buffer.anchor_before(Point::new(position.row, argument.start as u32));
|
||||
buffer.anchor_after(Point::new(position.row, argument.start as u32));
|
||||
let first_arg_start = call.arguments.first().expect("we have the last element");
|
||||
let first_arg_start = buffer
|
||||
.anchor_before(Point::new(position.row, first_arg_start.start as u32));
|
||||
let first_arg_start =
|
||||
buffer.anchor_after(Point::new(position.row, first_arg_start.start as u32));
|
||||
let arguments = call
|
||||
.arguments
|
||||
.into_iter()
|
||||
@@ -297,7 +298,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
)
|
||||
} else {
|
||||
let start =
|
||||
buffer.anchor_before(Point::new(position.row, call.name.start as u32));
|
||||
buffer.anchor_after(Point::new(position.row, call.name.start as u32));
|
||||
(None, start..buffer_position)
|
||||
};
|
||||
|
||||
@@ -325,6 +326,16 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_completions(
|
||||
&self,
|
||||
_: Entity<Buffer>,
|
||||
_: Vec<usize>,
|
||||
_: Rc<RefCell<Box<[project::Completion]>>>,
|
||||
_: &mut Context<Editor>,
|
||||
) -> Task<Result<bool>> {
|
||||
Task::ready(Ok(true))
|
||||
}
|
||||
|
||||
fn is_completion_trigger(
|
||||
&self,
|
||||
buffer: &Entity<Buffer>,
|
||||
|
||||
@@ -86,26 +86,20 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let protocol = server.client().context("Context server not initialized")?;
|
||||
|
||||
let response = protocol
|
||||
.request::<context_server::types::requests::CompletionComplete>(
|
||||
context_server::types::CompletionCompleteParams {
|
||||
reference: context_server::types::CompletionReference::Prompt(
|
||||
context_server::types::PromptReference {
|
||||
ty: context_server::types::PromptReferenceType::Prompt,
|
||||
name: prompt_name,
|
||||
},
|
||||
),
|
||||
argument: context_server::types::CompletionArgument {
|
||||
name: arg_name,
|
||||
value: arg_value,
|
||||
let completion_result = protocol
|
||||
.completion(
|
||||
context_server::types::CompletionReference::Prompt(
|
||||
context_server::types::PromptReference {
|
||||
r#type: context_server::types::PromptReferenceType::Prompt,
|
||||
name: prompt_name,
|
||||
},
|
||||
meta: None,
|
||||
},
|
||||
),
|
||||
arg_name,
|
||||
arg_value,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let completions = response
|
||||
.completion
|
||||
let completions = completion_result
|
||||
.values
|
||||
.into_iter()
|
||||
.map(|value| ArgumentCompletion {
|
||||
@@ -144,18 +138,10 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
if let Some(server) = store.get_running_server(&server_id) {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let protocol = server.client().context("Context server not initialized")?;
|
||||
let response = protocol
|
||||
.request::<context_server::types::requests::PromptsGet>(
|
||||
context_server::types::PromptsGetParams {
|
||||
name: prompt_name.clone(),
|
||||
arguments: Some(prompt_args),
|
||||
meta: None,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
let result = protocol.run_prompt(&prompt_name, prompt_args).await?;
|
||||
|
||||
anyhow::ensure!(
|
||||
response
|
||||
result
|
||||
.messages
|
||||
.iter()
|
||||
.all(|msg| matches!(msg.role, context_server::types::Role::User)),
|
||||
@@ -163,7 +149,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
);
|
||||
|
||||
// Extract text from user messages into a single prompt string
|
||||
let mut prompt = response
|
||||
let mut prompt = result
|
||||
.messages
|
||||
.into_iter()
|
||||
.filter_map(|msg| match msg.content {
|
||||
@@ -181,7 +167,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
range: 0..(prompt.len()),
|
||||
icon: IconName::ZedAssistant,
|
||||
label: SharedString::from(
|
||||
response
|
||||
result
|
||||
.description
|
||||
.unwrap_or(format!("Result from {}", prompt_name)),
|
||||
),
|
||||
|
||||
@@ -13,6 +13,7 @@ path = "src/assistant_tool.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
async-watch.workspace = true
|
||||
buffer_diff.workspace = true
|
||||
clock.workspace = true
|
||||
collections.workspace = true
|
||||
@@ -29,7 +30,6 @@ serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
text.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
workspace.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
|
||||
@@ -204,7 +204,7 @@ impl ActionLog {
|
||||
git_store.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
|
||||
})?;
|
||||
|
||||
let (mut git_diff_updates_tx, mut git_diff_updates_rx) = watch::channel(());
|
||||
let (git_diff_updates_tx, mut git_diff_updates_rx) = async_watch::channel(());
|
||||
let _repo_subscription =
|
||||
if let Some((git_diff, (buffer_repo, _))) = git_diff.as_ref().zip(buffer_repo) {
|
||||
cx.update(|cx| {
|
||||
|
||||
@@ -214,7 +214,7 @@ pub trait Tool: 'static + Send + Sync {
|
||||
ToolSource::Native
|
||||
}
|
||||
|
||||
/// Returns true if the tool needs the users's confirmation
|
||||
/// Returns true iff the tool needs the users's confirmation
|
||||
/// before having permission to run.
|
||||
fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool;
|
||||
|
||||
|
||||
@@ -46,19 +46,15 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
|
||||
);
|
||||
}
|
||||
|
||||
const KEYS_TO_REMOVE: [(&str, fn(&Value) -> bool); 5] = [
|
||||
("format", |value| value.is_string()),
|
||||
("additionalProperties", |value| value.is_boolean()),
|
||||
("exclusiveMinimum", |value| value.is_number()),
|
||||
("exclusiveMaximum", |value| value.is_number()),
|
||||
("optional", |value| value.is_boolean()),
|
||||
const KEYS_TO_REMOVE: [&str; 5] = [
|
||||
"format",
|
||||
"additionalProperties",
|
||||
"exclusiveMinimum",
|
||||
"exclusiveMaximum",
|
||||
"optional",
|
||||
];
|
||||
for (key, predicate) in KEYS_TO_REMOVE {
|
||||
if let Some(value) = obj.get(key) {
|
||||
if predicate(value) {
|
||||
obj.remove(key);
|
||||
}
|
||||
}
|
||||
for key in KEYS_TO_REMOVE {
|
||||
obj.remove(key);
|
||||
}
|
||||
|
||||
// If a type is not specified for an input parameter, add a default type
|
||||
@@ -157,24 +153,6 @@ mod tests {
|
||||
"type": "integer"
|
||||
})
|
||||
);
|
||||
|
||||
// Ensure that we do not remove keys that are actually supported (e.g. "format" can just be used as another property)
|
||||
let mut json = json!({
|
||||
"description": "A test field",
|
||||
"type": "integer",
|
||||
"format": {},
|
||||
});
|
||||
|
||||
adapt_to_json_schema_subset(&mut json).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
json,
|
||||
json!({
|
||||
"description": "A test field",
|
||||
"type": "integer",
|
||||
"format": {},
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use collections::{HashMap, IndexMap};
|
||||
use gpui::App;
|
||||
use collections::{HashMap, HashSet, IndexMap};
|
||||
use gpui::{App, Context, EventEmitter};
|
||||
|
||||
use crate::{Tool, ToolRegistry, ToolSource};
|
||||
|
||||
@@ -13,9 +13,17 @@ pub struct ToolId(usize);
|
||||
pub struct ToolWorkingSet {
|
||||
context_server_tools_by_id: HashMap<ToolId, Arc<dyn Tool>>,
|
||||
context_server_tools_by_name: HashMap<String, Arc<dyn Tool>>,
|
||||
enabled_sources: HashSet<ToolSource>,
|
||||
enabled_tools_by_source: HashMap<ToolSource, HashSet<Arc<str>>>,
|
||||
next_tool_id: ToolId,
|
||||
}
|
||||
|
||||
pub enum ToolWorkingSetEvent {
|
||||
EnabledToolsChanged,
|
||||
}
|
||||
|
||||
impl EventEmitter<ToolWorkingSetEvent> for ToolWorkingSet {}
|
||||
|
||||
impl ToolWorkingSet {
|
||||
pub fn tool(&self, name: &str, cx: &App) -> Option<Arc<dyn Tool>> {
|
||||
self.context_server_tools_by_name
|
||||
@@ -49,6 +57,42 @@ impl ToolWorkingSet {
|
||||
tools_by_source
|
||||
}
|
||||
|
||||
pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
|
||||
let all_tools = self.tools(cx);
|
||||
|
||||
all_tools
|
||||
.into_iter()
|
||||
.filter(|tool| self.is_enabled(&tool.source(), &tool.name().into()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn disable_all_tools(&mut self, cx: &mut Context<Self>) {
|
||||
self.enabled_tools_by_source.clear();
|
||||
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
|
||||
}
|
||||
|
||||
pub fn enable_source(&mut self, source: ToolSource, cx: &mut Context<Self>) {
|
||||
self.enabled_sources.insert(source.clone());
|
||||
|
||||
let tools_by_source = self.tools_by_source(cx);
|
||||
if let Some(tools) = tools_by_source.get(&source) {
|
||||
self.enabled_tools_by_source.insert(
|
||||
source,
|
||||
tools
|
||||
.into_iter()
|
||||
.map(|tool| tool.name().into())
|
||||
.collect::<HashSet<_>>(),
|
||||
);
|
||||
}
|
||||
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
|
||||
}
|
||||
|
||||
pub fn disable_source(&mut self, source: &ToolSource, cx: &mut Context<Self>) {
|
||||
self.enabled_sources.remove(source);
|
||||
self.enabled_tools_by_source.remove(source);
|
||||
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, tool: Arc<dyn Tool>) -> ToolId {
|
||||
let tool_id = self.next_tool_id;
|
||||
self.next_tool_id.0 += 1;
|
||||
@@ -58,6 +102,42 @@ impl ToolWorkingSet {
|
||||
tool_id
|
||||
}
|
||||
|
||||
pub fn is_enabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
|
||||
self.enabled_tools_by_source
|
||||
.get(source)
|
||||
.map_or(false, |enabled_tools| enabled_tools.contains(name))
|
||||
}
|
||||
|
||||
pub fn is_disabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
|
||||
!self.is_enabled(source, name)
|
||||
}
|
||||
|
||||
pub fn enable(
|
||||
&mut self,
|
||||
source: ToolSource,
|
||||
tools_to_enable: &[Arc<str>],
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.enabled_tools_by_source
|
||||
.entry(source)
|
||||
.or_default()
|
||||
.extend(tools_to_enable.into_iter().cloned());
|
||||
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
|
||||
}
|
||||
|
||||
pub fn disable(
|
||||
&mut self,
|
||||
source: ToolSource,
|
||||
tools_to_disable: &[Arc<str>],
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.enabled_tools_by_source
|
||||
.entry(source)
|
||||
.or_default()
|
||||
.retain(|name| !tools_to_disable.contains(name));
|
||||
cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, tool_ids_to_remove: &[ToolId]) {
|
||||
self.context_server_tools_by_id
|
||||
.retain(|id, _| !tool_ids_to_remove.contains(id));
|
||||
|
||||
@@ -18,6 +18,7 @@ eval = []
|
||||
agent_settings.workspace = true
|
||||
anyhow.workspace = true
|
||||
assistant_tool.workspace = true
|
||||
async-watch.workspace = true
|
||||
buffer_diff.workspace = true
|
||||
chrono.workspace = true
|
||||
collections.workspace = true
|
||||
@@ -57,7 +58,6 @@ terminal_view.workspace = true
|
||||
theme.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
web_search.workspace = true
|
||||
which.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
@@ -420,12 +420,12 @@ impl EditAgent {
|
||||
cx: &mut AsyncApp,
|
||||
) -> (
|
||||
Task<Result<(T, Vec<ResolvedOldText>)>>,
|
||||
watch::Receiver<Option<Range<usize>>>,
|
||||
async_watch::Receiver<Option<Range<usize>>>,
|
||||
)
|
||||
where
|
||||
T: 'static + Send + Unpin + Stream<Item = Result<EditParserEvent>>,
|
||||
{
|
||||
let (mut old_range_tx, old_range_rx) = watch::channel(None);
|
||||
let (old_range_tx, old_range_rx) = async_watch::channel(None);
|
||||
let task = cx.background_spawn(async move {
|
||||
let mut matcher = StreamingFuzzyMatcher::new(snapshot);
|
||||
while let Some(edit_event) = edit_events.next().await {
|
||||
|
||||
@@ -39,7 +39,7 @@ fn eval_extract_handle_command_output() {
|
||||
// Model | Pass rate
|
||||
// ----------------------------|----------
|
||||
// claude-3.7-sonnet | 0.98
|
||||
// gemini-2.5-pro-06-05 | 0.77
|
||||
// gemini-2.5-pro | 0.86
|
||||
// gemini-2.5-flash | 0.11
|
||||
// gpt-4.1 | 1.00
|
||||
|
||||
@@ -58,7 +58,6 @@ fn eval_extract_handle_command_output() {
|
||||
eval(
|
||||
100,
|
||||
0.7, // Taking the lower bar for Gemini
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -117,7 +116,6 @@ fn eval_delete_run_git_blame() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -180,7 +178,6 @@ fn eval_translate_doc_comments() {
|
||||
eval(
|
||||
200,
|
||||
1.,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -244,7 +241,6 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -369,7 +365,6 @@ fn eval_disable_cursor_blinking() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text("Let's research how to cursor blinking works.")]),
|
||||
@@ -453,9 +448,6 @@ fn eval_from_pixels_constructor() {
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
// For whatever reason, this eval produces more mismatched tags.
|
||||
// Increasing for now, let's see if we can bring this down.
|
||||
0.2,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -656,7 +648,6 @@ fn eval_zode() {
|
||||
eval(
|
||||
50,
|
||||
1.,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text(include_str!("evals/fixtures/zode/prompt.md"))]),
|
||||
@@ -763,7 +754,6 @@ fn eval_add_overwrite_test() {
|
||||
eval(
|
||||
200,
|
||||
0.5, // TODO: make this eval better
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -1003,7 +993,6 @@ fn eval_create_empty_file() {
|
||||
eval(
|
||||
100,
|
||||
0.99,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text("Create a second empty todo file ")]),
|
||||
@@ -1290,12 +1279,7 @@ impl EvalAssertion {
|
||||
}
|
||||
}
|
||||
|
||||
fn eval(
|
||||
iterations: usize,
|
||||
expected_pass_ratio: f32,
|
||||
mismatched_tag_threshold: f32,
|
||||
mut eval: EvalInput,
|
||||
) {
|
||||
fn eval(iterations: usize, expected_pass_ratio: f32, mut eval: EvalInput) {
|
||||
let mut evaluated_count = 0;
|
||||
let mut failed_count = 0;
|
||||
report_progress(evaluated_count, failed_count, iterations);
|
||||
@@ -1367,7 +1351,7 @@ fn eval(
|
||||
|
||||
let mismatched_tag_ratio =
|
||||
cumulative_parser_metrics.mismatched_tags as f32 / cumulative_parser_metrics.tags as f32;
|
||||
if mismatched_tag_ratio > mismatched_tag_threshold {
|
||||
if mismatched_tag_ratio > 0.10 {
|
||||
for eval_output in eval_outputs {
|
||||
println!("{}", eval_output);
|
||||
}
|
||||
|
||||
@@ -498,7 +498,7 @@ client.with_options(max_retries=5).messages.create(
|
||||
### Timeouts
|
||||
|
||||
By default requests time out after 10 minutes. You can configure this with a `timeout` option,
|
||||
which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object:
|
||||
which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration) object:
|
||||
|
||||
```python
|
||||
from anthropic import Anthropic
|
||||
|
||||
@@ -111,7 +111,7 @@ pub struct ChannelMembership {
|
||||
pub role: proto::ChannelRole,
|
||||
}
|
||||
impl ChannelMembership {
|
||||
pub fn sort_key(&self) -> MembershipSortKey<'_> {
|
||||
pub fn sort_key(&self) -> MembershipSortKey {
|
||||
MembershipSortKey {
|
||||
role_order: match self.role {
|
||||
proto::ChannelRole::Admin => 0,
|
||||
|
||||
@@ -32,7 +32,7 @@ impl ChannelIndex {
|
||||
.retain(|channel_id| !channels.contains(channel_id));
|
||||
}
|
||||
|
||||
pub fn bulk_insert(&mut self) -> ChannelPathsInsertGuard<'_> {
|
||||
pub fn bulk_insert(&mut self) -> ChannelPathsInsertGuard {
|
||||
ChannelPathsInsertGuard {
|
||||
channels_ordered: &mut self.channels_ordered,
|
||||
channels_by_id: &mut self.channels_by_id,
|
||||
|
||||
@@ -39,7 +39,7 @@ enum ProxyType<'t> {
|
||||
HttpProxy(HttpProxyType<'t>),
|
||||
}
|
||||
|
||||
fn parse_proxy_type(proxy: &Url) -> Option<((String, u16), ProxyType<'_>)> {
|
||||
fn parse_proxy_type(proxy: &Url) -> Option<((String, u16), ProxyType)> {
|
||||
let scheme = proxy.scheme();
|
||||
let host = proxy.host()?.to_string();
|
||||
let port = proxy.port_or_known_default()?;
|
||||
|
||||
@@ -80,6 +80,7 @@ zed_llm_client.workspace = true
|
||||
agent_settings.workspace = true
|
||||
assistant_context_editor.workspace = true
|
||||
assistant_slash_command.workspace = true
|
||||
assistant_tool.workspace = true
|
||||
async-trait.workspace = true
|
||||
audio.workspace = true
|
||||
buffer_diff.workspace = true
|
||||
|
||||
@@ -312,7 +312,6 @@ impl Server {
|
||||
.add_request_handler(
|
||||
forward_read_only_project_request::<proto::LanguageServerIdForName>,
|
||||
)
|
||||
.add_request_handler(forward_read_only_project_request::<proto::GetDocumentDiagnostics>)
|
||||
.add_request_handler(
|
||||
forward_mutating_project_request::<proto::RegisterBufferWithLanguageServers>,
|
||||
)
|
||||
@@ -355,9 +354,6 @@ impl Server {
|
||||
.add_message_handler(broadcast_project_message_from_host::<proto::BufferReloaded>)
|
||||
.add_message_handler(broadcast_project_message_from_host::<proto::BufferSaved>)
|
||||
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateDiffBases>)
|
||||
.add_message_handler(
|
||||
broadcast_project_message_from_host::<proto::PullWorkspaceDiagnostics>,
|
||||
)
|
||||
.add_request_handler(get_users)
|
||||
.add_request_handler(fuzzy_search_users)
|
||||
.add_request_handler(request_contact)
|
||||
|
||||
@@ -7,7 +7,7 @@ use editor::{
|
||||
Editor, RowInfo,
|
||||
actions::{
|
||||
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst,
|
||||
ExpandMacroRecursively, Redo, Rename, SelectAll, ToggleCodeActions, Undo,
|
||||
ExpandMacroRecursively, Redo, Rename, ToggleCodeActions, Undo,
|
||||
},
|
||||
test::{
|
||||
editor_test_context::{AssertionContextManager, EditorTestContext},
|
||||
@@ -2712,7 +2712,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
params.text_document.uri,
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
);
|
||||
assert_eq!(params.position, lsp::Position::new(0, 0));
|
||||
assert_eq!(params.position, lsp::Position::new(0, 0),);
|
||||
Ok(Some(ExpandedMacro {
|
||||
name: "test_macro_name".to_string(),
|
||||
expansion: "test_macro_expansion on the host".to_string(),
|
||||
@@ -2747,11 +2747,7 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
params.text_document.uri,
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
);
|
||||
assert_eq!(
|
||||
params.position,
|
||||
lsp::Position::new(0, 12),
|
||||
"editor_b has selected the entire text and should query for a different position"
|
||||
);
|
||||
assert_eq!(params.position, lsp::Position::new(0, 0),);
|
||||
Ok(Some(ExpandedMacro {
|
||||
name: "test_macro_name".to_string(),
|
||||
expansion: "test_macro_expansion on the client".to_string(),
|
||||
@@ -2760,7 +2756,6 @@ async fn test_client_can_query_lsp_ext(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
);
|
||||
|
||||
editor_b.update_in(cx_b, |editor, window, cx| {
|
||||
editor.select_all(&SelectAll, window, cx);
|
||||
expand_macro_recursively(editor, &ExpandMacroRecursively, window, cx)
|
||||
});
|
||||
expand_request_b.next().await.unwrap();
|
||||
|
||||
@@ -20,8 +20,8 @@ use gpui::{
|
||||
UpdateGlobal, px, size,
|
||||
};
|
||||
use language::{
|
||||
Diagnostic, DiagnosticEntry, DiagnosticSourceKind, FakeLspAdapter, Language, LanguageConfig,
|
||||
LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
|
||||
Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
|
||||
LineEnding, OffsetRangeExt, Point, Rope,
|
||||
language_settings::{
|
||||
AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter,
|
||||
},
|
||||
@@ -4237,8 +4237,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
message: "message 1".to_string(),
|
||||
severity: lsp::DiagnosticSeverity::ERROR,
|
||||
is_primary: true,
|
||||
source_kind: DiagnosticSourceKind::Pushed,
|
||||
..Diagnostic::default()
|
||||
..Default::default()
|
||||
}
|
||||
},
|
||||
DiagnosticEntry {
|
||||
@@ -4248,8 +4247,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
severity: lsp::DiagnosticSeverity::WARNING,
|
||||
message: "message 2".to_string(),
|
||||
is_primary: true,
|
||||
source_kind: DiagnosticSourceKind::Pushed,
|
||||
..Diagnostic::default()
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -4261,7 +4259,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: Vec::new(),
|
||||
diagnostics: vec![],
|
||||
},
|
||||
);
|
||||
executor.run_until_parked();
|
||||
|
||||
@@ -15,6 +15,7 @@ use language::{
|
||||
use project::{Completion, CompletionResponse, CompletionSource, search::SearchQuery};
|
||||
use settings::Settings;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
ops::Range,
|
||||
rc::Rc,
|
||||
sync::{Arc, LazyLock},
|
||||
@@ -72,6 +73,16 @@ impl CompletionProvider for MessageEditorCompletionProvider {
|
||||
})
|
||||
}
|
||||
|
||||
fn resolve_completions(
|
||||
&self,
|
||||
_buffer: Entity<Buffer>,
|
||||
_completion_indices: Vec<usize>,
|
||||
_completions: Rc<RefCell<Box<[Completion]>>>,
|
||||
_cx: &mut Context<Editor>,
|
||||
) -> Task<anyhow::Result<bool>> {
|
||||
Task::ready(Ok(false))
|
||||
}
|
||||
|
||||
fn is_completion_trigger(
|
||||
&self,
|
||||
_buffer: &Entity<Buffer>,
|
||||
@@ -244,7 +255,7 @@ impl MessageEditor {
|
||||
{
|
||||
if !candidates.is_empty() {
|
||||
return cx.spawn(async move |_, cx| {
|
||||
let completion_response = Self::completions_for_candidates(
|
||||
let completion_response = Self::resolve_completions_for_candidates(
|
||||
&cx,
|
||||
query.as_str(),
|
||||
&candidates,
|
||||
@@ -262,7 +273,7 @@ impl MessageEditor {
|
||||
{
|
||||
if !candidates.is_empty() {
|
||||
return cx.spawn(async move |_, cx| {
|
||||
let completion_response = Self::completions_for_candidates(
|
||||
let completion_response = Self::resolve_completions_for_candidates(
|
||||
&cx,
|
||||
query.as_str(),
|
||||
candidates,
|
||||
@@ -281,7 +292,7 @@ impl MessageEditor {
|
||||
}]))
|
||||
}
|
||||
|
||||
async fn completions_for_candidates(
|
||||
async fn resolve_completions_for_candidates(
|
||||
cx: &AsyncApp,
|
||||
query: &str,
|
||||
candidates: &[StringMatchCandidate],
|
||||
|
||||
@@ -11,9 +11,6 @@ workspace = true
|
||||
[lib]
|
||||
path = "src/context_server.rs"
|
||||
|
||||
[features]
|
||||
test-support = []
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
async-trait.workspace = true
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
pub mod client;
|
||||
pub mod protocol;
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod test;
|
||||
pub mod transport;
|
||||
pub mod types;
|
||||
|
||||
|
||||
@@ -6,9 +6,10 @@
|
||||
//! of messages.
|
||||
|
||||
use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
|
||||
use crate::client::Client;
|
||||
use crate::types::{self, Notification, Request};
|
||||
use crate::types;
|
||||
|
||||
pub struct ModelContextProtocol {
|
||||
inner: Client,
|
||||
@@ -20,10 +21,9 @@ impl ModelContextProtocol {
|
||||
}
|
||||
|
||||
fn supported_protocols() -> Vec<types::ProtocolVersion> {
|
||||
vec![
|
||||
types::ProtocolVersion(types::LATEST_PROTOCOL_VERSION.to_string()),
|
||||
types::ProtocolVersion(types::VERSION_2024_11_05.to_string()),
|
||||
]
|
||||
vec![types::ProtocolVersion(
|
||||
types::LATEST_PROTOCOL_VERSION.to_string(),
|
||||
)]
|
||||
}
|
||||
|
||||
pub async fn initialize(
|
||||
@@ -43,7 +43,7 @@ impl ModelContextProtocol {
|
||||
|
||||
let response: types::InitializeResponse = self
|
||||
.inner
|
||||
.request(types::requests::Initialize::METHOD, params)
|
||||
.request(types::RequestType::Initialize.as_str(), params)
|
||||
.await?;
|
||||
|
||||
anyhow::ensure!(
|
||||
@@ -54,13 +54,16 @@ impl ModelContextProtocol {
|
||||
|
||||
log::trace!("mcp server info {:?}", response.server_info);
|
||||
|
||||
self.inner.notify(
|
||||
types::NotificationType::Initialized.as_str(),
|
||||
serde_json::json!({}),
|
||||
)?;
|
||||
|
||||
let initialized_protocol = InitializedContextServerProtocol {
|
||||
inner: self.inner,
|
||||
initialize: response,
|
||||
};
|
||||
|
||||
initialized_protocol.notify::<types::notifications::Initialized>(())?;
|
||||
|
||||
Ok(initialized_protocol)
|
||||
}
|
||||
}
|
||||
@@ -91,11 +94,137 @@ impl InitializedContextServerProtocol {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn request<T: Request>(&self, params: T::Params) -> Result<T::Response> {
|
||||
self.inner.request(T::METHOD, params).await
|
||||
fn check_capability(&self, capability: ServerCapability) -> Result<()> {
|
||||
anyhow::ensure!(
|
||||
self.capable(capability),
|
||||
"Server does not support {capability:?} capability"
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn notify<T: Notification>(&self, params: T::Params) -> Result<()> {
|
||||
self.inner.notify(T::METHOD, params)
|
||||
/// List the MCP prompts.
|
||||
pub async fn list_prompts(&self) -> Result<Vec<types::Prompt>> {
|
||||
self.check_capability(ServerCapability::Prompts)?;
|
||||
|
||||
let response: types::PromptsListResponse = self
|
||||
.inner
|
||||
.request(
|
||||
types::RequestType::PromptsList.as_str(),
|
||||
serde_json::json!({}),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(response.prompts)
|
||||
}
|
||||
|
||||
/// List the MCP resources.
|
||||
pub async fn list_resources(&self) -> Result<types::ResourcesListResponse> {
|
||||
self.check_capability(ServerCapability::Resources)?;
|
||||
|
||||
let response: types::ResourcesListResponse = self
|
||||
.inner
|
||||
.request(
|
||||
types::RequestType::ResourcesList.as_str(),
|
||||
serde_json::json!({}),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
/// Executes a prompt with the given arguments and returns the result.
|
||||
pub async fn run_prompt<P: AsRef<str>>(
|
||||
&self,
|
||||
prompt: P,
|
||||
arguments: HashMap<String, String>,
|
||||
) -> Result<types::PromptsGetResponse> {
|
||||
self.check_capability(ServerCapability::Prompts)?;
|
||||
|
||||
let params = types::PromptsGetParams {
|
||||
name: prompt.as_ref().to_string(),
|
||||
arguments: Some(arguments),
|
||||
meta: None,
|
||||
};
|
||||
|
||||
let response: types::PromptsGetResponse = self
|
||||
.inner
|
||||
.request(types::RequestType::PromptsGet.as_str(), params)
|
||||
.await?;
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
pub async fn completion<P: Into<String>>(
|
||||
&self,
|
||||
reference: types::CompletionReference,
|
||||
argument: P,
|
||||
value: P,
|
||||
) -> Result<types::Completion> {
|
||||
let params = types::CompletionCompleteParams {
|
||||
r#ref: reference,
|
||||
argument: types::CompletionArgument {
|
||||
name: argument.into(),
|
||||
value: value.into(),
|
||||
},
|
||||
meta: None,
|
||||
};
|
||||
let result: types::CompletionCompleteResponse = self
|
||||
.inner
|
||||
.request(types::RequestType::CompletionComplete.as_str(), params)
|
||||
.await?;
|
||||
|
||||
let completion = types::Completion {
|
||||
values: result.completion.values,
|
||||
total: types::CompletionTotal::from_options(
|
||||
result.completion.has_more,
|
||||
result.completion.total,
|
||||
),
|
||||
};
|
||||
|
||||
Ok(completion)
|
||||
}
|
||||
|
||||
/// List MCP tools.
|
||||
pub async fn list_tools(&self) -> Result<types::ListToolsResponse> {
|
||||
self.check_capability(ServerCapability::Tools)?;
|
||||
|
||||
let response = self
|
||||
.inner
|
||||
.request::<types::ListToolsResponse>(types::RequestType::ListTools.as_str(), ())
|
||||
.await?;
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
/// Executes a tool with the given arguments
|
||||
pub async fn run_tool<P: AsRef<str>>(
|
||||
&self,
|
||||
tool: P,
|
||||
arguments: Option<HashMap<String, serde_json::Value>>,
|
||||
) -> Result<types::CallToolResponse> {
|
||||
self.check_capability(ServerCapability::Tools)?;
|
||||
|
||||
let params = types::CallToolParams {
|
||||
name: tool.as_ref().to_string(),
|
||||
arguments,
|
||||
meta: None,
|
||||
};
|
||||
|
||||
let response: types::CallToolResponse = self
|
||||
.inner
|
||||
.request(types::RequestType::CallTool.as_str(), params)
|
||||
.await?;
|
||||
|
||||
Ok(response)
|
||||
}
|
||||
}
|
||||
|
||||
impl InitializedContextServerProtocol {
|
||||
pub async fn request<R: serde::de::DeserializeOwned>(
|
||||
&self,
|
||||
method: &str,
|
||||
params: impl serde::Serialize,
|
||||
) -> Result<R> {
|
||||
self.inner.request(method, params).await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,118 +0,0 @@
|
||||
use anyhow::Context as _;
|
||||
use collections::HashMap;
|
||||
use futures::{Stream, StreamExt as _, lock::Mutex};
|
||||
use gpui::BackgroundExecutor;
|
||||
use std::{pin::Pin, sync::Arc};
|
||||
|
||||
use crate::{
|
||||
transport::Transport,
|
||||
types::{Implementation, InitializeResponse, ProtocolVersion, ServerCapabilities},
|
||||
};
|
||||
|
||||
pub fn create_fake_transport(
|
||||
name: impl Into<String>,
|
||||
executor: BackgroundExecutor,
|
||||
) -> FakeTransport {
|
||||
let name = name.into();
|
||||
FakeTransport::new(executor).on_request::<crate::types::requests::Initialize>(move |_params| {
|
||||
create_initialize_response(name.clone())
|
||||
})
|
||||
}
|
||||
|
||||
fn create_initialize_response(server_name: String) -> InitializeResponse {
|
||||
InitializeResponse {
|
||||
protocol_version: ProtocolVersion(crate::types::LATEST_PROTOCOL_VERSION.to_string()),
|
||||
server_info: Implementation {
|
||||
name: server_name,
|
||||
version: "1.0.0".to_string(),
|
||||
},
|
||||
capabilities: ServerCapabilities::default(),
|
||||
meta: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FakeTransport {
|
||||
request_handlers:
|
||||
HashMap<&'static str, Arc<dyn Fn(serde_json::Value) -> serde_json::Value + Send + Sync>>,
|
||||
tx: futures::channel::mpsc::UnboundedSender<String>,
|
||||
rx: Arc<Mutex<futures::channel::mpsc::UnboundedReceiver<String>>>,
|
||||
executor: BackgroundExecutor,
|
||||
}
|
||||
|
||||
impl FakeTransport {
|
||||
pub fn new(executor: BackgroundExecutor) -> Self {
|
||||
let (tx, rx) = futures::channel::mpsc::unbounded();
|
||||
Self {
|
||||
request_handlers: Default::default(),
|
||||
tx,
|
||||
rx: Arc::new(Mutex::new(rx)),
|
||||
executor,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn on_request<T: crate::types::Request>(
|
||||
mut self,
|
||||
handler: impl Fn(T::Params) -> T::Response + Send + Sync + 'static,
|
||||
) -> Self {
|
||||
self.request_handlers.insert(
|
||||
T::METHOD,
|
||||
Arc::new(move |value| {
|
||||
let params = value.get("params").expect("Missing parameters").clone();
|
||||
let params: T::Params =
|
||||
serde_json::from_value(params).expect("Invalid parameters received");
|
||||
let response = handler(params);
|
||||
serde_json::to_value(response).unwrap()
|
||||
}),
|
||||
);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl Transport for FakeTransport {
|
||||
async fn send(&self, message: String) -> anyhow::Result<()> {
|
||||
if let Ok(msg) = serde_json::from_str::<serde_json::Value>(&message) {
|
||||
let id = msg.get("id").and_then(|id| id.as_u64()).unwrap_or(0);
|
||||
|
||||
if let Some(method) = msg.get("method") {
|
||||
let method = method.as_str().expect("Invalid method received");
|
||||
if let Some(handler) = self.request_handlers.get(method) {
|
||||
let payload = handler(msg);
|
||||
let response = serde_json::json!({
|
||||
"jsonrpc": "2.0",
|
||||
"id": id,
|
||||
"result": payload
|
||||
});
|
||||
self.tx
|
||||
.unbounded_send(response.to_string())
|
||||
.context("sending a message")?;
|
||||
} else {
|
||||
log::debug!("No handler registered for MCP request '{method}'");
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn receive(&self) -> Pin<Box<dyn Stream<Item = String> + Send>> {
|
||||
let rx = self.rx.clone();
|
||||
let executor = self.executor.clone();
|
||||
Box::pin(futures::stream::unfold(rx, move |rx| {
|
||||
let executor = executor.clone();
|
||||
async move {
|
||||
let mut rx_guard = rx.lock().await;
|
||||
executor.simulate_random_delay().await;
|
||||
if let Some(message) = rx_guard.next().await {
|
||||
drop(rx_guard);
|
||||
Some((message, rx))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
fn receive_err(&self) -> Pin<Box<dyn Stream<Item = String> + Send>> {
|
||||
Box::pin(futures::stream::empty())
|
||||
}
|
||||
}
|
||||
@@ -1,144 +1,76 @@
|
||||
use collections::HashMap;
|
||||
use serde::de::DeserializeOwned;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
pub const LATEST_PROTOCOL_VERSION: &str = "2025-03-26";
|
||||
pub const VERSION_2024_11_05: &str = "2024-11-05";
|
||||
pub const LATEST_PROTOCOL_VERSION: &str = "2024-11-05";
|
||||
|
||||
pub mod requests {
|
||||
use super::*;
|
||||
pub enum RequestType {
|
||||
Initialize,
|
||||
CallTool,
|
||||
ResourcesUnsubscribe,
|
||||
ResourcesSubscribe,
|
||||
ResourcesRead,
|
||||
ResourcesList,
|
||||
LoggingSetLevel,
|
||||
PromptsGet,
|
||||
PromptsList,
|
||||
CompletionComplete,
|
||||
Ping,
|
||||
ListTools,
|
||||
ListResourceTemplates,
|
||||
ListRoots,
|
||||
}
|
||||
|
||||
macro_rules! request {
|
||||
($method:expr, $name:ident, $params:ty, $response:ty) => {
|
||||
pub struct $name;
|
||||
|
||||
impl Request for $name {
|
||||
type Params = $params;
|
||||
type Response = $response;
|
||||
const METHOD: &'static str = $method;
|
||||
}
|
||||
};
|
||||
impl RequestType {
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
match self {
|
||||
RequestType::Initialize => "initialize",
|
||||
RequestType::CallTool => "tools/call",
|
||||
RequestType::ResourcesUnsubscribe => "resources/unsubscribe",
|
||||
RequestType::ResourcesSubscribe => "resources/subscribe",
|
||||
RequestType::ResourcesRead => "resources/read",
|
||||
RequestType::ResourcesList => "resources/list",
|
||||
RequestType::LoggingSetLevel => "logging/setLevel",
|
||||
RequestType::PromptsGet => "prompts/get",
|
||||
RequestType::PromptsList => "prompts/list",
|
||||
RequestType::CompletionComplete => "completion/complete",
|
||||
RequestType::Ping => "ping",
|
||||
RequestType::ListTools => "tools/list",
|
||||
RequestType::ListResourceTemplates => "resources/templates/list",
|
||||
RequestType::ListRoots => "roots/list",
|
||||
}
|
||||
}
|
||||
|
||||
request!(
|
||||
"initialize",
|
||||
Initialize,
|
||||
InitializeParams,
|
||||
InitializeResponse
|
||||
);
|
||||
request!("tools/call", CallTool, CallToolParams, CallToolResponse);
|
||||
request!(
|
||||
"resources/unsubscribe",
|
||||
ResourcesUnsubscribe,
|
||||
ResourcesUnsubscribeParams,
|
||||
()
|
||||
);
|
||||
request!(
|
||||
"resources/subscribe",
|
||||
ResourcesSubscribe,
|
||||
ResourcesSubscribeParams,
|
||||
()
|
||||
);
|
||||
request!(
|
||||
"resources/read",
|
||||
ResourcesRead,
|
||||
ResourcesReadParams,
|
||||
ResourcesReadResponse
|
||||
);
|
||||
request!("resources/list", ResourcesList, (), ResourcesListResponse);
|
||||
request!(
|
||||
"logging/setLevel",
|
||||
LoggingSetLevel,
|
||||
LoggingSetLevelParams,
|
||||
()
|
||||
);
|
||||
request!(
|
||||
"prompts/get",
|
||||
PromptsGet,
|
||||
PromptsGetParams,
|
||||
PromptsGetResponse
|
||||
);
|
||||
request!("prompts/list", PromptsList, (), PromptsListResponse);
|
||||
request!(
|
||||
"completion/complete",
|
||||
CompletionComplete,
|
||||
CompletionCompleteParams,
|
||||
CompletionCompleteResponse
|
||||
);
|
||||
request!("ping", Ping, (), ());
|
||||
request!("tools/list", ListTools, (), ListToolsResponse);
|
||||
request!(
|
||||
"resources/templates/list",
|
||||
ListResourceTemplates,
|
||||
(),
|
||||
ListResourceTemplatesResponse
|
||||
);
|
||||
request!("roots/list", ListRoots, (), ListRootsResponse);
|
||||
}
|
||||
|
||||
pub trait Request {
|
||||
type Params: DeserializeOwned + Serialize + Send + Sync + 'static;
|
||||
type Response: DeserializeOwned + Serialize + Send + Sync + 'static;
|
||||
const METHOD: &'static str;
|
||||
}
|
||||
impl TryFrom<&str> for RequestType {
type Error = ();

pub mod notifications {
use super::*;

macro_rules! notification {
($method:expr, $name:ident, $params:ty) => {
pub struct $name;

impl Notification for $name {
type Params = $params;
const METHOD: &'static str = $method;
}
};
fn try_from(s: &str) -> Result<Self, Self::Error> {
match s {
"initialize" => Ok(RequestType::Initialize),
"tools/call" => Ok(RequestType::CallTool),
"resources/unsubscribe" => Ok(RequestType::ResourcesUnsubscribe),
"resources/subscribe" => Ok(RequestType::ResourcesSubscribe),
"resources/read" => Ok(RequestType::ResourcesRead),
"resources/list" => Ok(RequestType::ResourcesList),
"logging/setLevel" => Ok(RequestType::LoggingSetLevel),
"prompts/get" => Ok(RequestType::PromptsGet),
"prompts/list" => Ok(RequestType::PromptsList),
"completion/complete" => Ok(RequestType::CompletionComplete),
"ping" => Ok(RequestType::Ping),
"tools/list" => Ok(RequestType::ListTools),
"resources/templates/list" => Ok(RequestType::ListResourceTemplates),
"roots/list" => Ok(RequestType::ListRoots),
_ => Err(()),
}
}

notification!("notifications/initialized", Initialized, ());
notification!("notifications/progress", Progress, ProgressParams);
notification!("notifications/message", Message, MessageParams);
notification!(
"notifications/resources/updated",
ResourcesUpdated,
ResourcesUpdatedParams
);
notification!(
"notifications/resources/list_changed",
ResourcesListChanged,
()
);
notification!("notifications/tools/list_changed", ToolsListChanged, ());
notification!("notifications/prompts/list_changed", PromptsListChanged, ());
notification!("notifications/roots/list_changed", RootsListChanged, ());
}

pub trait Notification {
type Params: DeserializeOwned + Serialize + Send + Sync + 'static;
const METHOD: &'static str;
}

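As context for the hunk above: each `request!`/`notification!` invocation ties an MCP method string to typed params and response via the `Request`/`Notification` traits. Below is a minimal, self-contained sketch (not part of this PR) of how such a typed request could be serialized into a JSON-RPC envelope; the `Ping` type, `build_message`, and the `main` driver are illustrative assumptions.

```rust
// Minimal sketch: a typed request carries its method string and param/response
// types, so a generic helper can build the wire message for any of them.
use serde::{Serialize, de::DeserializeOwned};
use serde_json::json;

pub trait Request {
    type Params: DeserializeOwned + Serialize + Send + Sync + 'static;
    type Response: DeserializeOwned + Serialize + Send + Sync + 'static;
    const METHOD: &'static str;
}

struct Ping;

impl Request for Ping {
    type Params = ();
    type Response = ();
    const METHOD: &'static str = "ping";
}

// Build a JSON-RPC 2.0 request envelope for any typed request.
fn build_message<R: Request>(id: u64, params: R::Params) -> serde_json::Value {
    json!({
        "jsonrpc": "2.0",
        "id": id,
        "method": R::METHOD,
        "params": params,
    })
}

fn main() {
    // "ping" takes no parameters, so `()` serializes to `null`.
    println!("{}", build_message::<Ping>(1, ()));
}
```
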
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct MessageParams {
|
||||
pub level: LoggingLevel,
|
||||
pub logger: Option<String>,
|
||||
pub data: serde_json::Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourcesUpdatedParams {
|
||||
pub uri: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(transparent)]
|
||||
pub struct ProtocolVersion(pub String);
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct InitializeParams {
|
||||
pub protocol_version: ProtocolVersion,
|
||||
@@ -148,7 +80,7 @@ pub struct InitializeParams {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CallToolParams {
|
||||
pub name: String,
|
||||
@@ -158,7 +90,7 @@ pub struct CallToolParams {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourcesUnsubscribeParams {
|
||||
pub uri: Url,
|
||||
@@ -166,7 +98,7 @@ pub struct ResourcesUnsubscribeParams {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourcesSubscribeParams {
|
||||
pub uri: Url,
|
||||
@@ -174,7 +106,7 @@ pub struct ResourcesSubscribeParams {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourcesReadParams {
|
||||
pub uri: Url,
|
||||
@@ -182,7 +114,7 @@ pub struct ResourcesReadParams {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoggingSetLevelParams {
|
||||
pub level: LoggingLevel,
|
||||
@@ -190,7 +122,7 @@ pub struct LoggingSetLevelParams {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PromptsGetParams {
|
||||
pub name: String,
|
||||
@@ -200,40 +132,37 @@ pub struct PromptsGetParams {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CompletionCompleteParams {
|
||||
#[serde(rename = "ref")]
|
||||
pub reference: CompletionReference,
|
||||
pub r#ref: CompletionReference,
|
||||
pub argument: CompletionArgument,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum CompletionReference {
|
||||
Prompt(PromptReference),
|
||||
Resource(ResourceReference),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PromptReference {
|
||||
#[serde(rename = "type")]
|
||||
pub ty: PromptReferenceType,
|
||||
pub r#type: PromptReferenceType,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourceReference {
|
||||
#[serde(rename = "type")]
|
||||
pub ty: PromptReferenceType,
|
||||
pub r#type: PromptReferenceType,
|
||||
pub uri: Url,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum PromptReferenceType {
|
||||
#[serde(rename = "ref/prompt")]
|
||||
@@ -242,7 +171,7 @@ pub enum PromptReferenceType {
|
||||
Resource,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CompletionArgument {
|
||||
pub name: String,
|
||||
@@ -259,7 +188,7 @@ pub struct InitializeResponse {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourcesReadResponse {
|
||||
pub contents: Vec<ResourceContentsType>,
|
||||
@@ -267,14 +196,14 @@ pub struct ResourcesReadResponse {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum ResourceContentsType {
|
||||
Text(TextResourceContents),
|
||||
Blob(BlobResourceContents),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourcesListResponse {
|
||||
pub resources: Vec<Resource>,
|
||||
@@ -291,7 +220,7 @@ pub struct SamplingMessage {
|
||||
pub content: MessageContent,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CreateMessageRequest {
|
||||
pub messages: Vec<SamplingMessage>,
|
||||
@@ -343,20 +272,13 @@ pub enum MessageContent {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
annotations: Option<MessageAnnotations>,
|
||||
},
|
||||
#[serde(rename = "image", rename_all = "camelCase")]
|
||||
#[serde(rename = "image")]
|
||||
Image {
|
||||
data: String,
|
||||
mime_type: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
annotations: Option<MessageAnnotations>,
|
||||
},
|
||||
#[serde(rename = "audio", rename_all = "camelCase")]
|
||||
Audio {
|
||||
data: String,
|
||||
mime_type: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
annotations: Option<MessageAnnotations>,
|
||||
},
|
||||
#[serde(rename = "resource")]
|
||||
Resource {
|
||||
resource: ResourceContents,
|
||||
@@ -374,7 +296,7 @@ pub struct MessageAnnotations {
|
||||
pub priority: Option<f64>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PromptsGetResponse {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
@@ -384,7 +306,7 @@ pub struct PromptsGetResponse {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PromptsListResponse {
|
||||
pub prompts: Vec<Prompt>,
|
||||
@@ -394,7 +316,7 @@ pub struct PromptsListResponse {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CompletionCompleteResponse {
|
||||
pub completion: CompletionResult,
|
||||
@@ -402,7 +324,7 @@ pub struct CompletionCompleteResponse {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CompletionResult {
|
||||
pub values: Vec<String>,
|
||||
@@ -414,7 +336,7 @@ pub struct CompletionResult {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Prompt {
|
||||
pub name: String,
|
||||
@@ -424,7 +346,7 @@ pub struct Prompt {
|
||||
pub arguments: Option<Vec<PromptArgument>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct PromptArgument {
|
||||
pub name: String,
|
||||
@@ -453,8 +375,6 @@ pub struct ServerCapabilities {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub logging: Option<serde_json::Value>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub completions: Option<serde_json::Value>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub prompts: Option<PromptsCapabilities>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub resources: Option<ResourcesCapabilities>,
|
||||
@@ -499,28 +419,6 @@ pub struct Tool {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub description: Option<String>,
|
||||
pub input_schema: serde_json::Value,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub annotations: Option<ToolAnnotations>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ToolAnnotations {
|
||||
/// A human-readable title for the tool.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub title: Option<String>,
|
||||
/// If true, the tool does not modify its environment.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub read_only_hint: Option<bool>,
|
||||
/// If true, the tool may perform destructive updates to its environment.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub destructive_hint: Option<bool>,
|
||||
/// If true, calling the tool repeatedly with the same arguments will have no additional effect on its environment.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub idempotent_hint: Option<bool>,
|
||||
/// If true, this tool may interact with an "open world" of external entities.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub open_world_hint: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
@@ -611,6 +509,34 @@ pub struct ModelHint {
|
||||
pub name: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub enum NotificationType {
|
||||
Initialized,
|
||||
Progress,
|
||||
Message,
|
||||
ResourcesUpdated,
|
||||
ResourcesListChanged,
|
||||
ToolsListChanged,
|
||||
PromptsListChanged,
|
||||
RootsListChanged,
|
||||
}
|
||||
|
||||
impl NotificationType {
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
match self {
|
||||
NotificationType::Initialized => "notifications/initialized",
|
||||
NotificationType::Progress => "notifications/progress",
|
||||
NotificationType::Message => "notifications/message",
|
||||
NotificationType::ResourcesUpdated => "notifications/resources/updated",
|
||||
NotificationType::ResourcesListChanged => "notifications/resources/list_changed",
|
||||
NotificationType::ToolsListChanged => "notifications/tools/list_changed",
|
||||
NotificationType::PromptsListChanged => "notifications/prompts/list_changed",
|
||||
NotificationType::RootsListChanged => "notifications/roots/list_changed",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum ClientNotification {
|
||||
@@ -631,14 +557,12 @@ pub enum ProgressToken {
|
||||
Number(f64),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ProgressParams {
|
||||
pub progress_token: ProgressToken,
|
||||
pub progress: f64,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub message: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub total: Option<f64>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
@@ -665,7 +589,7 @@ pub struct Completion {
|
||||
pub total: CompletionTotal,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CallToolResponse {
|
||||
pub content: Vec<ToolResponseContent>,
|
||||
@@ -682,8 +606,6 @@ pub enum ToolResponseContent {
|
||||
Text { text: String },
|
||||
#[serde(rename = "image", rename_all = "camelCase")]
|
||||
Image { data: String, mime_type: String },
|
||||
#[serde(rename = "audio", rename_all = "camelCase")]
|
||||
Audio { data: String, mime_type: String },
|
||||
#[serde(rename = "resource")]
|
||||
Resource { resource: ResourceContents },
|
||||
}
|
||||
@@ -698,7 +620,7 @@ pub struct ListToolsResponse {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListResourceTemplatesResponse {
|
||||
pub resource_templates: Vec<ResourceTemplate>,
|
||||
@@ -708,7 +630,7 @@ pub struct ListResourceTemplatesResponse {
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListRootsResponse {
|
||||
pub roots: Vec<Root>,
|
||||
|
||||
@@ -28,7 +28,7 @@ use settings::SettingsStore;
use sign_in::{reinstall_and_sign_in_within_workspace, sign_out_within_workspace};
use std::{
any::TypeId,
env::home_dir,
env,
ffi::OsString,
mem,
ops::Range,
@@ -408,30 +408,24 @@ impl Copilot {
let proxy_url = copilot_settings.proxy.clone()?;
let no_verify = copilot_settings.proxy_no_verify;
let http_or_https_proxy = if proxy_url.starts_with("http:") {
Some("HTTP_PROXY")
"HTTP_PROXY"
} else if proxy_url.starts_with("https:") {
Some("HTTPS_PROXY")
"HTTPS_PROXY"
} else {
log::error!(
"Unsupported protocol scheme for language server proxy (must be http or https)"
);
None
return None;
};

let mut env = HashMap::default();
env.insert(http_or_https_proxy.to_string(), proxy_url);

if let Some(proxy_type) = http_or_https_proxy {
env.insert(proxy_type.to_string(), proxy_url);
if let Some(true) = no_verify {
env.insert("NODE_TLS_REJECT_UNAUTHORIZED".to_string(), "0".to_string());
};
}
if let Some(true) = no_verify {
env.insert("NODE_TLS_REJECT_UNAUTHORIZED".to_string(), "0".to_string());
};

if let Ok(oauth_token) = env::var(copilot_chat::COPILOT_OAUTH_ENV_VAR) {
env.insert(copilot_chat::COPILOT_OAUTH_ENV_VAR.to_string(), oauth_token);
}

if env.is_empty() { None } else { Some(env) }
Some(env)
}

#[cfg(any(test, feature = "test-support"))]
@@ -486,14 +480,11 @@ impl Copilot {
env,
};

let root_path = home_dir();
let root_path = root_path.as_deref().unwrap_or_else(|| {
if cfg!(target_os = "windows") {
Path::new("C:/")
} else {
Path::new("/")
}
});
let root_path = if cfg!(target_os = "windows") {
Path::new("C:/")
} else {
Path::new("/")
};

let server_name = LanguageServerName("copilot".into());
let server = LanguageServer::new(
@@ -529,7 +520,7 @@ impl Copilot {

let server = cx
.update(|cx| {
let mut params = server.default_initialize_params(false, cx);
let mut params = server.default_initialize_params(cx);
params.initialization_options = Some(editor_info_json);
server.initialize(params, configuration.into(), cx)
})?

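The hunk above reworks how the Copilot language server's proxy settings are turned into environment variables. Below is a simplified sketch of one side of that hunk, the scheme-to-variable mapping, using plain std types instead of Zed's settings and collections; the function name and `main` driver are assumptions.

```rust
use std::collections::HashMap;

// Map a proxy URL to the conventional environment variable for its scheme;
// anything other than http/https is rejected up front.
fn proxy_env(proxy_url: &str, no_verify: bool) -> Option<HashMap<String, String>> {
    let var = if proxy_url.starts_with("http:") {
        "HTTP_PROXY"
    } else if proxy_url.starts_with("https:") {
        "HTTPS_PROXY"
    } else {
        return None;
    };

    let mut env = HashMap::new();
    env.insert(var.to_string(), proxy_url.to_string());
    if no_verify {
        // Mirrors NODE_TLS_REJECT_UNAUTHORIZED=0 for the Node-based server.
        env.insert("NODE_TLS_REJECT_UNAUTHORIZED".to_string(), "0".to_string());
    }
    Some(env)
}

fn main() {
    println!("{:?}", proxy_env("http://127.0.0.1:3128", false));
}
```
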
@@ -8,7 +8,6 @@ use chrono::DateTime;
use collections::HashSet;
use fs::Fs;
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use gpui::WeakEntity;
use gpui::{App, AsyncApp, Global, prelude::*};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use itertools::Itertools;
@@ -16,14 +15,9 @@ use paths::home_dir;
use serde::{Deserialize, Serialize};
use settings::watch_config_dir;

pub const COPILOT_OAUTH_ENV_VAR: &str = "GH_COPILOT_TOKEN";

#[derive(Default, Clone, Debug, PartialEq)]
pub struct CopilotChatSettings {
pub api_url: Arc<str>,
pub auth_url: Arc<str>,
pub models_url: Arc<str>,
}
pub const COPILOT_CHAT_COMPLETION_URL: &str = "https://api.githubcopilot.com/chat/completions";
pub const COPILOT_CHAT_AUTH_URL: &str = "https://api.github.com/copilot_internal/v2/token";
pub const COPILOT_CHAT_MODELS_URL: &str = "https://api.githubcopilot.com/models";

// Copilot's base model; defined by Microsoft in premium requests table
// This will be moved to the front of the Copilot model list, and will be used for
@@ -346,7 +340,6 @@ impl Global for GlobalCopilotChat {}
pub struct CopilotChat {
oauth_token: Option<String>,
api_token: Option<ApiToken>,
settings: CopilotChatSettings,
models: Option<Vec<Model>>,
client: Arc<dyn HttpClient>,
}
@@ -380,77 +373,62 @@ impl CopilotChat {
.map(|model| model.0.clone())
}

fn new(fs: Arc<dyn Fs>, client: Arc<dyn HttpClient>, cx: &mut Context<Self>) -> Self {
pub fn new(fs: Arc<dyn Fs>, client: Arc<dyn HttpClient>, cx: &App) -> Self {
let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
let dir_path = copilot_chat_config_dir();
let settings = CopilotChatSettings::default();
cx.spawn(async move |this, cx| {
let mut parent_watch_rx = watch_config_dir(
cx.background_executor(),
fs.clone(),
dir_path.clone(),
config_paths,
);
while let Some(contents) = parent_watch_rx.next().await {
let oauth_token = extract_oauth_token(contents);

this.update(cx, |this, cx| {
this.oauth_token = oauth_token.clone();
cx.notify();
})?;
cx.spawn({
let client = client.clone();
async move |cx| {
let mut parent_watch_rx = watch_config_dir(
cx.background_executor(),
fs.clone(),
dir_path.clone(),
config_paths,
);
while let Some(contents) = parent_watch_rx.next().await {
let oauth_token = extract_oauth_token(contents);
cx.update(|cx| {
if let Some(this) = Self::global(cx).as_ref() {
this.update(cx, |this, cx| {
this.oauth_token = oauth_token.clone();
cx.notify();
});
}
})?;

if oauth_token.is_some() {
Self::update_models(&this, cx).await?;
if let Some(ref oauth_token) = oauth_token {
let api_token = request_api_token(oauth_token, client.clone()).await?;
cx.update(|cx| {
if let Some(this) = Self::global(cx).as_ref() {
this.update(cx, |this, cx| {
this.api_token = Some(api_token.clone());
cx.notify();
});
}
})?;
let models = get_models(api_token.api_key, client.clone()).await?;
cx.update(|cx| {
if let Some(this) = Self::global(cx).as_ref() {
this.update(cx, |this, cx| {
this.models = Some(models);
cx.notify();
});
}
})?;
}
}
anyhow::Ok(())
}
anyhow::Ok(())
})
.detach_and_log_err(cx);

let this = Self {
oauth_token: std::env::var(COPILOT_OAUTH_ENV_VAR).ok(),
Self {
oauth_token: None,
api_token: None,
models: None,
settings,
client,
};
if this.oauth_token.is_some() {
cx.spawn(async move |this, mut cx| Self::update_models(&this, &mut cx).await)
.detach_and_log_err(cx);
}

this
}

async fn update_models(this: &WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
let (oauth_token, client, auth_url) = this.read_with(cx, |this, _| {
(
this.oauth_token.clone(),
this.client.clone(),
this.settings.auth_url.clone(),
)
})?;
let api_token = request_api_token(
&oauth_token.ok_or_else(|| {
anyhow!("OAuth token is missing while updating Copilot Chat models")
})?,
auth_url,
client.clone(),
)
.await?;

let models_url = this.update(cx, |this, cx| {
this.api_token = Some(api_token.clone());
cx.notify();
this.settings.models_url.clone()
})?;
let models = get_models(models_url, api_token.api_key, client.clone()).await?;

this.update(cx, |this, cx| {
this.models = Some(models);
cx.notify();
})?;
anyhow::Ok(())
}

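Elsewhere in this file the chat request path only reuses a cached API token while it has more than five minutes of validity left (`remaining_seconds() > 5 * 60`). Below is a small, hypothetical sketch of that guard with a stand-in `ApiToken` built on std time; the crate's real `ApiToken` tracks expiry differently.

```rust
use std::time::{Duration, Instant};

// Hypothetical stand-in for the crate's ApiToken; only the expiry check matters here.
struct ApiToken {
    api_key: String,
    expires_at: Instant,
}

impl ApiToken {
    fn remaining_seconds(&self) -> u64 {
        self.expires_at
            .saturating_duration_since(Instant::now())
            .as_secs()
    }
}

// Reuse the cached token only while it has more than five minutes left,
// mirroring the guard in the completion request path.
fn usable_token(cached: Option<&ApiToken>) -> Option<&ApiToken> {
    cached.filter(|token| token.remaining_seconds() > 5 * 60)
}

fn main() {
    let token = ApiToken {
        api_key: "example".into(),
        expires_at: Instant::now() + Duration::from_secs(3600),
    };
    assert!(usable_token(Some(&token)).is_some());
    println!("token still valid: {}", token.api_key);
}
```
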
pub fn is_authenticated(&self) -> bool {
|
||||
@@ -471,23 +449,20 @@ impl CopilotChat {
|
||||
.flatten()
|
||||
.context("Copilot chat is not enabled")?;
|
||||
|
||||
let (oauth_token, api_token, client, api_url, auth_url) =
|
||||
this.read_with(&cx, |this, _| {
|
||||
(
|
||||
this.oauth_token.clone(),
|
||||
this.api_token.clone(),
|
||||
this.client.clone(),
|
||||
this.settings.api_url.clone(),
|
||||
this.settings.auth_url.clone(),
|
||||
)
|
||||
})?;
|
||||
let (oauth_token, api_token, client) = this.read_with(&cx, |this, _| {
|
||||
(
|
||||
this.oauth_token.clone(),
|
||||
this.api_token.clone(),
|
||||
this.client.clone(),
|
||||
)
|
||||
})?;
|
||||
|
||||
let oauth_token = oauth_token.context("No OAuth token available")?;
|
||||
|
||||
let token = match api_token {
|
||||
Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
|
||||
_ => {
|
||||
let token = request_api_token(&oauth_token, auth_url, client.clone()).await?;
|
||||
let token = request_api_token(&oauth_token, client.clone()).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.api_token = Some(token.clone());
|
||||
cx.notify();
|
||||
@@ -496,28 +471,12 @@ impl CopilotChat {
|
||||
}
|
||||
};
|
||||
|
||||
stream_completion(client.clone(), token.api_key, api_url, request).await
|
||||
}
|
||||
|
||||
pub fn set_settings(&mut self, settings: CopilotChatSettings, cx: &mut Context<Self>) {
|
||||
let same_settings = self.settings == settings;
|
||||
self.settings = settings;
|
||||
if !same_settings {
|
||||
cx.spawn(async move |this, cx| {
|
||||
Self::update_models(&this, cx).await?;
|
||||
Ok::<_, anyhow::Error>(())
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
stream_completion(client.clone(), token.api_key, request).await
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_models(
|
||||
models_url: Arc<str>,
|
||||
api_token: String,
|
||||
client: Arc<dyn HttpClient>,
|
||||
) -> Result<Vec<Model>> {
|
||||
let all_models = request_models(models_url, api_token, client).await?;
|
||||
async fn get_models(api_token: String, client: Arc<dyn HttpClient>) -> Result<Vec<Model>> {
|
||||
let all_models = request_models(api_token, client).await?;
|
||||
|
||||
let mut models: Vec<Model> = all_models
|
||||
.into_iter()
|
||||
@@ -545,14 +504,10 @@ async fn get_models(
|
||||
Ok(models)
|
||||
}
|
||||
|
||||
async fn request_models(
|
||||
models_url: Arc<str>,
|
||||
api_token: String,
|
||||
client: Arc<dyn HttpClient>,
|
||||
) -> Result<Vec<Model>> {
|
||||
async fn request_models(api_token: String, client: Arc<dyn HttpClient>) -> Result<Vec<Model>> {
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::GET)
|
||||
.uri(models_url.as_ref())
|
||||
.uri(COPILOT_CHAT_MODELS_URL)
|
||||
.header("Authorization", format!("Bearer {}", api_token))
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Copilot-Integration-Id", "vscode-chat");
|
||||
@@ -576,14 +531,10 @@ async fn request_models(
|
||||
Ok(models)
|
||||
}
|
||||
|
||||
async fn request_api_token(
|
||||
oauth_token: &str,
|
||||
auth_url: Arc<str>,
|
||||
client: Arc<dyn HttpClient>,
|
||||
) -> Result<ApiToken> {
|
||||
async fn request_api_token(oauth_token: &str, client: Arc<dyn HttpClient>) -> Result<ApiToken> {
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::GET)
|
||||
.uri(auth_url.as_ref())
|
||||
.uri(COPILOT_CHAT_AUTH_URL)
|
||||
.header("Authorization", format!("token {}", oauth_token))
|
||||
.header("Accept", "application/json");
|
||||
|
||||
@@ -628,7 +579,6 @@ fn extract_oauth_token(contents: String) -> Option<String> {
|
||||
async fn stream_completion(
|
||||
client: Arc<dyn HttpClient>,
|
||||
api_key: String,
|
||||
completion_url: Arc<str>,
|
||||
request: Request,
|
||||
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
|
||||
let is_vision_request = request.messages.last().map_or(false, |message| match message {
|
||||
@@ -642,7 +592,7 @@ async fn stream_completion(
|
||||
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::POST)
|
||||
.uri(completion_url.as_ref())
|
||||
.uri(COPILOT_CHAT_COMPLETION_URL)
|
||||
.header(
|
||||
"Editor-Version",
|
||||
format!(
|
||||
|
||||
@@ -39,7 +39,6 @@ file_icons.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
gpui.workspace = true
|
||||
itertools.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
menu.workspace = true
|
||||
|
||||
@@ -342,7 +342,7 @@ impl DebugPanel {
|
||||
window.defer(cx, move |window, cx| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
NewProcessModal::show(workspace, window, NewProcessMode::Debug, None, cx);
|
||||
NewProcessModal::show(workspace, window, NewProcessMode::Launch, None, cx);
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
|
||||
@@ -19,7 +19,6 @@ use gpui::{
|
||||
InteractiveText, KeyContext, PromptButton, PromptLevel, Render, StyledText, Subscription,
|
||||
TextStyle, UnderlineStyle, WeakEntity,
|
||||
};
|
||||
use itertools::Itertools as _;
|
||||
use picker::{Picker, PickerDelegate, highlighted_match_with_paths::HighlightedMatch};
|
||||
use project::{ProjectPath, TaskContexts, TaskSourceKind, task_store::TaskStore};
|
||||
use settings::{Settings, initial_local_debug_tasks_content};
|
||||
@@ -50,7 +49,7 @@ pub(super) struct NewProcessModal {
|
||||
mode: NewProcessMode,
|
||||
debug_picker: Entity<Picker<DebugDelegate>>,
|
||||
attach_mode: Entity<AttachMode>,
|
||||
launch_mode: Entity<ConfigureMode>,
|
||||
launch_mode: Entity<LaunchMode>,
|
||||
task_mode: TaskMode,
|
||||
debugger: Option<DebugAdapterName>,
|
||||
// save_scenario_state: Option<SaveScenarioState>,
|
||||
@@ -98,13 +97,13 @@ impl NewProcessModal {
|
||||
workspace.toggle_modal(window, cx, |window, cx| {
|
||||
let attach_mode = AttachMode::new(None, workspace_handle.clone(), window, cx);
|
||||
|
||||
let debug_picker = cx.new(|cx| {
|
||||
let launch_picker = cx.new(|cx| {
|
||||
let delegate =
|
||||
DebugDelegate::new(debug_panel.downgrade(), task_store.clone());
|
||||
Picker::uniform_list(delegate, window, cx).modal(false)
|
||||
});
|
||||
|
||||
let configure_mode = ConfigureMode::new(window, cx);
|
||||
let configure_mode = LaunchMode::new(window, cx);
|
||||
|
||||
let task_overrides = Some(TaskOverrides { reveal_target });
|
||||
|
||||
@@ -123,7 +122,7 @@ impl NewProcessModal {
|
||||
};
|
||||
|
||||
let _subscriptions = [
|
||||
cx.subscribe(&debug_picker, |_, _, _, cx| {
|
||||
cx.subscribe(&launch_picker, |_, _, _, cx| {
|
||||
cx.emit(DismissEvent);
|
||||
}),
|
||||
cx.subscribe(
|
||||
@@ -138,76 +137,19 @@ impl NewProcessModal {
|
||||
];
|
||||
|
||||
cx.spawn_in(window, {
|
||||
let debug_picker = debug_picker.downgrade();
|
||||
let launch_picker = launch_picker.downgrade();
|
||||
let configure_mode = configure_mode.downgrade();
|
||||
let task_modal = task_mode.task_modal.downgrade();
|
||||
let workspace = workspace_handle.clone();
|
||||
|
||||
async move |this, cx| {
|
||||
let task_contexts = task_contexts.await;
|
||||
let task_contexts = Arc::new(task_contexts);
|
||||
let lsp_task_sources = task_contexts.lsp_task_sources.clone();
|
||||
let task_position = task_contexts.latest_selection;
|
||||
// Get LSP tasks and filter out based on language vs lsp preference
|
||||
let (lsp_tasks, prefer_lsp) =
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
let lsp_tasks = editor::lsp_tasks(
|
||||
workspace.project().clone(),
|
||||
&lsp_task_sources,
|
||||
task_position,
|
||||
cx,
|
||||
);
|
||||
let prefer_lsp = workspace
|
||||
.active_item(cx)
|
||||
.and_then(|item| item.downcast::<Editor>())
|
||||
.map(|editor| {
|
||||
editor
|
||||
.read(cx)
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.language_settings(cx)
|
||||
.tasks
|
||||
.prefer_lsp
|
||||
})
|
||||
.unwrap_or(false);
|
||||
(lsp_tasks, prefer_lsp)
|
||||
})?;
|
||||
|
||||
let lsp_tasks = lsp_tasks.await;
|
||||
let add_current_language_tasks = !prefer_lsp || lsp_tasks.is_empty();
|
||||
|
||||
let lsp_tasks = lsp_tasks
|
||||
.into_iter()
|
||||
.flat_map(|(kind, tasks_with_locations)| {
|
||||
tasks_with_locations
|
||||
.into_iter()
|
||||
.sorted_by_key(|(location, task)| {
|
||||
(location.is_none(), task.resolved_label.clone())
|
||||
})
|
||||
.map(move |(_, task)| (kind.clone(), task))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let Some(task_inventory) = task_store
|
||||
.update(cx, |task_store, _| task_store.task_inventory().cloned())?
|
||||
else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let (used_tasks, current_resolved_tasks) =
|
||||
task_inventory.update(cx, |task_inventory, cx| {
|
||||
task_inventory
|
||||
.used_and_current_resolved_tasks(&task_contexts, cx)
|
||||
})?;
|
||||
|
||||
debug_picker
|
||||
launch_picker
|
||||
.update_in(cx, |picker, window, cx| {
|
||||
picker.delegate.tasks_loaded(
|
||||
picker.delegate.task_contexts_loaded(
|
||||
task_contexts.clone(),
|
||||
languages,
|
||||
lsp_tasks.clone(),
|
||||
current_resolved_tasks.clone(),
|
||||
add_current_language_tasks,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
picker.refresh(window, cx);
|
||||
@@ -228,15 +170,7 @@ impl NewProcessModal {
|
||||
|
||||
task_modal
|
||||
.update_in(cx, |task_modal, window, cx| {
|
||||
task_modal.tasks_loaded(
|
||||
task_contexts,
|
||||
lsp_tasks,
|
||||
used_tasks,
|
||||
current_resolved_tasks,
|
||||
add_current_language_tasks,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
task_modal.task_contexts_loaded(task_contexts, window, cx);
|
||||
})
|
||||
.ok();
|
||||
|
||||
@@ -244,14 +178,12 @@ impl NewProcessModal {
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
debug_picker,
|
||||
debug_picker: launch_picker,
|
||||
attach_mode,
|
||||
launch_mode: configure_mode,
|
||||
task_mode,
|
||||
@@ -888,18 +820,18 @@ impl RenderOnce for AttachMode {
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(super) struct ConfigureMode {
|
||||
pub(super) struct LaunchMode {
|
||||
program: Entity<Editor>,
|
||||
cwd: Entity<Editor>,
|
||||
stop_on_entry: ToggleState,
|
||||
// save_to_debug_json: ToggleState,
|
||||
}
|
||||
|
||||
impl ConfigureMode {
|
||||
impl LaunchMode {
|
||||
pub(super) fn new(window: &mut Window, cx: &mut App) -> Entity<Self> {
|
||||
let program = cx.new(|cx| Editor::single_line(window, cx));
|
||||
program.update(cx, |this, cx| {
|
||||
this.set_placeholder_text("ENV=Zed ~/bin/program --option", cx);
|
||||
this.set_placeholder_text("ENV=Zed ~/bin/debugger --launch", cx);
|
||||
});
|
||||
|
||||
let cwd = cx.new(|cx| Editor::single_line(window, cx));
|
||||
@@ -987,7 +919,7 @@ impl ConfigureMode {
|
||||
.child(adapter_menu),
|
||||
)
|
||||
.child(
|
||||
Label::new("Program")
|
||||
Label::new("Debugger Program")
|
||||
.size(ui::LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
@@ -1135,29 +1067,21 @@ impl DebugDelegate {
|
||||
(language, scenario)
|
||||
}
|
||||
|
||||
pub fn tasks_loaded(
|
||||
pub fn task_contexts_loaded(
|
||||
&mut self,
|
||||
task_contexts: Arc<TaskContexts>,
|
||||
languages: Arc<LanguageRegistry>,
|
||||
lsp_tasks: Vec<(TaskSourceKind, task::ResolvedTask)>,
|
||||
current_resolved_tasks: Vec<(TaskSourceKind, task::ResolvedTask)>,
|
||||
add_current_language_tasks: bool,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) {
|
||||
self.task_contexts = Some(task_contexts.clone());
|
||||
self.task_contexts = Some(task_contexts);
|
||||
|
||||
let (recent, scenarios) = self
|
||||
.task_store
|
||||
.update(cx, |task_store, cx| {
|
||||
task_store.task_inventory().map(|inventory| {
|
||||
inventory.update(cx, |inventory, cx| {
|
||||
inventory.list_debug_scenarios(
|
||||
&task_contexts,
|
||||
lsp_tasks,
|
||||
current_resolved_tasks,
|
||||
add_current_language_tasks,
|
||||
cx,
|
||||
)
|
||||
inventory.list_debug_scenarios(self.task_contexts.as_ref().unwrap(), cx)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1333,17 +1257,12 @@ impl PickerDelegate for DebugDelegate {
|
||||
.map(|icon| icon.color(Color::Muted).size(IconSize::Small));
|
||||
let indicator = if matches!(task_kind, Some(TaskSourceKind::Lsp { .. })) {
|
||||
Some(Indicator::icon(
|
||||
Icon::new(IconName::BoltFilled)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small),
|
||||
Icon::new(IconName::BoltFilled).color(Color::Muted),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let icon = icon.map(|icon| {
|
||||
IconWithIndicator::new(icon, indicator)
|
||||
.indicator_border_color(Some(cx.theme().colors().border_transparent))
|
||||
});
|
||||
let icon = icon.map(|icon| IconWithIndicator::new(icon, indicator));
|
||||
|
||||
Some(
|
||||
ListItem::new(SharedString::from(format!("debug-scenario-selection-{ix}")))
|
||||
|
||||
@@ -282,6 +282,16 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider {
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_completions(
|
||||
&self,
|
||||
_buffer: Entity<Buffer>,
|
||||
_completion_indices: Vec<usize>,
|
||||
_completions: Rc<RefCell<Box<[Completion]>>>,
|
||||
_cx: &mut Context<Editor>,
|
||||
) -> gpui::Task<anyhow::Result<bool>> {
|
||||
Task::ready(Ok(false))
|
||||
}
|
||||
|
||||
fn apply_additional_edits_for_completion(
|
||||
&self,
|
||||
_buffer: Entity<Buffer>,
|
||||
|
||||
@@ -11,7 +11,7 @@ use editor::{
|
||||
};
|
||||
use gpui::{TestAppContext, VisualTestContext};
|
||||
use indoc::indoc;
|
||||
use language::{DiagnosticSourceKind, Rope};
|
||||
use language::Rope;
|
||||
use lsp::LanguageServerId;
|
||||
use pretty_assertions::assert_eq;
|
||||
use project::FakeFs;
|
||||
@@ -105,7 +105,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
|
||||
}
|
||||
],
|
||||
version: None
|
||||
}, None, DiagnosticSourceKind::Pushed, &[], cx).unwrap();
|
||||
}, &[], cx).unwrap();
|
||||
});
|
||||
|
||||
// Open the project diagnostics view while there are already diagnostics.
|
||||
@@ -176,8 +176,6 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
|
||||
}],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -263,8 +261,6 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
|
||||
],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -372,8 +368,6 @@ async fn test_diagnostics_with_folds(cx: &mut TestAppContext) {
|
||||
}],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -471,8 +465,6 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
|
||||
}],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -515,8 +507,6 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
|
||||
}],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -558,8 +548,6 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
|
||||
}],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -572,8 +560,6 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
|
||||
diagnostics: vec![],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -614,8 +600,6 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
|
||||
}],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -748,8 +732,6 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
|
||||
diagnostics: diagnostics.clone(),
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -937,8 +919,6 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
|
||||
diagnostics: diagnostics.clone(),
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -994,8 +974,6 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext)
|
||||
..Default::default()
|
||||
}],
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -1029,8 +1007,6 @@ async fn active_diagnostics_dismiss_after_invalidation(cx: &mut TestAppContext)
|
||||
version: None,
|
||||
diagnostics: Vec::new(),
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -1112,8 +1088,6 @@ async fn cycle_through_same_place_diagnostics(cx: &mut TestAppContext) {
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -1252,8 +1226,6 @@ async fn test_diagnostics_with_links(cx: &mut TestAppContext) {
|
||||
..Default::default()
|
||||
}],
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -1305,8 +1277,6 @@ async fn test_hover_diagnostic_and_info_popovers(cx: &mut gpui::TestAppContext)
|
||||
..Default::default()
|
||||
}],
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -1408,8 +1378,6 @@ async fn test_diagnostics_with_code(cx: &mut TestAppContext) {
|
||||
],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -464,7 +464,7 @@ impl BlockMap {
map
}

pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapReader<'_> {
pub fn read(&self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapReader {
self.sync(&wrap_snapshot, edits);
*self.wrap_snapshot.borrow_mut() = wrap_snapshot.clone();
BlockMapReader {
@@ -479,7 +479,7 @@ impl BlockMap {
}
}

pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapWriter<'_> {
pub fn write(&mut self, wrap_snapshot: WrapSnapshot, edits: Patch<u32>) -> BlockMapWriter {
self.sync(&wrap_snapshot, edits);
*self.wrap_snapshot.borrow_mut() = wrap_snapshot;
BlockMapWriter(self)
@@ -1327,7 +1327,7 @@ impl BlockSnapshot {
}
}

pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows<'_> {
pub(super) fn row_infos(&self, start_row: BlockRow) -> BlockRows {
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
cursor.seek(&start_row, Bias::Right, &());
let (output_start, input_start) = cursor.start();

@@ -357,7 +357,7 @@ impl FoldMap {
&mut self,
inlay_snapshot: InlaySnapshot,
edits: Vec<InlayEdit>,
) -> (FoldMapWriter<'_>, FoldSnapshot, Vec<FoldEdit>) {
) -> (FoldMapWriter, FoldSnapshot, Vec<FoldEdit>) {
let (snapshot, edits) = self.read(inlay_snapshot, edits);
(FoldMapWriter(self), snapshot, edits)
}
@@ -730,7 +730,7 @@ impl FoldSnapshot {
(line_end - line_start) as u32
}

pub fn row_infos(&self, start_row: u32) -> FoldRows<'_> {
pub fn row_infos(&self, start_row: u32) -> FoldRows {
if start_row > self.transforms.summary().output.lines.row {
panic!("invalid display row {}", start_row);
}

@@ -726,7 +726,7 @@ impl WrapSnapshot {
self.transforms.summary().output.longest_row
}

pub fn row_infos(&self, start_row: u32) -> WrapRows<'_> {
pub fn row_infos(&self, start_row: u32) -> WrapRows {
let mut transforms = self.transforms.cursor::<(WrapPoint, TabPoint)>(&());
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left, &());
let mut input_row = transforms.start().1.row();

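The hunks above toggle the explicit anonymous lifetime (`<'_>`) on return types such as `BlockMapReader` and `FoldRows`. Both spellings are equivalent when the returned value borrows from `&self`; the `<'_>` form only makes that borrow visible in the signature. A tiny illustration with hypothetical types:

```rust
// Self-contained illustration of the `<'_>` difference shown in the hunks above,
// using a hypothetical Registry/Reader pair rather than Zed's types.
struct Registry {
    items: Vec<String>,
}

struct Reader<'a> {
    items: &'a [String],
}

impl Registry {
    // Elided form: the borrow of `self` is implied.
    fn read(&self) -> Reader {
        Reader { items: &self.items }
    }

    // Same signature with the anonymous lifetime spelled out.
    fn read_explicit(&self) -> Reader<'_> {
        Reader { items: &self.items }
    }
}

fn main() {
    let registry = Registry { items: vec!["a".into(), "b".into()] };
    println!("{} / {}", registry.read().items.len(), registry.read_explicit().items.len());
}
```
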
File diff suppressed because it is too large
@@ -49,7 +49,6 @@ pub struct EditorSettings {
#[serde(default)]
pub diagnostics_max_severity: Option<DiagnosticSeverity>,
pub inline_code_actions: bool,
pub drag_and_drop_selection: bool,
}

#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
@@ -423,7 +422,7 @@ pub struct EditorSettingsContent {
/// Default: always
pub seed_search_query_from_cursor: Option<SeedQuerySetting>,
pub use_smartcase_search: Option<bool>,
/// Determines the modifier to be used to add multiple cursors with the mouse. The open hover link mouse gestures will adapt such that it do not conflict with the multicursor modifier.
/// The key to use for adding multiple cursors
///
/// Default: alt
pub multi_cursor_modifier: Option<MultiCursorModifier>,
@@ -496,11 +495,6 @@ pub struct EditorSettingsContent {
///
/// Default: true
pub inline_code_actions: Option<bool>,

/// Whether to allow drag and drop text selection in buffer.
///
/// Default: true
pub drag_and_drop_selection: Option<bool>,
}

// Toolbar related settings

@@ -6300,296 +6300,6 @@ async fn test_add_selection_above_below(cx: &mut TestAppContext) {
|
||||
));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_add_selection_above_below_multi_cursor(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.set_state(indoc!(
|
||||
r#"line onˇe
|
||||
liˇne two
|
||||
line three
|
||||
line four"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_below(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test multiple cursors expand in the same direction
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"line onˇe
|
||||
liˇne twˇo
|
||||
liˇne three
|
||||
line four"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_below(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_below(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test multiple cursors expand below overflow
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"line onˇe
|
||||
liˇne twˇo
|
||||
liˇne thˇree
|
||||
liˇne foˇur"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_above(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test multiple cursors retrieves back correctly
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"line onˇe
|
||||
liˇne twˇo
|
||||
liˇne thˇree
|
||||
line four"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_above(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_above(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test multiple cursor groups maintain independent direction - first expands up, second shrinks above
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"liˇne onˇe
|
||||
liˇne two
|
||||
line three
|
||||
line four"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.undo_selection(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test undo
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"line onˇe
|
||||
liˇne twˇo
|
||||
line three
|
||||
line four"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.redo_selection(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test redo
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"liˇne onˇe
|
||||
liˇne two
|
||||
line three
|
||||
line four"#
|
||||
));
|
||||
|
||||
cx.set_state(indoc!(
|
||||
r#"abcd
|
||||
ef«ghˇ»
|
||||
ijkl
|
||||
«mˇ»nop"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_above(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test multiple selections expand in the same direction
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"ab«cdˇ»
|
||||
ef«ghˇ»
|
||||
«iˇ»jkl
|
||||
«mˇ»nop"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_above(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test multiple selection upward overflow
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"ab«cdˇ»
|
||||
«eˇ»f«ghˇ»
|
||||
«iˇ»jkl
|
||||
«mˇ»nop"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_below(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test multiple selection retrieves back correctly
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"abcd
|
||||
ef«ghˇ»
|
||||
«iˇ»jkl
|
||||
«mˇ»nop"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_below(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test multiple cursor groups maintain independent direction - first shrinks down, second expands below
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"abcd
|
||||
ef«ghˇ»
|
||||
ij«klˇ»
|
||||
«mˇ»nop"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.undo_selection(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test undo
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"abcd
|
||||
ef«ghˇ»
|
||||
«iˇ»jkl
|
||||
«mˇ»nop"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.redo_selection(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test redo
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"abcd
|
||||
ef«ghˇ»
|
||||
ij«klˇ»
|
||||
«mˇ»nop"#
|
||||
));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_add_selection_above_below_multi_cursor_existing_state(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.set_state(indoc!(
|
||||
r#"line onˇe
|
||||
liˇne two
|
||||
line three
|
||||
line four"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_below(&Default::default(), window, cx);
|
||||
editor.add_selection_below(&Default::default(), window, cx);
|
||||
editor.add_selection_below(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// initial state with two multi cursor groups
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"line onˇe
|
||||
liˇne twˇo
|
||||
liˇne thˇree
|
||||
liˇne foˇur"#
|
||||
));
|
||||
|
||||
// add single cursor in middle - simulate opt click
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
let new_cursor_point = DisplayPoint::new(DisplayRow(2), 4);
|
||||
editor.begin_selection(new_cursor_point, true, 1, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"line onˇe
|
||||
liˇne twˇo
|
||||
liˇneˇ thˇree
|
||||
liˇne foˇur"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_above(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test new added selection expands above and existing selection shrinks
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"line onˇe
|
||||
liˇneˇ twˇo
|
||||
liˇneˇ thˇree
|
||||
line four"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_above(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test new added selection expands above and existing selection shrinks
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"lineˇ onˇe
|
||||
liˇneˇ twˇo
|
||||
lineˇ three
|
||||
line four"#
|
||||
));
|
||||
|
||||
// intial state with two selection groups
|
||||
cx.set_state(indoc!(
|
||||
r#"abcd
|
||||
ef«ghˇ»
|
||||
ijkl
|
||||
«mˇ»nop"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_above(&Default::default(), window, cx);
|
||||
editor.add_selection_above(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"ab«cdˇ»
|
||||
«eˇ»f«ghˇ»
|
||||
«iˇ»jkl
|
||||
«mˇ»nop"#
|
||||
));
|
||||
|
||||
// add single selection in middle - simulate opt drag
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
let new_cursor_point = DisplayPoint::new(DisplayRow(2), 3);
|
||||
editor.begin_selection(new_cursor_point, true, 1, window, cx);
|
||||
editor.update_selection(
|
||||
DisplayPoint::new(DisplayRow(2), 4),
|
||||
0,
|
||||
gpui::Point::<f32>::default(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
editor.end_selection(window, cx);
|
||||
});
|
||||
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"ab«cdˇ»
|
||||
«eˇ»f«ghˇ»
|
||||
«iˇ»jk«lˇ»
|
||||
«mˇ»nop"#
|
||||
));
|
||||
|
||||
cx.update_editor(|editor, window, cx| {
|
||||
editor.add_selection_below(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
// test new added selection expands below, others shrinks from above
|
||||
cx.assert_editor_state(indoc!(
|
||||
r#"abcd
|
||||
ef«ghˇ»
|
||||
«iˇ»jk«lˇ»
|
||||
«mˇ»no«pˇ»"#
|
||||
));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_select_next(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -13940,8 +13650,6 @@ async fn go_to_prev_overlapping_diagnostic(executor: BackgroundExecutor, cx: &mu
|
||||
},
|
||||
],
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
@@ -21854,204 +21562,3 @@ fn assert_hunk_revert(
|
||||
cx.assert_editor_state(expected_reverted_text_with_selections);
|
||||
assert_eq!(actual_hunk_statuses_before, expected_hunk_statuses_before);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
init_test(cx, |_| {});

let diagnostic_requests = Arc::new(AtomicUsize::new(0));
let counter = diagnostic_requests.clone();

let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
json!({
"first.rs": "fn main() { let a = 5; }",
"second.rs": "// Test file",
}),
)
.await;

let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);

let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
let mut fake_servers = language_registry.register_fake_lsp(
"Rust",
FakeLspAdapter {
capabilities: lsp::ServerCapabilities {
diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
lsp::DiagnosticOptions {
identifier: None,
inter_file_dependencies: true,
workspace_diagnostics: true,
work_done_progress_options: Default::default(),
},
)),
..Default::default()
},
..Default::default()
},
);

let editor = workspace
.update(cx, |workspace, window, cx| {
workspace.open_abs_path(
PathBuf::from(path!("/a/first.rs")),
OpenOptions::default(),
window,
cx,
)
})
.unwrap()
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let fake_server = fake_servers.next().await.unwrap();
let mut first_request = fake_server
.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(move |params, _| {
let new_result_id = counter.fetch_add(1, atomic::Ordering::Release) + 1;
let result_id = Some(new_result_id.to_string());
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/a/first.rs")).unwrap()
);
async move {
Ok(lsp::DocumentDiagnosticReportResult::Report(
lsp::DocumentDiagnosticReport::Full(lsp::RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport {
items: Vec::new(),
result_id,
},
}),
))
}
});

let ensure_result_id = |expected: Option<String>, cx: &mut TestAppContext| {
project.update(cx, |project, cx| {
let buffer_id = editor
.read(cx)
.buffer()
.read(cx)
.as_singleton()
.expect("created a singleton buffer")
.read(cx)
.remote_id();
let buffer_result_id = project.lsp_store().read(cx).result_id(buffer_id);
assert_eq!(expected, buffer_result_id);
});
};

ensure_result_id(None, cx);
cx.executor().advance_clock(Duration::from_millis(60));
cx.executor().run_until_parked();
assert_eq!(
diagnostic_requests.load(atomic::Ordering::Acquire),
1,
"Opening file should trigger diagnostic request"
);
first_request
.next()
.await
.expect("should have sent the first diagnostics pull request");
ensure_result_id(Some("1".to_string()), cx);

// Editing should trigger diagnostics
editor.update_in(cx, |editor, window, cx| {
editor.handle_input("2", window, cx)
});
cx.executor().advance_clock(Duration::from_millis(60));
cx.executor().run_until_parked();
assert_eq!(
diagnostic_requests.load(atomic::Ordering::Acquire),
2,
"Editing should trigger diagnostic request"
);
ensure_result_id(Some("2".to_string()), cx);

// Moving cursor should not trigger diagnostic request
editor.update_in(cx, |editor, window, cx| {
editor.change_selections(None, window, cx, |s| {
s.select_ranges([Point::new(0, 0)..Point::new(0, 0)])
});
});
cx.executor().advance_clock(Duration::from_millis(60));
cx.executor().run_until_parked();
assert_eq!(
diagnostic_requests.load(atomic::Ordering::Acquire),
2,
"Cursor movement should not trigger diagnostic request"
);
ensure_result_id(Some("2".to_string()), cx);

// Multiple rapid edits should be debounced
for _ in 0..5 {
editor.update_in(cx, |editor, window, cx| {
editor.handle_input("x", window, cx)
});
}
cx.executor().advance_clock(Duration::from_millis(60));
cx.executor().run_until_parked();

let final_requests = diagnostic_requests.load(atomic::Ordering::Acquire);
assert!(
final_requests <= 4,
"Multiple rapid edits should be debounced (got {final_requests} requests)",
);
ensure_result_id(Some(final_requests.to_string()), cx);
}
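Not part of the diff: a minimal standalone sketch of the result-id scheme the fake language server above relies on, assuming nothing beyond std. Each pull bumps a shared counter and reports the new value as a string, which is what the `ensure_result_id` assertions check.

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

// Mirror of the handler's `counter.fetch_add(1, Ordering::Release) + 1`:
// fetch_add returns the previous value, so add 1 to get the new result id.
fn next_result_id(counter: &AtomicUsize) -> String {
    (counter.fetch_add(1, Ordering::Release) + 1).to_string()
}

fn main() {
    let counter = AtomicUsize::new(0);
    assert_eq!(next_result_id(&counter), "1");
    assert_eq!(next_result_id(&counter), "2");
    assert_eq!(counter.load(Ordering::Acquire), 2);
}
```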

#[gpui::test]
async fn test_add_selection_after_moving_with_multiple_cursors(cx: &mut TestAppContext) {
// Regression test for issue #11671
// Previously, adding a cursor after moving multiple cursors would reset
// the cursor count instead of adding to the existing cursors.
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;

// Create a simple buffer with cursor at start
cx.set_state(indoc! {"
ˇaaaa
bbbb
cccc
dddd
eeee
ffff
gggg
hhhh"});

// Add 2 cursors below (so we have 3 total)
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
editor.add_selection_below(&Default::default(), window, cx);
});

// Verify we have 3 cursors
let initial_count = cx.update_editor(|editor, _, _| editor.selections.count());
assert_eq!(
initial_count, 3,
"Should have 3 cursors after adding 2 below"
);

// Move down one line
cx.update_editor(|editor, window, cx| {
editor.move_down(&MoveDown, window, cx);
});

// Add another cursor below
cx.update_editor(|editor, window, cx| {
editor.add_selection_below(&Default::default(), window, cx);
});

// Should now have 4 cursors (3 original + 1 new)
let final_count = cx.update_editor(|editor, _, _| editor.selections.count());
assert_eq!(
final_count, 4,
"Should have 4 cursors after moving and adding another"
);
}

@@ -1,14 +1,14 @@
use crate::{
ActiveDiagnostic, BlockId, CURSORS_VISIBLE_FOR, ChunkRendererContext, ChunkReplacement,
CodeActionSource, ConflictsOurs, ConflictsOursMarker, ConflictsOuter, ConflictsTheirs,
ConflictsTheirsMarker, ContextMenuPlacement, CursorShape, CustomBlockId, DisplayDiffHunk,
DisplayPoint, DisplayRow, DocumentHighlightRead, DocumentHighlightWrite, EditDisplayMode,
Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT,
FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor,
InlayHintRefreshReason, InlineCompletion, JumpData, LineDown, LineHighlight, LineUp,
MAX_LINE_LEN, MIN_LINE_NUMBER_DIGITS, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT,
OpenExcerpts, PageDown, PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt,
SelectPhase, SelectedTextHighlight, Selection, SelectionDragState, SoftWrap,
ActiveDiagnostic, BlockId, COLUMNAR_SELECTION_MODIFIERS, CURSORS_VISIBLE_FOR,
ChunkRendererContext, ChunkReplacement, CodeActionSource, ConflictsOurs, ConflictsOursMarker,
ConflictsOuter, ConflictsTheirs, ConflictsTheirsMarker, ContextMenuPlacement, CursorShape,
CustomBlockId, DisplayDiffHunk, DisplayPoint, DisplayRow, DocumentHighlightRead,
DocumentHighlightWrite, EditDisplayMode, Editor, EditorMode, EditorSettings, EditorSnapshot,
EditorStyle, FILE_HEADER_HEIGHT, FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp,
HandleInput, HoveredCursor, InlayHintRefreshReason, InlineCompletion, JumpData, LineDown,
LineHighlight, LineUp, MAX_LINE_LEN, MIN_LINE_NUMBER_DIGITS, MINIMAP_FONT_SIZE,
MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, PageUp, PhantomBreakpointIndicator,
Point, RowExt, RowRangeExt, SelectPhase, SelectedTextHighlight, Selection, SoftWrap,
StickyHeaderExcerpt, ToPoint, ToggleFold,
code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP},
display_map::{
@@ -17,7 +17,8 @@ use crate::{
},
editor_settings::{
CurrentLineHighlight, DoubleClickInMultibuffer, MinimapThumb, MinimapThumbBorder,
ScrollBeyondLastLine, ScrollbarAxes, ScrollbarDiagnostics, ShowMinimap, ShowScrollbar,
MultiCursorModifier, ScrollBeyondLastLine, ScrollbarAxes, ScrollbarDiagnostics,
ShowMinimap, ShowScrollbar,
},
git::blame::{BlameRenderer, GitBlame, GlobalBlameRenderer},
hover_popover::{
@@ -78,11 +79,10 @@ use std::{
time::Duration,
};
use sum_tree::Bias;
use text::{BufferId, SelectionGoal};
use text::BufferId;
use theme::{ActiveTheme, Appearance, BufferLineHeight, PlayerColor};
use ui::{ButtonLike, KeyBinding, POPOVER_Y_PADDING, Tooltip, h_flex, prelude::*};
use unicode_segmentation::UnicodeSegmentation;
use util::post_inc;
use util::{RangeExt, ResultExt, debug_panic};
use workspace::{CollaboratorId, Workspace, item::Item, notifications::NotifyTaskExt};

@@ -620,7 +620,6 @@ impl EditorElement {

let text_hitbox = &position_map.text_hitbox;
let gutter_hitbox = &position_map.gutter_hitbox;
let point_for_position = position_map.point_for_position(event.position);
let mut click_count = event.click_count;
let mut modifiers = event.modifiers;

@@ -634,20 +633,6 @@ impl EditorElement {
return;
}

if editor.drag_and_drop_selection_enabled && click_count == 1 {
let newest_anchor = editor.selections.newest_anchor();
let snapshot = editor.snapshot(window, cx);
let selection = newest_anchor.map(|anchor| anchor.to_display_point(&snapshot));
if point_for_position.intersects_selection(&selection) {
editor.selection_drag_state = SelectionDragState::ReadyToDrag {
selection: newest_anchor.clone(),
click_position: event.position,
};
cx.stop_propagation();
return;
}
}

let is_singleton = editor.buffer().read(cx).is_singleton();

if click_count == 2 && !is_singleton {
@@ -691,9 +676,9 @@ impl EditorElement {
}
}

let point_for_position = position_map.point_for_position(event.position);
let position = point_for_position.previous_valid;
let multi_cursor_modifier = Editor::multi_cursor_modifier(true, &modifiers, cx);
if Editor::columnar_selection_modifiers(multi_cursor_modifier, &modifiers) {
if modifiers == COLUMNAR_SELECTION_MODIFIERS {
editor.select(
SelectPhase::BeginColumnar {
position,
@@ -714,6 +699,11 @@ impl EditorElement {
cx,
);
} else {
let multi_cursor_setting = EditorSettings::get_global(cx).multi_cursor_modifier;
let multi_cursor_modifier = match multi_cursor_setting {
MultiCursorModifier::Alt => modifiers.alt,
MultiCursorModifier::CmdOrCtrl => modifiers.secondary(),
};
editor.select(
SelectPhase::Begin {
position,
@@ -831,47 +821,6 @@ impl EditorElement {
let text_hitbox = &position_map.text_hitbox;
let end_selection = editor.has_pending_selection();
let pending_nonempty_selections = editor.has_pending_nonempty_selection();
let point_for_position = position_map.point_for_position(event.position);

match editor.selection_drag_state {
SelectionDragState::ReadyToDrag {
selection: _,
ref click_position,
} => {
if event.position == *click_position {
editor.select(
SelectPhase::Begin {
position: point_for_position.previous_valid,
add: false,
click_count: 1, // ready to drag state only occurs on click count 1
},
window,
cx,
);
editor.selection_drag_state = SelectionDragState::None;
cx.stop_propagation();
return;
}
}
SelectionDragState::Dragging { ref selection, .. } => {
let snapshot = editor.snapshot(window, cx);
let selection_display = selection.map(|anchor| anchor.to_display_point(&snapshot));
if !point_for_position.intersects_selection(&selection_display) {
let is_cut = !event.modifiers.control;
editor.move_selection_on_drop(
&selection.clone(),
point_for_position.previous_valid,
is_cut,
window,
cx,
);
editor.selection_drag_state = SelectionDragState::None;
cx.stop_propagation();
return;
}
}
_ => {}
}

if end_selection {
editor.select(SelectPhase::End, window, cx);
@@ -918,9 +867,13 @@ impl EditorElement {
let text_hitbox = &position_map.text_hitbox;
let pending_nonempty_selections = editor.has_pending_nonempty_selection();

let hovered_link_modifier = Editor::multi_cursor_modifier(false, &event.modifiers(), cx);
let multi_cursor_setting = EditorSettings::get_global(cx).multi_cursor_modifier;
let multi_cursor_modifier = match multi_cursor_setting {
MultiCursorModifier::Alt => event.modifiers().secondary(),
MultiCursorModifier::CmdOrCtrl => event.modifiers().alt,
};

if !pending_nonempty_selections && hovered_link_modifier && text_hitbox.is_hovered(window) {
if !pending_nonempty_selections && multi_cursor_modifier && text_hitbox.is_hovered(window) {
let point = position_map.point_for_position(event.up.position);
editor.handle_click_hovered_link(point, event.modifiers(), window, cx);

@@ -935,15 +888,12 @@ impl EditorElement {
window: &mut Window,
cx: &mut Context<Editor>,
) {
if !editor.has_pending_selection()
&& matches!(editor.selection_drag_state, SelectionDragState::None)
{
if !editor.has_pending_selection() {
return;
}

let text_bounds = position_map.text_hitbox.bounds;
let point_for_position = position_map.point_for_position(event.position);

let mut scroll_delta = gpui::Point::<f32>::default();
let vertical_margin = position_map.line_height.min(text_bounds.size.height / 3.0);
let top = text_bounds.origin.y + vertical_margin;
@@ -975,46 +925,15 @@ impl EditorElement {
scroll_delta.x = scale_horizontal_mouse_autoscroll_delta(event.position.x - right);
}

if !editor.has_pending_selection() {
let drop_anchor = position_map
.snapshot
.display_point_to_anchor(point_for_position.previous_valid, Bias::Left);
match editor.selection_drag_state {
SelectionDragState::Dragging {
ref mut drop_cursor,
..
} => {
drop_cursor.start = drop_anchor;
drop_cursor.end = drop_anchor;
}
SelectionDragState::ReadyToDrag { ref selection, .. } => {
let drop_cursor = Selection {
id: post_inc(&mut editor.selections.next_selection_id),
start: drop_anchor,
end: drop_anchor,
reversed: false,
goal: SelectionGoal::None,
};
editor.selection_drag_state = SelectionDragState::Dragging {
selection: selection.clone(),
drop_cursor,
};
}
_ => {}
}
editor.apply_scroll_delta(scroll_delta, window, cx);
cx.notify();
} else {
editor.select(
SelectPhase::Update {
position: point_for_position.previous_valid,
goal_column: point_for_position.exact_unclipped.column(),
scroll_delta,
},
window,
cx,
);
}
editor.select(
SelectPhase::Update {
position: point_for_position.previous_valid,
goal_column: point_for_position.exact_unclipped.column(),
scroll_delta,
},
window,
cx,
);
}

fn mouse_moved(
@@ -1243,34 +1162,6 @@ impl EditorElement {

let player = editor.current_user_player_color(cx);
selections.push((player, layouts));

if let SelectionDragState::Dragging {
ref selection,
ref drop_cursor,
} = editor.selection_drag_state
{
if drop_cursor
.start
.cmp(&selection.start, &snapshot.buffer_snapshot)
.eq(&Ordering::Less)
|| drop_cursor
.end
.cmp(&selection.end, &snapshot.buffer_snapshot)
.eq(&Ordering::Greater)
{
let drag_cursor_layout = SelectionLayout::new(
drop_cursor.clone(),
false,
CursorShape::Bar,
&snapshot.display_snapshot,
false,
false,
None,
);
let absent_color = cx.theme().players().absent();
selections.push((absent_color, vec![drag_cursor_layout]));
}
}
}

if let Some(collaboration_hub) = &editor.collaboration_hub {
@@ -3990,8 +3881,7 @@ impl EditorElement {

let edit_prediction = if edit_prediction_popover_visible {
self.editor.update(cx, move |editor, cx| {
let accept_binding =
editor.accept_edit_prediction_keybind(false, window, cx);
let accept_binding = editor.accept_edit_prediction_keybind(window, cx);
let mut element = editor.render_edit_prediction_cursor_popover(
min_width,
max_width,
@@ -5240,7 +5130,7 @@ impl EditorElement {
let is_singleton = self.editor.read(cx).is_singleton(cx);

let line_height = layout.position_map.line_height;
window.set_cursor_style(CursorStyle::Arrow, &layout.gutter_hitbox);
window.set_cursor_style(CursorStyle::Arrow, Some(&layout.gutter_hitbox));

for LineNumberLayout {
shaped_line,
@@ -5267,9 +5157,9 @@ impl EditorElement {
// In singleton buffers, we select corresponding lines on the line number click, so use | -like cursor.
// In multi buffers, we open file at the line number clicked, so use a pointing hand cursor.
if is_singleton {
window.set_cursor_style(CursorStyle::IBeam, &hitbox);
window.set_cursor_style(CursorStyle::IBeam, Some(&hitbox));
} else {
window.set_cursor_style(CursorStyle::PointingHand, &hitbox);
window.set_cursor_style(CursorStyle::PointingHand, Some(&hitbox));
}
}
}
@@ -5487,7 +5377,7 @@ impl EditorElement {
.read(cx)
.all_diff_hunks_expanded()
{
window.set_cursor_style(CursorStyle::PointingHand, hunk_hitbox);
window.set_cursor_style(CursorStyle::PointingHand, Some(hunk_hitbox));
}
}
}
@@ -5561,7 +5451,7 @@ impl EditorElement {
|window| {
let editor = self.editor.read(cx);
if editor.mouse_cursor_hidden {
window.set_window_cursor_style(CursorStyle::None);
window.set_cursor_style(CursorStyle::None, None);
} else if editor
.hovered_link_state
.as_ref()
@@ -5569,10 +5459,13 @@ impl EditorElement {
{
window.set_cursor_style(
CursorStyle::PointingHand,
&layout.position_map.text_hitbox,
Some(&layout.position_map.text_hitbox),
);
} else {
window.set_cursor_style(CursorStyle::IBeam, &layout.position_map.text_hitbox);
window.set_cursor_style(
CursorStyle::IBeam,
Some(&layout.position_map.text_hitbox),
);
};

self.paint_lines_background(layout, window, cx);
@@ -5713,7 +5606,6 @@ impl EditorElement {
let Some(scrollbars_layout) = layout.scrollbars_layout.take() else {
return;
};
let any_scrollbar_dragged = self.editor.read(cx).scroll_manager.any_scrollbar_dragged();

for (scrollbar_layout, axis) in scrollbars_layout.iter_scrollbars() {
let hitbox = &scrollbar_layout.hitbox;
@@ -5779,11 +5671,7 @@ impl EditorElement {
BorderStyle::Solid,
));

if any_scrollbar_dragged {
window.set_window_cursor_style(CursorStyle::Arrow);
} else {
window.set_cursor_style(CursorStyle::Arrow, &hitbox);
}
window.set_cursor_style(CursorStyle::Arrow, Some(&hitbox));
}
})
}
@@ -5851,7 +5739,7 @@ impl EditorElement {
}
});

if any_scrollbar_dragged {
if self.editor.read(cx).scroll_manager.any_scrollbar_dragged() {
window.on_mouse_event({
let editor = self.editor.clone();
move |_: &MouseUpEvent, phase, window, cx| {
@@ -6237,7 +6125,6 @@ impl EditorElement {
fn paint_minimap(&self, layout: &mut EditorLayout, window: &mut Window, cx: &mut App) {
if let Some(mut layout) = layout.minimap.take() {
let minimap_hitbox = layout.thumb_layout.hitbox.clone();
let dragging_minimap = self.editor.read(cx).scroll_manager.is_dragging_minimap();

window.paint_layer(layout.thumb_layout.hitbox.bounds, |window| {
window.with_element_namespace("minimap", |window| {
@@ -6289,11 +6176,7 @@ impl EditorElement {
});
});

if dragging_minimap {
window.set_window_cursor_style(CursorStyle::Arrow);
} else {
window.set_cursor_style(CursorStyle::Arrow, &minimap_hitbox);
}
window.set_cursor_style(CursorStyle::Arrow, Some(&minimap_hitbox));

let minimap_axis = ScrollbarAxis::Vertical;
let pixels_per_line = (minimap_hitbox.size.height / layout.max_scroll_top)
@@ -6354,7 +6237,7 @@ impl EditorElement {
}
});

if dragging_minimap {
if self.editor.read(cx).scroll_manager.is_dragging_minimap() {
window.on_mouse_event({
let editor = self.editor.clone();
move |event: &MouseUpEvent, phase, window, cx| {
@@ -6782,7 +6665,7 @@ impl AcceptEditPredictionBinding {
pub fn keystroke(&self) -> Option<&Keystroke> {
if let Some(binding) = self.0.as_ref() {
match &binding.keystrokes() {
[keystroke, ..] => Some(keystroke),
[keystroke] => Some(keystroke),
_ => None,
}
} else {
@@ -9351,35 +9234,6 @@ impl PointForPosition {
None
}
}

pub fn intersects_selection(&self, selection: &Selection<DisplayPoint>) -> bool {
let Some(valid_point) = self.as_valid() else {
return false;
};
let range = selection.range();

let candidate_row = valid_point.row();
let candidate_col = valid_point.column();

let start_row = range.start.row();
let start_col = range.start.column();
let end_row = range.end.row();
let end_col = range.end.column();

if candidate_row < start_row || candidate_row > end_row {
false
} else if start_row == end_row {
candidate_col >= start_col && candidate_col < end_col
} else {
if candidate_row == start_row {
candidate_col >= start_col
} else if candidate_row == end_row {
candidate_col < end_col
} else {
true
}
}
}
}
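Not part of the diff: the row/column containment rule from `intersects_selection`, restated as a standalone sketch over plain `(row, column)` pairs so it can be exercised in isolation. The end column is treated as exclusive, matching the branches above.

```rust
// Does a point fall inside a [start, end) selection range?
fn point_in_range(
    (row, col): (u32, u32),
    (start_row, start_col): (u32, u32),
    (end_row, end_col): (u32, u32),
) -> bool {
    if row < start_row || row > end_row {
        false
    } else if start_row == end_row {
        col >= start_col && col < end_col
    } else if row == start_row {
        col >= start_col
    } else if row == end_row {
        col < end_col
    } else {
        true
    }
}

fn main() {
    // Single-line selection: end column is exclusive.
    assert!(point_in_range((2, 3), (2, 1), (2, 5)));
    assert!(!point_in_range((2, 5), (2, 1), (2, 5)));
    // Multi-line selection: interior rows always intersect.
    assert!(point_in_range((3, 0), (2, 4), (4, 1)));
    assert!(!point_in_range((4, 1), (2, 4), (4, 1)));
}
```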

impl PositionMap {

@@ -1,7 +1,7 @@
use crate::{
Anchor, Editor, EditorSettings, EditorSnapshot, FindAllReferences, GoToDefinition,
GoToTypeDefinition, GotoDefinitionKind, InlayId, Navigated, PointForPosition, SelectPhase,
editor_settings::GoToDefinitionFallback,
editor_settings::{GoToDefinitionFallback, MultiCursorModifier},
hover_popover::{self, InlayHover},
scroll::ScrollAmount,
};
@@ -120,7 +120,11 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
let hovered_link_modifier = Editor::multi_cursor_modifier(false, &modifiers, cx);
let multi_cursor_setting = EditorSettings::get_global(cx).multi_cursor_modifier;
let hovered_link_modifier = match multi_cursor_setting {
MultiCursorModifier::Alt => modifiers.secondary(),
MultiCursorModifier::CmdOrCtrl => modifiers.alt,
};
if !hovered_link_modifier || self.has_pending_selection() {
self.hide_hovered_link(cx);
return;

@@ -869,7 +869,6 @@ impl InfoPopover {
let keyboard_grace = Rc::clone(&self.keyboard_grace);
div()
.id("info_popover")
.occlude()
.elevation_2(cx)
// Prevent a mouse down/move on the popover from being propagated to the editor,
// because that would dismiss the popover.

@@ -42,8 +42,8 @@ where
.selections
.disjoint_anchors()
.iter()
.filter_map(|selection| Some((selection.head(), selection.head().buffer_id?)))
.unique_by(|(_, buffer_id)| *buffer_id)
.filter(|selection| selection.start == selection.end)
.filter_map(|selection| Some((selection.start, selection.start.buffer_id?)))
.filter_map(|(trigger_anchor, buffer_id)| {
let buffer = editor.buffer().read(cx).buffer(buffer_id)?;
let language = buffer.read(cx).language_at(trigger_anchor.text_anchor)?;
@@ -53,6 +53,7 @@ where
None
}
})
.unique_by(|(_, buffer, _)| buffer.read(cx).remote_id())
.collect::<Vec<_>>();

let applicable_buffer_tasks = applicable_buffers

@@ -522,12 +522,4 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
) -> Option<Task<anyhow::Result<project::ProjectTransaction>>> {
None
}

fn pull_diagnostics_for_buffer(
&self,
_: Entity<Buffer>,
_: &mut App,
) -> Task<anyhow::Result<()>> {
Task::ready(Ok(()))
}
}

@@ -132,6 +132,9 @@ pub fn expand_macro_recursively(
window: &mut Window,
cx: &mut Context<Editor>,
) {
if editor.selections.count() == 0 {
return;
}
let Some(project) = &editor.project else {
return;
};

@@ -411,7 +411,7 @@ impl<'a> MutableSelectionsCollection<'a> {
self.collection.display_map(self.cx)
}

pub fn buffer(&self) -> Ref<'_, MultiBufferSnapshot> {
pub fn buffer(&self) -> Ref<MultiBufferSnapshot> {
self.collection.buffer(self.cx)
}

@@ -24,6 +24,7 @@ anyhow.workspace = true
assistant_tool.workspace = true
assistant_tools.workspace = true
async-trait.workspace = true
async-watch.workspace = true
buffer_diff.workspace = true
chrono.workspace = true
clap.workspace = true
@@ -65,6 +66,5 @@ toml.workspace = true
unindent.workspace = true
util.workspace = true
uuid.workspace = true
watch.workspace = true
workspace-hack.workspace = true
zed_llm_client.workspace = true

@@ -385,7 +385,7 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {

extension::init(cx);

let (mut tx, rx) = watch::channel(None);
let (tx, rx) = async_watch::channel(None);
cx.observe_global::<SettingsStore>(move |cx| {
let settings = &ProjectSettings::get_global(cx).node;
let options = NodeBinaryOptions {

@@ -294,7 +294,6 @@ impl ExampleContext {
| ThreadEvent::MessageDeleted(_)
| ThreadEvent::SummaryChanged
| ThreadEvent::SummaryGenerated
| ThreadEvent::ProfileChanged
| ThreadEvent::ReceivedTextChunk
| ThreadEvent::StreamedToolUse { .. }
| ThreadEvent::CheckpointChanged

@@ -306,19 +306,17 @@ impl ExampleInstance {

let thread_store = thread_store.await?;

let profile_id = meta.profile_id.clone();
thread_store.update(cx, |thread_store, cx| thread_store.load_profile_by_id(profile_id, cx)).expect("Failed to load profile");

let thread =
thread_store.update(cx, |thread_store, cx| {
let thread = if let Some(json) = &meta.existing_thread_json {
if let Some(json) = &meta.existing_thread_json {
let serialized = SerializedThread::from_json(json.as_bytes()).expect("Can't read serialized thread");
thread_store.create_thread_from_serialized(serialized, cx)
} else {
thread_store.create_thread(cx)
};
thread.update(cx, |thread, cx| {
thread.set_profile(meta.profile_id.clone(), cx);
});
thread
}
})?;

@@ -87,7 +87,7 @@ impl extension::Extension for WasmExtension {
resource,
)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err}"))?;

Ok(command.into())
}
@@ -113,7 +113,7 @@ impl extension::Extension for WasmExtension {
resource,
)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err}"))?;
anyhow::Ok(options)
}
.boxed()
@@ -136,7 +136,7 @@ impl extension::Extension for WasmExtension {
resource,
)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err}"))?;
anyhow::Ok(options)
}
.boxed()
@@ -161,7 +161,7 @@ impl extension::Extension for WasmExtension {
resource,
)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err}"))?;
anyhow::Ok(options)
}
.boxed()
@@ -186,7 +186,7 @@ impl extension::Extension for WasmExtension {
resource,
)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err}"))?;
anyhow::Ok(options)
}
.boxed()
@@ -208,7 +208,7 @@ impl extension::Extension for WasmExtension {
completions.into_iter().map(Into::into).collect(),
)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err}"))?;

Ok(labels
.into_iter()
@@ -234,7 +234,7 @@ impl extension::Extension for WasmExtension {
symbols.into_iter().map(Into::into).collect(),
)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err}"))?;

Ok(labels
.into_iter()
@@ -256,7 +256,7 @@ impl extension::Extension for WasmExtension {
let completions = extension
.call_complete_slash_command_argument(store, &command.into(), &arguments)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err}"))?;

Ok(completions.into_iter().map(Into::into).collect())
}
@@ -282,7 +282,7 @@ impl extension::Extension for WasmExtension {
let output = extension
.call_run_slash_command(store, &command.into(), &arguments, resource)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err}"))?;

Ok(output.into())
}
@@ -302,7 +302,7 @@ impl extension::Extension for WasmExtension {
let command = extension
.call_context_server_command(store, context_server_id.clone(), project_resource)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err}"))?;
anyhow::Ok(command.into())
}
.boxed()
@@ -325,7 +325,7 @@ impl extension::Extension for WasmExtension {
project_resource,
)
.await?
.map_err(|err| store.data().extension_error(err))?
.map_err(|err| anyhow!("{err}"))?
else {
return Ok(None);
};
@@ -343,7 +343,7 @@ impl extension::Extension for WasmExtension {
let packages = extension
.call_suggest_docs_packages(store, provider.as_ref())
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err:?}"))?;

Ok(packages)
}
@@ -369,7 +369,7 @@ impl extension::Extension for WasmExtension {
kv_store_resource,
)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err:?}"))?;

anyhow::Ok(())
}
@@ -390,7 +390,7 @@ impl extension::Extension for WasmExtension {
let dap_binary = extension
.call_get_dap_binary(store, dap_name, config, user_installed_path, resource)
.await?
.map_err(|err| store.data().extension_error(err))?;
.map_err(|err| anyhow!("{err:?}"))?;
let dap_binary = dap_binary.try_into()?;
Ok(dap_binary)
}
@@ -406,7 +406,7 @@ impl extension::Extension for WasmExtension {
.call_dap_schema(store)
.await
.and_then(|schema| serde_json::to_value(schema).map_err(|err| err.to_string()))
.map_err(|err| store.data().extension_error(err))
.map_err(|err| anyhow!(err.to_string()))
}
.boxed()
})
@@ -680,15 +680,6 @@ impl WasmState {
fn work_dir(&self) -> PathBuf {
self.host.work_dir.join(self.manifest.id.as_ref())
}

fn extension_error(&self, message: String) -> anyhow::Error {
anyhow!(
"from extension \"{}\" version {}: {}",
self.manifest.name,
self.manifest.version,
message
)
}
}
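Not part of the diff: a standalone sketch of the error-wrapping pattern from `WasmState::extension_error` above, with plain string arguments standing in for the real manifest fields; only the `anyhow` crate is assumed.

```rust
use anyhow::anyhow;

// Prefix an extension-reported error string with the extension's identity,
// mirroring the helper shown in the hunk above.
fn extension_error(name: &str, version: &str, message: String) -> anyhow::Error {
    anyhow!("from extension \"{}\" version {}: {}", name, version, message)
}

fn main() {
    let err = extension_error("my-extension", "0.1.0", "failed to load grammar".into());
    assert!(err.to_string().starts_with("from extension \"my-extension\""));
}
```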

impl wasi::WasiView for WasmState {
@@ -724,7 +715,7 @@ impl IncrementalCompilationCache {
}

impl CacheStore for IncrementalCompilationCache {
fn get(&self, key: &[u8]) -> Option<Cow<'_, [u8]>> {
fn get(&self, key: &[u8]) -> Option<Cow<[u8]>> {
self.cache.get(key).map(|v| v.into())
}

@@ -459,7 +459,7 @@ enum Match {
}

impl Match {
fn relative_path(&self) -> Option<&Arc<Path>> {
fn path(&self) -> Option<&Arc<Path>> {
match self {
Match::History { path, .. } => Some(&path.project.path),
Match::Search(panel_match) => Some(&panel_match.0.path),
@@ -467,26 +467,6 @@ impl Match {
}
}

fn abs_path(&self, project: &Entity<Project>, cx: &App) -> Option<PathBuf> {
match self {
Match::History { path, .. } => path.absolute.clone().or_else(|| {
project
.read(cx)
.worktree_for_id(path.project.worktree_id, cx)?
.read(cx)
.absolutize(&path.project.path)
.ok()
}),
Match::Search(ProjectPanelOrdMatch(path_match)) => project
.read(cx)
.worktree_for_id(WorktreeId::from_usize(path_match.worktree_id), cx)?
.read(cx)
.absolutize(&path_match.path)
.ok(),
Match::CreateNew(_) => None,
}
}

fn panel_match(&self) -> Option<&ProjectPanelOrdMatch> {
match self {
Match::History { panel_match, .. } => panel_match.as_ref(),
@@ -521,7 +501,7 @@ impl Matches {
// reason for the matches set to change.
self.matches
.iter()
.position(|m| match m.relative_path() {
.position(|m| match m.path() {
Some(p) => path.project.path == *p,
None => false,
})
@@ -1590,8 +1570,7 @@ impl PickerDelegate for FileFinderDelegate {
if !settings.file_icons {
return None;
}
let abs_path = path_match.abs_path(&self.project, cx)?;
let file_name = abs_path.file_name()?;
let file_name = path_match.path()?.file_name()?;
let icon = FileIcons::get_icon(file_name.as_ref(), cx)?;
Some(Icon::from_path(icon).color(Color::Muted))
});

@@ -5,7 +5,7 @@ use futures::future::{self, BoxFuture};
use git::{
blame::Blame,
repository::{
AskPassDelegate, Branch, CommitDetails, CommitOptions, FetchOptions, GitRepository,
AskPassDelegate, Branch, CommitDetails, CommitOptions, GitRepository,
GitRepositoryCheckpoint, PushOptions, Remote, RepoPath, ResetMode,
},
status::{FileStatus, GitStatus, StatusCode, TrackedStatus, UnmergedStatus},
@@ -405,7 +405,6 @@ impl GitRepository for FakeGitRepository {

fn fetch(
&self,
_fetch_options: FetchOptions,
_askpass: AskPassDelegate,
_env: Arc<HashMap<String, String>>,
_cx: AsyncApp,

@@ -46,11 +46,9 @@ actions!(
TrashUntrackedFiles,
Uncommit,
Push,
PushTo,
ForcePush,
Pull,
Fetch,
FetchFrom,
Commit,
Amend,
Cancel,

@@ -193,44 +193,6 @@ pub enum ResetMode {
Mixed,
}

#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub enum FetchOptions {
All,
Remote(Remote),
}

impl FetchOptions {
pub fn to_proto(&self) -> Option<String> {
match self {
FetchOptions::All => None,
FetchOptions::Remote(remote) => Some(remote.clone().name.into()),
}
}

pub fn from_proto(remote_name: Option<String>) -> Self {
match remote_name {
Some(name) => FetchOptions::Remote(Remote { name: name.into() }),
None => FetchOptions::All,
}
}

pub fn name(&self) -> SharedString {
match self {
Self::All => "Fetch all remotes".into(),
Self::Remote(remote) => remote.name.clone(),
}
}
}

impl std::fmt::Display for FetchOptions {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
FetchOptions::All => write!(f, "--all"),
FetchOptions::Remote(remote) => write!(f, "{}", remote.name),
}
}
}
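Not part of the diff: a sketch of the proto round trip encoded by `to_proto`/`from_proto` above, using a simplified mirror of `FetchOptions` with a plain `String` in place of `Remote` so it runs on its own.

```rust
#[derive(Debug, Clone, PartialEq, Eq)]
enum FetchOptions {
    All,
    Remote(String),
}

impl FetchOptions {
    // `None` means "fetch all remotes"; `Some(name)` names a single remote.
    fn to_proto(&self) -> Option<String> {
        match self {
            FetchOptions::All => None,
            FetchOptions::Remote(name) => Some(name.clone()),
        }
    }

    fn from_proto(remote_name: Option<String>) -> Self {
        match remote_name {
            Some(name) => FetchOptions::Remote(name),
            None => FetchOptions::All,
        }
    }
}

fn main() {
    for options in [FetchOptions::All, FetchOptions::Remote("origin".into())] {
        assert_eq!(FetchOptions::from_proto(options.to_proto()), options);
    }
}
```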

/// Modifies .git/info/exclude temporarily
pub struct GitExcludeOverride {
git_exclude_path: PathBuf,
@@ -323,7 +285,7 @@ pub trait GitRepository: Send + Sync {
/// Resolve a list of refs to SHAs.
fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<Result<Vec<Option<String>>>>;

fn head_sha(&self) -> BoxFuture<'_, Option<String>> {
fn head_sha(&self) -> BoxFuture<Option<String>> {
async move {
self.revparse_batch(vec!["HEAD".into()])
.await
@@ -419,7 +381,6 @@ pub trait GitRepository: Send + Sync {

fn fetch(
&self,
fetch_options: FetchOptions,
askpass: AskPassDelegate,
env: Arc<HashMap<String, String>>,
// This method takes an AsyncApp to ensure it's invoked on the main thread,
@@ -525,7 +486,7 @@ impl GitRepository for RealGitRepository {
repo.commondir().into()
}

fn show(&self, commit: String) -> BoxFuture<'_, Result<CommitDetails>> {
fn show(&self, commit: String) -> BoxFuture<Result<CommitDetails>> {
let working_directory = self.working_directory();
self.executor
.spawn(async move {
@@ -561,7 +522,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<'_, Result<CommitDiff>> {
fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<Result<CommitDiff>> {
let Some(working_directory) = self.repository.lock().workdir().map(ToOwned::to_owned)
else {
return future::ready(Err(anyhow!("no working directory"))).boxed();
@@ -668,7 +629,7 @@ impl GitRepository for RealGitRepository {
commit: String,
mode: ResetMode,
env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
) -> BoxFuture<Result<()>> {
async move {
let working_directory = self.working_directory();

@@ -698,7 +659,7 @@ impl GitRepository for RealGitRepository {
commit: String,
paths: Vec<RepoPath>,
env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();
async move {
@@ -723,7 +684,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn load_index_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
fn load_index_text(&self, path: RepoPath) -> BoxFuture<Option<String>> {
// https://git-scm.com/book/en/v2/Git-Internals-Git-Objects
const GIT_MODE_SYMLINK: u32 = 0o120000;

@@ -756,7 +717,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>> {
fn load_committed_text(&self, path: RepoPath) -> BoxFuture<Option<String>> {
let repo = self.repository.clone();
self.executor
.spawn(async move {
@@ -777,7 +738,7 @@ impl GitRepository for RealGitRepository {
path: RepoPath,
content: Option<String>,
env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, anyhow::Result<()>> {
) -> BoxFuture<anyhow::Result<()>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();
self.executor
@@ -841,7 +802,7 @@ impl GitRepository for RealGitRepository {
remote.url().map(|url| url.to_string())
}

fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<Result<Vec<Option<String>>>> {
let working_directory = self.working_directory();
self.executor
.spawn(async move {
@@ -891,14 +852,14 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn merge_message(&self) -> BoxFuture<'_, Option<String>> {
fn merge_message(&self) -> BoxFuture<Option<String>> {
let path = self.path().join("MERGE_MSG");
self.executor
.spawn(async move { std::fs::read_to_string(&path).ok() })
.boxed()
}

fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<'_, Result<GitStatus>> {
fn status(&self, path_prefixes: &[RepoPath]) -> BoxFuture<Result<GitStatus>> {
let git_binary_path = self.git_binary_path.clone();
let working_directory = self.working_directory();
let path_prefixes = path_prefixes.to_owned();
@@ -919,7 +880,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
fn branches(&self) -> BoxFuture<Result<Vec<Branch>>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();
self.executor
@@ -986,7 +947,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
fn change_branch(&self, name: String) -> BoxFuture<Result<()>> {
let repo = self.repository.clone();
self.executor
.spawn(async move {
@@ -1018,7 +979,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
fn create_branch(&self, name: String) -> BoxFuture<Result<()>> {
let repo = self.repository.clone();
self.executor
.spawn(async move {
@@ -1030,7 +991,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<'_, Result<crate::blame::Blame>> {
fn blame(&self, path: RepoPath, content: Rope) -> BoxFuture<Result<crate::blame::Blame>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();

@@ -1052,7 +1013,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result<String>> {
fn diff(&self, diff: DiffType) -> BoxFuture<Result<String>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();
self.executor
@@ -1083,7 +1044,7 @@ impl GitRepository for RealGitRepository {
&self,
paths: Vec<RepoPath>,
env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();
self.executor
@@ -1111,7 +1072,7 @@ impl GitRepository for RealGitRepository {
&self,
paths: Vec<RepoPath>,
env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();

@@ -1143,7 +1104,7 @@ impl GitRepository for RealGitRepository {
name_and_email: Option<(SharedString, SharedString)>,
options: CommitOptions,
env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory();
self.executor
.spawn(async move {
@@ -1182,7 +1143,7 @@ impl GitRepository for RealGitRepository {
ask_pass: AskPassDelegate,
env: Arc<HashMap<String, String>>,
cx: AsyncApp,
) -> BoxFuture<'_, Result<RemoteCommandOutput>> {
) -> BoxFuture<Result<RemoteCommandOutput>> {
let working_directory = self.working_directory();
let executor = cx.background_executor().clone();
async move {
@@ -1214,7 +1175,7 @@ impl GitRepository for RealGitRepository {
ask_pass: AskPassDelegate,
env: Arc<HashMap<String, String>>,
cx: AsyncApp,
) -> BoxFuture<'_, Result<RemoteCommandOutput>> {
) -> BoxFuture<Result<RemoteCommandOutput>> {
let working_directory = self.working_directory();
let executor = cx.background_executor().clone();
async move {
@@ -1235,20 +1196,18 @@ impl GitRepository for RealGitRepository {

fn fetch(
&self,
fetch_options: FetchOptions,
ask_pass: AskPassDelegate,
env: Arc<HashMap<String, String>>,
cx: AsyncApp,
) -> BoxFuture<'_, Result<RemoteCommandOutput>> {
) -> BoxFuture<Result<RemoteCommandOutput>> {
let working_directory = self.working_directory();
let remote_name = format!("{}", fetch_options);
let executor = cx.background_executor().clone();
async move {
let mut command = new_smol_command("git");
command
.envs(env.iter())
.current_dir(&working_directory?)
.args(["fetch", &remote_name])
.args(["fetch", "--all"])
.stdout(smol::process::Stdio::piped())
.stderr(smol::process::Stdio::piped());

@@ -1257,7 +1216,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn get_remotes(&self, branch_name: Option<String>) -> BoxFuture<'_, Result<Vec<Remote>>> {
fn get_remotes(&self, branch_name: Option<String>) -> BoxFuture<Result<Vec<Remote>>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();
self.executor
@@ -1303,7 +1262,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result<Vec<SharedString>>> {
fn check_for_pushed_commit(&self) -> BoxFuture<Result<Vec<SharedString>>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();
self.executor
@@ -1396,7 +1355,7 @@ impl GitRepository for RealGitRepository {
.boxed()
}

fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> {
fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<Result<()>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();

@@ -1435,7 +1394,7 @@ impl GitRepository for RealGitRepository {
&self,
left: GitRepositoryCheckpoint,
right: GitRepositoryCheckpoint,
) -> BoxFuture<'_, Result<bool>> {
) -> BoxFuture<Result<bool>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();

@@ -1474,7 +1433,7 @@ impl GitRepository for RealGitRepository {
&self,
base_checkpoint: GitRepositoryCheckpoint,
target_checkpoint: GitRepositoryCheckpoint,
) -> BoxFuture<'_, Result<String>> {
) -> BoxFuture<Result<String>> {
let working_directory = self.working_directory();
let git_binary_path = self.git_binary_path.clone();

@@ -20,19 +20,18 @@ use editor::{
use futures::StreamExt as _;
use git::blame::ParsedCommitMessage;
use git::repository::{
Branch, CommitDetails, CommitOptions, CommitSummary, DiffType, FetchOptions, PushOptions,
Remote, RemoteCommandOutput, ResetMode, Upstream, UpstreamTracking, UpstreamTrackingStatus,
Branch, CommitDetails, CommitOptions, CommitSummary, DiffType, PushOptions, Remote,
RemoteCommandOutput, ResetMode, Upstream, UpstreamTracking, UpstreamTrackingStatus,
};
use git::status::StageStatus;
use git::{Amend, ToggleStaged, repository::RepoPath, status::FileStatus};
use git::{ExpandCommitEditor, RestoreTrackedFiles, StageAll, TrashUntrackedFiles, UnstageAll};
use gpui::{
Action, Animation, AnimationExt as _, AsyncApp, AsyncWindowContext, Axis, ClickEvent, Corner,
DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, KeyContext,
ListHorizontalSizingBehavior, ListSizingBehavior, Modifiers, ModifiersChangedEvent,
MouseButton, MouseDownEvent, Point, PromptLevel, ScrollStrategy, Subscription, Task,
Transformation, UniformListScrollHandle, WeakEntity, actions, anchored, deferred, percentage,
uniform_list,
Action, Animation, AnimationExt as _, Axis, ClickEvent, Corner, DismissEvent, Entity,
EventEmitter, FocusHandle, Focusable, KeyContext, ListHorizontalSizingBehavior,
ListSizingBehavior, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, Point,
PromptLevel, ScrollStrategy, Subscription, Task, Transformation, UniformListScrollHandle,
WeakEntity, actions, anchored, deferred, percentage, uniform_list,
};
use itertools::Itertools;
use language::{Buffer, File};
@@ -64,11 +63,11 @@ use ui::{
Tooltip, prelude::*,
};
use util::{ResultExt, TryFutureExt, maybe};

use workspace::AppState;
use workspace::{
Workspace,
dock::{DockPosition, Panel, PanelEvent},
notifications::{DetachAndPromptErr, ErrorMessagePrompt, NotificationId},
notifications::DetachAndPromptErr,
};
use zed_llm_client::CompletionIntent;

@@ -384,154 +383,151 @@ pub(crate) fn commit_message_editor(
commit_editor.set_show_gutter(false, cx);
commit_editor.set_show_wrap_guides(false, cx);
commit_editor.set_show_indent_guides(false, cx);
commit_editor.set_hard_wrap(Some(72), cx);
let placeholder = placeholder.unwrap_or("Enter commit message".into());
commit_editor.set_placeholder_text(placeholder, cx);
commit_editor
}

impl GitPanel {
fn new(
workspace: &mut Workspace,
pub fn new(
workspace: Entity<Workspace>,
project: Entity<Project>,
app_state: Arc<AppState>,
window: &mut Window,
cx: &mut Context<Workspace>,
) -> Entity<Self> {
let project = workspace.project().clone();
let app_state = workspace.app_state().clone();
cx: &mut Context<Self>,
) -> Self {
let fs = app_state.fs.clone();
let git_store = project.read(cx).git_store().clone();
let active_repository = project.read(cx).active_repository(cx);
let workspace = workspace.downgrade();

let git_panel = cx.new(|cx| {
let focus_handle = cx.focus_handle();
cx.on_focus(&focus_handle, window, Self::focus_in).detach();
cx.on_focus_out(&focus_handle, window, |this, _, window, cx| {
this.hide_scrollbars(window, cx);
})
.detach();
let focus_handle = cx.focus_handle();
cx.on_focus(&focus_handle, window, Self::focus_in).detach();
cx.on_focus_out(&focus_handle, window, |this, _, window, cx| {
this.hide_scrollbars(window, cx);
})
.detach();

let mut was_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
cx.observe_global::<SettingsStore>(move |this, cx| {
let is_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
if is_sort_by_path != was_sort_by_path {
this.update_visible_entries(cx);
}
was_sort_by_path = is_sort_by_path
})
.detach();
let mut was_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
cx.observe_global::<SettingsStore>(move |this, cx| {
let is_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
if is_sort_by_path != was_sort_by_path {
this.update_visible_entries(cx);
}
was_sort_by_path = is_sort_by_path
})
.detach();

// just to let us render a placeholder editor.
// Once the active git repo is set, this buffer will be replaced.
let temporary_buffer = cx.new(|cx| Buffer::local("", cx));
let commit_editor = cx.new(|cx| {
commit_message_editor(temporary_buffer, None, project.clone(), true, window, cx)
});

commit_editor.update(cx, |editor, cx| {
editor.clear(window, cx);
});

let scroll_handle = UniformListScrollHandle::new();

let vertical_scrollbar = ScrollbarProperties {
axis: Axis::Vertical,
state: ScrollbarState::new(scroll_handle.clone()).parent_entity(&cx.entity()),
show_scrollbar: false,
show_track: false,
auto_hide: false,
hide_task: None,
};

let horizontal_scrollbar = ScrollbarProperties {
axis: Axis::Horizontal,
state: ScrollbarState::new(scroll_handle.clone()).parent_entity(&cx.entity()),
show_scrollbar: false,
show_track: false,
auto_hide: false,
hide_task: None,
};

let mut assistant_enabled = AgentSettings::get_global(cx).enabled;
let _settings_subscription = cx.observe_global::<SettingsStore>(move |_, cx| {
if assistant_enabled != AgentSettings::get_global(cx).enabled {
assistant_enabled = AgentSettings::get_global(cx).enabled;
cx.notify();
}
});

cx.subscribe_in(
&git_store,
window,
move |this, _git_store, event, window, cx| match event {
GitStoreEvent::ActiveRepositoryChanged(_) => {
this.active_repository = this.project.read(cx).active_repository(cx);
this.schedule_update(true, window, cx);
}
GitStoreEvent::RepositoryUpdated(
_,
RepositoryEvent::Updated { full_scan },
true,
) => {
this.schedule_update(*full_scan, window, cx);
}

GitStoreEvent::RepositoryAdded(_) | GitStoreEvent::RepositoryRemoved(_) => {
this.schedule_update(false, window, cx);
}
GitStoreEvent::IndexWriteError(error) => {
this.workspace
.update(cx, |workspace, cx| {
workspace.show_error(error, cx);
})
.ok();
}
GitStoreEvent::RepositoryUpdated(_, _, _) => {}
GitStoreEvent::JobsUpdated | GitStoreEvent::ConflictsUpdated => {}
},
)
.detach();

let mut this = Self {
active_repository,
commit_editor,
conflicted_count: 0,
conflicted_staged_count: 0,
current_modifiers: window.modifiers(),
add_coauthors: true,
generate_commit_message_task: None,
entries: Vec::new(),
focus_handle: cx.focus_handle(),
fs,
new_count: 0,
new_staged_count: 0,
pending: Vec::new(),
pending_commit: None,
amend_pending: false,
pending_serialization: Task::ready(None),
single_staged_entry: None,
single_tracked_entry: None,
project,
scroll_handle,
max_width_item_index: None,
selected_entry: None,
marked_entries: Vec::new(),
tracked_count: 0,
tracked_staged_count: 0,
update_visible_entries_task: Task::ready(()),
width: None,
show_placeholders: false,
context_menu: None,
workspace: workspace.weak_handle(),
modal_open: false,
entry_count: 0,
horizontal_scrollbar,
vertical_scrollbar,
_settings_subscription,
};

this.schedule_update(false, window, cx);
this
// just to let us render a placeholder editor.
// Once the active git repo is set, this buffer will be replaced.
let temporary_buffer = cx.new(|cx| Buffer::local("", cx));
let commit_editor = cx.new(|cx| {
commit_message_editor(temporary_buffer, None, project.clone(), true, window, cx)
});

commit_editor.update(cx, |editor, cx| {
editor.clear(window, cx);
});

let scroll_handle = UniformListScrollHandle::new();

cx.subscribe_in(
&git_store,
window,
move |this, git_store, event, window, cx| match event {
GitStoreEvent::ActiveRepositoryChanged(_) => {
this.active_repository = git_store.read(cx).active_repository();
this.schedule_update(true, window, cx);
}
GitStoreEvent::RepositoryUpdated(
_,
RepositoryEvent::Updated { full_scan },
true,
) => {
this.schedule_update(*full_scan, window, cx);
}

GitStoreEvent::RepositoryAdded(_) | GitStoreEvent::RepositoryRemoved(_) => {
this.schedule_update(false, window, cx);
}
GitStoreEvent::IndexWriteError(error) => {
this.workspace
.update(cx, |workspace, cx| {
workspace.show_error(error, cx);
})
.ok();
}
GitStoreEvent::RepositoryUpdated(_, _, _) => {}
GitStoreEvent::JobsUpdated | GitStoreEvent::ConflictsUpdated => {}
},
)
.detach();

let vertical_scrollbar = ScrollbarProperties {
axis: Axis::Vertical,
state: ScrollbarState::new(scroll_handle.clone()).parent_entity(&cx.entity()),
show_scrollbar: false,
show_track: false,
auto_hide: false,
hide_task: None,
};

let horizontal_scrollbar = ScrollbarProperties {
axis: Axis::Horizontal,
state: ScrollbarState::new(scroll_handle.clone()).parent_entity(&cx.entity()),
show_scrollbar: false,
show_track: false,
auto_hide: false,
hide_task: None,
};

let mut assistant_enabled = AgentSettings::get_global(cx).enabled;
let _settings_subscription = cx.observe_global::<SettingsStore>(move |_, cx| {
if assistant_enabled != AgentSettings::get_global(cx).enabled {
assistant_enabled = AgentSettings::get_global(cx).enabled;
cx.notify();
}
});

let mut git_panel = Self {
active_repository,
commit_editor,
conflicted_count: 0,
conflicted_staged_count: 0,
current_modifiers: window.modifiers(),
add_coauthors: true,
generate_commit_message_task: None,
entries: Vec::new(),
focus_handle: cx.focus_handle(),
fs,
new_count: 0,
new_staged_count: 0,
pending: Vec::new(),
pending_commit: None,
amend_pending: false,
pending_serialization: Task::ready(None),
single_staged_entry: None,
single_tracked_entry: None,
project,
scroll_handle,
max_width_item_index: None,
selected_entry: None,
marked_entries: Vec::new(),
tracked_count: 0,
tracked_staged_count: 0,
update_visible_entries_task: Task::ready(()),
width: None,
show_placeholders: false,
context_menu: None,
workspace,
modal_open: false,
entry_count: 0,
horizontal_scrollbar,
vertical_scrollbar,
_settings_subscription,
};
git_panel.schedule_update(false, window, cx);
git_panel
}
|
||||
|
||||
@@ -1487,48 +1483,15 @@ impl GitPanel {
        }
    }

    fn custom_or_suggested_commit_message(
        &self,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Option<String> {
        let git_commit_language = self.commit_editor.read(cx).language_at(0, cx);
    fn custom_or_suggested_commit_message(&self, cx: &mut Context<Self>) -> Option<String> {
        let message = self.commit_editor.read(cx).text(cx);
        if message.is_empty() {
            return self
                .suggest_commit_message(cx)
                .filter(|message| !message.trim().is_empty());
        } else if message.trim().is_empty() {
            return None;
        }
        let buffer = cx.new(|cx| {
            let mut buffer = Buffer::local(message, cx);
            buffer.set_language(git_commit_language, cx);
            buffer
        });
        let editor = cx.new(|cx| Editor::for_buffer(buffer, None, window, cx));
        let wrapped_message = editor.update(cx, |editor, cx| {
            editor.select_all(&Default::default(), window, cx);
            editor.rewrap(&Default::default(), window, cx);
            editor.text(cx)
        });
        if wrapped_message.trim().is_empty() {
            return None;
        }
        Some(wrapped_message)
    }

    fn has_commit_message(&self, cx: &mut Context<Self>) -> bool {
        let text = self.commit_editor.read(cx).text(cx);
        if !text.trim().is_empty() {
            return true;
        } else if text.is_empty() {
            return self
                .suggest_commit_message(cx)
                .is_some_and(|text| !text.trim().is_empty());
        } else {
            return false;
        if !message.trim().is_empty() {
            return Some(message);
        }

        self.suggest_commit_message(cx)
            .filter(|message| !message.trim().is_empty())
    }

    pub(crate) fn commit_changes(
@@ -1557,7 +1520,7 @@ impl GitPanel {
            return;
        }

        let commit_message = self.custom_or_suggested_commit_message(window, cx);
        let commit_message = self.custom_or_suggested_commit_message(cx);

        let Some(mut message) = commit_message else {
            self.commit_editor.read(cx).focus_handle(cx).focus(window);
@@ -1779,19 +1742,7 @@ impl GitPanel {
                this.generate_commit_message_task.take();
            });

            let mut diff_text = match diff.await {
                Ok(result) => match result {
                    Ok(text) => text,
                    Err(e) => {
                        Self::show_commit_message_error(&this, &e, cx);
                        return anyhow::Ok(());
                    }
                },
                Err(e) => {
                    Self::show_commit_message_error(&this, &e, cx);
                    return anyhow::Ok(());
                }
            };
            let mut diff_text = diff.await??;

            const ONE_MB: usize = 1_000_000;
            if diff_text.len() > ONE_MB {
@@ -1829,37 +1780,26 @@ impl GitPanel {
            };

            let stream = model.stream_completion_text(request, &cx);
            match stream.await {
                Ok(mut messages) => {
                    if !text_empty {
                        this.update(cx, |this, cx| {
                            this.commit_message_buffer(cx).update(cx, |buffer, cx| {
                                let insert_position = buffer.anchor_before(buffer.len());
                                buffer.edit([(insert_position..insert_position, "\n")], None, cx)
                            });
                        })?;
                    }
            let mut messages = stream.await?;

                    while let Some(message) = messages.stream.next().await {
                        match message {
                            Ok(text) => {
                                this.update(cx, |this, cx| {
                                    this.commit_message_buffer(cx).update(cx, |buffer, cx| {
                                        let insert_position = buffer.anchor_before(buffer.len());
                                        buffer.edit([(insert_position..insert_position, text)], None, cx);
                                    });
                                })?;
                            }
                            Err(e) => {
                                Self::show_commit_message_error(&this, &e, cx);
                                break;
                            }
                        }
                    }
                }
                Err(e) => {
                    Self::show_commit_message_error(&this, &e, cx);
                }
            if !text_empty {
                this.update(cx, |this, cx| {
                    this.commit_message_buffer(cx).update(cx, |buffer, cx| {
                        let insert_position = buffer.anchor_before(buffer.len());
                        buffer.edit([(insert_position..insert_position, "\n")], None, cx)
                    });
                })?;
            }

            while let Some(message) = messages.stream.next().await {
                let text = message?;

                this.update(cx, |this, cx| {
                    this.commit_message_buffer(cx).update(cx, |buffer, cx| {
                        let insert_position = buffer.anchor_before(buffer.len());
                        buffer.edit([(insert_position..insert_position, text)], None, cx);
                    });
                })?;
            }

            anyhow::Ok(())
@@ -1868,49 +1808,7 @@ impl GitPanel {
        }));
    }

    fn get_fetch_options(
        &self,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Task<Option<FetchOptions>> {
        let repo = self.active_repository.clone();
        let workspace = self.workspace.clone();

        cx.spawn_in(window, async move |_, cx| {
            let repo = repo?;
            let remotes = repo
                .update(cx, |repo, _| repo.get_remotes(None))
                .ok()?
                .await
                .ok()?
                .log_err()?;

            let mut remotes: Vec<_> = remotes.into_iter().map(FetchOptions::Remote).collect();
            if remotes.len() > 1 {
                remotes.push(FetchOptions::All);
            }
            let selection = cx
                .update(|window, cx| {
                    picker_prompt::prompt(
                        "Pick which remote to fetch",
                        remotes.iter().map(|r| r.name()).collect(),
                        workspace,
                        window,
                        cx,
                    )
                })
                .ok()?
                .await?;
            remotes.get(selection).cloned()
        })
    }

    pub(crate) fn fetch(
        &mut self,
        is_fetch_all: bool,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
    pub(crate) fn fetch(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        if !self.can_push_and_pull(cx) {
            return;
        }
@@ -1921,28 +1819,13 @@ impl GitPanel {
        telemetry::event!("Git Fetched");
        let askpass = self.askpass_delegate("git fetch", window, cx);
        let this = cx.weak_entity();

        let fetch_options = if is_fetch_all {
            Task::ready(Some(FetchOptions::All))
        } else {
            self.get_fetch_options(window, cx)
        };

        window
            .spawn(cx, async move |cx| {
                let Some(fetch_options) = fetch_options.await else {
                    return Ok(());
                };
                let fetch = repo.update(cx, |repo, cx| {
                    repo.fetch(fetch_options.clone(), askpass, cx)
                })?;
                let fetch = repo.update(cx, |repo, cx| repo.fetch(askpass, cx))?;

                let remote_message = fetch.await?;
                this.update(cx, |this, cx| {
                    let action = match fetch_options {
                        FetchOptions::All => RemoteAction::Fetch(None),
                        FetchOptions::Remote(remote) => RemoteAction::Fetch(Some(remote)),
                    };
                    let action = RemoteAction::Fetch;
                    match remote_message {
                        Ok(remote_message) => this.show_remote_output(action, remote_message, cx),
                        Err(e) => {
@@ -2053,7 +1936,7 @@ impl GitPanel {
        };
        telemetry::event!("Git Pulled");
        let branch = branch.clone();
        let remote = self.get_remote(false, window, cx);
        let remote = self.get_current_remote(window, cx);
        cx.spawn_in(window, async move |this, cx| {
            let remote = match remote.await {
                Ok(Some(remote)) => remote,
@@ -2098,13 +1981,7 @@ impl GitPanel {
            .detach_and_log_err(cx);
    }

    pub(crate) fn push(
        &mut self,
        force_push: bool,
        select_remote: bool,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
    pub(crate) fn push(&mut self, force_push: bool, window: &mut Window, cx: &mut Context<Self>) {
        if !self.can_push_and_pull(cx) {
            return;
        }
@@ -2129,7 +2006,7 @@ impl GitPanel {
                _ => None,
            }
        };
        let remote = self.get_remote(select_remote, window, cx);
        let remote = self.get_current_remote(window, cx);

        cx.spawn_in(window, async move |this, cx| {
            let remote = match remote.await {
@@ -2203,9 +2080,8 @@ impl GitPanel {
        !self.project.read(cx).is_via_collab()
    }

    fn get_remote(
    fn get_current_remote(
        &mut self,
        always_select: bool,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = anyhow::Result<Option<Remote>>> + use<> {
@@ -2215,37 +2091,38 @@ impl GitPanel {

        async move {
            let repo = repo.context("No active repository")?;
            let current_remotes: Vec<Remote> = repo
            let mut current_remotes: Vec<Remote> = repo
                .update(&mut cx, |repo, _| {
                    let current_branch = if always_select {
                        None
                    } else {
                        let current_branch = repo.branch.as_ref().context("No active branch")?;
                        Some(current_branch.name().to_string())
                    };
                    anyhow::Ok(repo.get_remotes(current_branch))
                    let current_branch = repo.branch.as_ref().context("No active branch")?;
                    anyhow::Ok(repo.get_remotes(Some(current_branch.name().to_string())))
                })??
                .await??;

            let current_remotes: Vec<_> = current_remotes
                .into_iter()
                .map(|remotes| remotes.name)
                .collect();
            let selection = cx
                .update(|window, cx| {
                    picker_prompt::prompt(
                        "Pick which remote to push to",
                        current_remotes.clone(),
                        workspace,
                        window,
                        cx,
                    )
                })?
                .await;
            if current_remotes.len() == 0 {
                anyhow::bail!("No active remote");
            } else if current_remotes.len() == 1 {
                return Ok(Some(current_remotes.pop().unwrap()));
            } else {
                let current_remotes: Vec<_> = current_remotes
                    .into_iter()
                    .map(|remotes| remotes.name)
                    .collect();
                let selection = cx
                    .update(|window, cx| {
                        picker_prompt::prompt(
                            "Pick which remote to push to",
                            current_remotes.clone(),
                            workspace,
                            window,
                            cx,
                        )
                    })?
                    .await;

            Ok(selection.map(|selection| Remote {
                name: current_remotes[selection].clone(),
            }))
                Ok(selection.map(|selection| Remote {
                    name: current_remotes[selection].clone(),
                }))
            }
        }
    }

@@ -2717,26 +2594,6 @@ impl GitPanel {
        }
    }

    fn show_commit_message_error<E>(weak_this: &WeakEntity<Self>, err: &E, cx: &mut AsyncApp)
    where
        E: std::fmt::Debug + std::fmt::Display,
    {
        if let Ok(Some(workspace)) = weak_this.update(cx, |this, _cx| this.workspace.upgrade()) {
            let _ = workspace.update(cx, |workspace, cx| {
                struct CommitMessageError;
                let notification_id = NotificationId::unique::<CommitMessageError>();
                workspace.show_notification(notification_id, cx, |cx| {
                    cx.new(|cx| {
                        ErrorMessagePrompt::new(
                            format!("Failed to generate commit message: {err}"),
                            cx,
                        )
                    })
                });
            });
        }
    }

    fn show_remote_output(&self, action: RemoteAction, info: RemoteCommandOutput, cx: &mut App) {
        let Some(workspace) = self.workspace.upgrade() else {
            return;
@@ -2975,7 +2832,7 @@ impl GitPanel {
            (false, "No changes to commit")
        } else if self.pending_commit.is_some() {
            (false, "Commit in progress")
        } else if !self.has_commit_message(cx) {
        } else if self.custom_or_suggested_commit_message(cx).is_none() {
            (false, "No commit message")
        } else if !self.has_write_access(cx) {
            (false, "You do not have write access to this project")
@@ -4189,32 +4046,6 @@ impl GitPanel {
        self.amend_pending = value;
        cx.notify();
    }

    pub async fn load(
        workspace: WeakEntity<Workspace>,
        mut cx: AsyncWindowContext,
    ) -> anyhow::Result<Entity<Self>> {
        let serialized_panel = cx
            .background_spawn(async move { KEY_VALUE_STORE.read_kvp(&GIT_PANEL_KEY) })
            .await
            .context("loading git panel")
            .log_err()
            .flatten()
            .and_then(|panel| serde_json::from_str::<SerializedGitPanel>(&panel).log_err());

        workspace.update_in(&mut cx, |workspace, window, cx| {
            let panel = GitPanel::new(workspace, window, cx);

            if let Some(serialized_panel) = serialized_panel {
                panel.update(cx, |panel, cx| {
                    panel.width = serialized_panel.width;
                    cx.notify();
                })
            }

            panel
        })
    }
}

fn current_language_model(cx: &Context<'_, GitPanel>) -> Option<Arc<dyn LanguageModel>> {
@@ -4926,7 +4757,7 @@ impl Component for PanelRepoFooter {
#[cfg(test)]
mod tests {
    use git::status::StatusCode;
    use gpui::{TestAppContext, VisualTestContext};
    use gpui::TestAppContext;
    use project::{FakeFs, WorktreeSettings};
    use serde_json::json;
    use settings::SettingsStore;
@@ -4990,9 +4821,8 @@ mod tests {

        let project =
            Project::test(fs.clone(), [path!("/root/zed/crates/gpui").as_ref()], cx).await;
        let workspace =
            cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
        let cx = &mut VisualTestContext::from_window(*workspace, cx);
        let (workspace, cx) =
            cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));

        cx.read(|cx| {
            project
@@ -5009,7 +4839,10 @@ mod tests {

        cx.executor().run_until_parked();

        let panel = workspace.update(cx, GitPanel::new).unwrap();
        let app_state = workspace.read_with(cx, |workspace, _| workspace.app_state().clone());
        let panel = cx.new_window_entity(|window, cx| {
            GitPanel::new(workspace.clone(), project.clone(), app_state, window, cx)
        });

        let handle = cx.update_window_entity(&panel, |panel, _, _| {
            std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(()))

@@ -59,15 +59,7 @@ pub fn init(cx: &mut App) {
            return;
        };
        panel.update(cx, |panel, cx| {
            panel.fetch(true, window, cx);
        });
    });
    workspace.register_action(|workspace, _: &git::FetchFrom, window, cx| {
        let Some(panel) = workspace.panel::<git_panel::GitPanel>(cx) else {
            return;
        };
        panel.update(cx, |panel, cx| {
            panel.fetch(false, window, cx);
            panel.fetch(window, cx);
        });
    });
    workspace.register_action(|workspace, _: &git::Push, window, cx| {
@@ -75,15 +67,7 @@ pub fn init(cx: &mut App) {
            return;
        };
        panel.update(cx, |panel, cx| {
            panel.push(false, false, window, cx);
        });
    });
    workspace.register_action(|workspace, _: &git::PushTo, window, cx| {
        let Some(panel) = workspace.panel::<git_panel::GitPanel>(cx) else {
            return;
        };
        panel.update(cx, |panel, cx| {
            panel.push(false, true, window, cx);
            panel.push(false, window, cx);
        });
    });
    workspace.register_action(|workspace, _: &git::ForcePush, window, cx| {
@@ -91,7 +75,7 @@ pub fn init(cx: &mut App) {
            return;
        };
        panel.update(cx, |panel, cx| {
            panel.push(true, false, window, cx);
            panel.push(true, window, cx);
        });
    });
    workspace.register_action(|workspace, _: &git::Pull, window, cx| {
@@ -383,11 +367,9 @@ mod remote_button {
                el.context(keybinding_target.clone())
            })
            .action("Fetch", git::Fetch.boxed_clone())
            .action("Fetch From", git::FetchFrom.boxed_clone())
            .action("Pull", git::Pull.boxed_clone())
            .separator()
            .action("Push", git::Push.boxed_clone())
            .action("Push To", git::PushTo.boxed_clone())
            .action("Force Push", git::ForcePush.boxed_clone())
        }))
    })

@@ -28,8 +28,6 @@ pub fn prompt(
) -> Task<Option<usize>> {
    if options.is_empty() {
        return Task::ready(None);
    } else if options.len() == 1 {
        return Task::ready(Some(0));
    }
    let prompt = prompt.to_string().into();

@@ -6,7 +6,7 @@ use util::ResultExt as _;

#[derive(Clone)]
pub enum RemoteAction {
    Fetch(Option<Remote>),
    Fetch,
    Pull(Remote),
    Push(SharedString, Remote),
}
@@ -14,7 +14,7 @@ pub enum RemoteAction {
impl RemoteAction {
    pub fn name(&self) -> &'static str {
        match self {
            RemoteAction::Fetch(_) => "fetch",
            RemoteAction::Fetch => "fetch",
            RemoteAction::Pull(_) => "pull",
            RemoteAction::Push(_, _) => "push",
        }
@@ -34,19 +34,15 @@ pub struct SuccessMessage {

pub fn format_output(action: &RemoteAction, output: RemoteCommandOutput) -> SuccessMessage {
    match action {
        RemoteAction::Fetch(remote) => {
        RemoteAction::Fetch => {
            if output.stderr.is_empty() {
                SuccessMessage {
                    message: "Already up to date".into(),
                    style: SuccessStyle::Toast,
                }
            } else {
                let message = match remote {
                    Some(remote) => format!("Synchronized with {}", remote.name),
                    None => "Synchronized with remotes".into(),
                };
                SuccessMessage {
                    message,
                    message: "Synchronized with remotes".into(),
                    style: SuccessStyle::ToastWithLog { output },
                }
            }

@@ -508,16 +508,6 @@ pub enum Model {
    Gemini25ProPreview0325,
    #[serde(rename = "gemini-2.5-flash-preview-04-17")]
    Gemini25FlashPreview0417,
    #[serde(
        rename = "gemini-2.5-flash-preview-latest",
        alias = "gemini-2.5-flash-preview-05-20"
    )]
    Gemini25FlashPreview,
    #[serde(
        rename = "gemini-2.5-pro-preview-latest",
        alias = "gemini-2.5-pro-preview-06-05"
    )]
    Gemini25ProPreview,
    #[serde(rename = "custom")]
    Custom {
        name: String,
@@ -545,24 +535,6 @@ impl Model {
            Model::Gemini25ProExp0325 => "gemini-2.5-pro-exp-03-25",
            Model::Gemini25ProPreview0325 => "gemini-2.5-pro-preview-03-25",
            Model::Gemini25FlashPreview0417 => "gemini-2.5-flash-preview-04-17",
            Model::Gemini25FlashPreview => "gemini-2.5-flash-preview-latest",
            Model::Gemini25ProPreview => "gemini-2.5-pro-preview-latest",
            Model::Custom { name, .. } => name,
        }
    }
    pub fn request_id(&self) -> &str {
        match self {
            Model::Gemini15Pro => "gemini-1.5-pro",
            Model::Gemini15Flash => "gemini-1.5-flash",
            Model::Gemini20Pro => "gemini-2.0-pro-exp",
            Model::Gemini20Flash => "gemini-2.0-flash",
            Model::Gemini20FlashThinking => "gemini-2.0-flash-thinking-exp",
            Model::Gemini20FlashLite => "gemini-2.0-flash-lite-preview",
            Model::Gemini25ProExp0325 => "gemini-2.5-pro-exp-03-25",
            Model::Gemini25ProPreview0325 => "gemini-2.5-pro-preview-03-25",
            Model::Gemini25FlashPreview0417 => "gemini-2.5-flash-preview-04-17",
            Model::Gemini25FlashPreview => "gemini-2.5-flash-preview-05-20",
            Model::Gemini25ProPreview => "gemini-2.5-pro-preview-06-05",
            Model::Custom { name, .. } => name,
        }
    }
@@ -576,10 +548,8 @@ impl Model {
            Model::Gemini20FlashThinking => "Gemini 2.0 Flash Thinking",
            Model::Gemini20FlashLite => "Gemini 2.0 Flash Lite",
            Model::Gemini25ProExp0325 => "Gemini 2.5 Pro Exp",
            Model::Gemini25ProPreview0325 => "Gemini 2.5 Pro Preview (0325)",
            Model::Gemini25FlashPreview0417 => "Gemini 2.5 Flash Preview (0417)",
            Model::Gemini25FlashPreview => "Gemini 2.5 Flash Preview",
            Model::Gemini25ProPreview => "Gemini 2.5 Pro Preview",
            Model::Gemini25ProPreview0325 => "Gemini 2.5 Pro Preview",
            Model::Gemini25FlashPreview0417 => "Gemini 2.5 Flash Preview",
            Self::Custom {
                name, display_name, ..
            } => display_name.as_ref().unwrap_or(name),
@@ -599,8 +569,6 @@ impl Model {
            Model::Gemini25ProExp0325 => ONE_MILLION,
            Model::Gemini25ProPreview0325 => ONE_MILLION,
            Model::Gemini25FlashPreview0417 => ONE_MILLION,
            Model::Gemini25FlashPreview => ONE_MILLION,
            Model::Gemini25ProPreview => ONE_MILLION,
            Model::Custom { max_tokens, .. } => *max_tokens,
        }
    }
@@ -614,8 +582,6 @@ impl Model {
            | Self::Gemini20FlashThinking
            | Self::Gemini20FlashLite
            | Self::Gemini25ProExp0325
            | Self::Gemini25ProPreview
            | Self::Gemini25FlashPreview
            | Self::Gemini25ProPreview0325
            | Self::Gemini25FlashPreview0417 => GoogleModelMode::Default,
            Self::Custom { mode, .. } => *mode,