Compare commits

1 commit

Author: Thorsten Ball
SHA1: 1ac6a2f5c2
Message: linux: add profling annotations & extend example
Date: 2024-07-10 13:08:51 +02:00

448 changed files with 12058 additions and 30186 deletions

View File

@@ -1,28 +0,0 @@
# .git-blame-ignore-revs
#
# This file consists of a list of commits that should be ignored for
# `git blame` purposes. This is useful for ignoring commits that only
# changed whitespace / indentation / formatting, but did not change
# the underlying syntax tree.
#
# GitHub will pick this up automatically for blame views:
# https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view
# To use this file locally, run:
# git blame --ignore-revs-file .git-blame-ignore-revs
# To always use this file by default, run:
# git config --local blame.ignoreRevsFile .git-blame-ignore-revs
# To disable this functionality, run:
# git config --local blame.ignoreRevsFile ""
# Comments are optional, but may provide helpful context.
# 2023-04-20 Set default tab_size for JSON to 2 and apply new formatting
# https://github.com/zed-industries/zed/pull/2394
eca93c124a488b4e538946cd2d313bd571aa2b86
# 2024-02-25 Format JSON files in assets/
# https://github.com/zed-industries/zed/pull/8405
ffdda588b41f7d9d270ffe76cab116f828ad545e
# 2024-07-05 Improved formatting of default keymaps (single line per bind)
# https://github.com/zed-industries/zed/pull/13887
813cc3f5e537372fc86720b5e71b6e1c815440ab

View File

@@ -27,8 +27,7 @@ body:
attributes:
label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
description: |
macOS: `~/Library/Logs/Zed/Zed.log`
Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
Drag Zed.log into the text input below.
If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
value: |
<details><summary>Zed.log</summary><pre>

View File

@@ -251,8 +251,6 @@ jobs:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: |
target/zed-remote-server-macos-x86_64.gz
target/zed-remote-server-macos-aarch64.gz
target/aarch64-apple-darwin/release/Zed-aarch64.dmg
target/x86_64-apple-darwin/release/Zed-x86_64.dmg
target/release/Zed.dmg
@@ -321,12 +319,11 @@ jobs:
- name: Upload app bundle to release
uses: softprops/action-gh-release@v1
if: ${{ env.RELEASE_CHANNEL == 'preview' }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: |
target/zed-remote-server-linux-x86_64.gz
target/release/zed-linux-x86_64.tar.gz
files: target/release/zed-linux-x86_64.tar.gz
body: ""
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -409,9 +406,7 @@ jobs:
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
files: |
target/zed-remote-server-linux-aarch64.gz
target/release/zed-linux-aarch64.tar.gz
files: target/release/zed-linux-aarch64.tar.gz
body: ""
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -33,7 +33,6 @@ jobs:
- name: Run clippy
run: ./script/clippy
tests:
timeout-minutes: 60
name: Run tests
@@ -94,9 +93,9 @@ jobs:
- name: Upload Zed Nightly
run: script/upload-nightly macos
bundle-linux-x86:
bundle-deb:
timeout-minutes: 60
name: Create a Linux *.tar.gz bundle for x86
name: Create a Linux *.tar.gz bundle
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
@@ -122,56 +121,8 @@ jobs:
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: Create Linux .tar.gz bundle
run: script/bundle-linux
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz
bundle-linux-arm:
timeout-minutes: 60
name: Create a Linux *.tar.gz bundle for ARM
if: github.repository_owner == 'zed-industries'
runs-on:
- hosted-linux-arm-1
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
- name: "Setup jq"
uses: dcarbone/install-jq-action@v2
- name: Set up Clang
run: |
sudo apt-get update
sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH
- uses: rui314/setup-mold@v1
with:
mold-version: 2.32.0
- name: rustup
run: |
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100
- name: Set release channel to nightly
run: |
set -euo pipefail
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: Generate license file
run: script/generate-licenses
- name: Create Linux .tar.gz bundle
run: script/bundle-linux

Cargo.lock (generated): 434 changes

File diff suppressed because it is too large.

View File

@@ -19,7 +19,6 @@ members = [
"crates/collections",
"crates/command_palette",
"crates/command_palette_hooks",
"crates/completion",
"crates/copilot",
"crates/db",
"crates/dev_server_projects",
@@ -51,7 +50,6 @@ members = [
"crates/install_cli",
"crates/journal",
"crates/language",
"crates/language_model",
"crates/language_selector",
"crates/language_tools",
"crates/languages",
@@ -81,8 +79,6 @@ members = [
"crates/refineable",
"crates/refineable/derive_refineable",
"crates/release_channel",
"crates/remote",
"crates/remote_server",
"crates/repl",
"crates/rich_text",
"crates/rope",
@@ -141,7 +137,6 @@ members = [
"extensions/php",
"extensions/prisma",
"extensions/purescript",
"extensions/ruff",
"extensions/ruby",
"extensions/snippets",
"extensions/svelte",
@@ -165,7 +160,6 @@ assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
assistant_slash_command = { path = "crates/assistant_slash_command" }
assistant_tooling = { path = "crates/assistant_tooling" }
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" }
breadcrumbs = { path = "crates/breadcrumbs" }
@@ -179,7 +173,6 @@ collab_ui = { path = "crates/collab_ui" }
collections = { path = "crates/collections" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
completion = { path = "crates/completion" }
copilot = { path = "crates/copilot" }
db = { path = "crates/db" }
dev_server_projects = { path = "crates/dev_server_projects" }
@@ -209,7 +202,6 @@ inline_completion_button = { path = "crates/inline_completion_button" }
install_cli = { path = "crates/install_cli" }
journal = { path = "crates/journal" }
language = { path = "crates/language" }
language_model = { path = "crates/language_model" }
language_selector = { path = "crates/language_selector" }
language_tools = { path = "crates/language_tools" }
languages = { path = "crates/languages" }
@@ -239,8 +231,6 @@ proto = { path = "crates/proto" }
quick_action_bar = { path = "crates/quick_action_bar" }
recent_projects = { path = "crates/recent_projects" }
release_channel = { path = "crates/release_channel" }
remote = { path = "crates/remote" }
remote_server = { path = "crates/remote_server" }
repl = { path = "crates/repl" }
rich_text = { path = "crates/rich_text" }
rope = { path = "crates/rope" }
@@ -284,8 +274,7 @@ zed_actions = { path = "crates/zed_actions" }
alacritty_terminal = "0.23"
any_vec = "0.13"
anyhow = "1.0.57"
arrayvec = { version = "0.7.4", features = ["serde"] }
ashpd = "0.9.1"
ashpd = { git = "https://github.com/bilelmoussaoui/ashpd", rev = "29f2e1a" }
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-dispatcher = { version = "0.1" }
async-fs = "1.6"
@@ -295,11 +284,10 @@ async-trait = "0.1"
async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
base64 = "0.13"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/zed-industries/blade", rev = "a477c2008db27db0b9f745715e119b3ee7ab7818" }
blade-macros = { git = "https://github.com/zed-industries/blade", rev = "a477c2008db27db0b9f745715e119b3ee7ab7818" }
blade-util = { git = "https://github.com/zed-industries/blade", rev = "a477c2008db27db0b9f745715e119b3ee7ab7818" }
blake3 = "1.5.3"
bitflags = "2.4.2"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
blade-util = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
cap-std = "3.0"
cargo_toml = "0.20"
chrono = { version = "0.4", features = ["serde"] }
@@ -377,7 +365,6 @@ shellexpand = "2.1.0"
shlex = "1.3.0"
signal-hook = "0.3.17"
similar = "1.3"
simplelog = "0.12.2"
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
strum = { version = "0.25.0", features = ["derive"] }
@@ -393,7 +380,6 @@ time = { version = "0.3", features = [
"serde-well-known",
"formatting",
] }
tiny_http = "0.8"
toml = "0.8"
tokio = { version = "1", features = ["full"] }
tower-http = "0.4.4"
@@ -465,11 +451,11 @@ features = [
"Win32_System_Com_StructuredStorage",
"Win32_System_DataExchange",
"Win32_System_LibraryLoader",
"Win32_System_Memory",
"Win32_System_Ole",
"Win32_System_SystemInformation",
"Win32_System_SystemServices",
"Win32_System_Threading",
"Win32_System_Time",
"Win32_System_WinRT",
"Win32_UI_Controls",
"Win32_UI_HiDpi",
@@ -532,7 +518,7 @@ single_range_in_vec_init = "allow"
# There are a bunch of rules currently failing in the `style` group, so
# allow all of those, for now.
style = { level = "allow", priority = -1 }
style = "allow"
# Individual rules that have violations in the codebase:
almost_complete_range = "allow"

View File

@@ -1,3 +0,0 @@
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3.49574 4.74787L5.99574 7.25214L8.49574 4.74787" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>


View File

@@ -1,13 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_62_95)">
<path d="M4.5 6C3.67157 6 3 5.32843 3 4.5C3 3.67157 3.67157 3 4.5 3C5.32843 3 6 3.67157 6 4.5C6 5.32843 5.32843 6 4.5 6Z" stroke="white" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M6.54433 13.4334C6.87775 13.5227 7.22046 13.3249 7.3098 12.9914C7.39914 12.658 7.20127 12.3153 6.86786 12.226L6.54433 13.4334ZM3.77426 6.86772L3.93603 6.26401L2.72862 5.94049L2.56686 6.54419L3.77426 6.86772ZM6.86786 12.226C4.53394 11.6006 3.14889 9.20163 3.77426 6.86772L2.56686 6.54419C1.76281 9.54494 3.54359 12.6293 6.54433 13.4334L6.86786 12.226Z" fill="white"/>
<path d="M11.5 13C10.6716 13 10 12.3284 10 11.5C10 10.6716 10.6716 10 11.5 10C12.3284 10 13 10.6716 13 11.5C13 12.3284 12.3284 13 11.5 13Z" stroke="white" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10.1875 4.21113C9.88852 4.03854 9.7861 3.65629 9.95869 3.35736C10.1313 3.05843 10.5135 2.95601 10.8125 3.12859L10.1875 4.21113ZM11.7888 10.1875C12.9969 8.09496 12.28 5.41925 10.1875 4.21113L10.8125 3.12859C13.5028 4.6819 14.4246 8.12209 12.8713 10.8125L11.7888 10.1875Z" fill="white"/>
</g>
<defs>
<clipPath id="clip0_62_95">
<rect width="16" height="16" fill="white" transform="matrix(-1 0 0 1 16 0)"/>
</clipPath>
</defs>
</svg>


View File

@@ -1,20 +0,0 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_39_129)">
<path d="M22.0209 11.9553C22.0059 10.0068 21.4219 8.10512 20.3408 6.48401" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10.1001 2.18C11.355 1.93537 12.1493 1.93674 13.5027 2.10594" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M21.8198 10.1C22.0644 11.3548 22.0644 12.6451 21.8198 13.9" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M20.2898 17.6C19.5716 18.6622 18.6548 19.5757 17.5898 20.29" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M13.9008 21.82C12.6459 22.0644 11.6432 22.1543 10.3883 21.91" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M2.18005 13.9C1.93543 12.6451 1.93543 11.3548 2.18005 10.1" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M3.70996 6.40002C4.42822 5.33775 5.34503 4.42433 6.40996 3.71002" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M12 13C12.5523 13 13 12.5523 13 12C13 11.4477 12.5523 11 12 11C11.4477 11 11 11.4477 11 12C11 12.5523 11.4477 13 12 13Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M19 7C20.1046 7 21 6.10457 21 5C21 3.89543 20.1046 3 19 3C17.8954 3 17 3.89543 17 5C17 6.10457 17.8954 7 19 7Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5 21C6.10457 21 7 20.1046 7 19C7 17.8954 6.10457 17 5 17C3.89543 17 3 17.8954 3 19C3 20.1046 3.89543 21 5 21Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M1.99072 12.0748C2.00804 14.0118 2.58758 15.9021 3.65891 17.516" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</g>
<defs>
<clipPath id="clip0_39_129">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>


View File

@@ -1,15 +0,0 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_32_70)">
<path d="M19 7C20.1046 7 21 6.10457 21 5C21 3.89543 20.1046 3 19 3C17.8954 3 17 3.89543 17 5C17 6.10457 17.8954 7 19 7Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5 21C6.10457 21 7 20.1046 7 19C7 17.8954 6.10457 17 5 17C3.89543 17 3 17.8954 3 19C3 20.1046 3.89543 21 5 21Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10.3999 21.9C12.3227 22.2159 14.2958 21.9632 16.0769 21.173C17.858 20.3827 19.3694 19.0893 20.4254 17.4517C21.4814 15.8142 22.036 13.9037 22.021 11.9553C22.006 10.0068 21.422 8.10512 20.3409 6.48401" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M13.4998 2.10002C11.5849 1.8076 9.62631 2.07763 7.86198 2.87732C6.09765 3.677 4.60356 4.9719 3.56126 6.60468C2.51896 8.23745 1.97332 10.1378 1.99063 12.0748C2.00795 14.0118 2.58749 15.9021 3.65882 17.516" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10 15V9" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M14 15V9" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</g>
<defs>
<clipPath id="clip0_32_70">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>


View File

@@ -1,14 +0,0 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_32_64)">
<path d="M19 7C20.1046 7 21 6.10457 21 5C21 3.89543 20.1046 3 19 3C17.8954 3 17 3.89543 17 5C17 6.10457 17.8954 7 19 7Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5 21C6.10457 21 7 20.1046 7 19C7 17.8954 6.10457 17 5 17C3.89543 17 3 17.8954 3 19C3 20.1046 3.89543 21 5 21Z" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10.3999 21.9C12.3227 22.2159 14.2958 21.9632 16.0769 21.173C17.858 20.3827 19.3694 19.0893 20.4254 17.4517C21.4814 15.8142 22.036 13.9037 22.021 11.9553C22.006 10.0068 21.422 8.10512 20.3409 6.48401" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M13.4998 2.10002C11.5849 1.8076 9.62631 2.07763 7.86198 2.87732C6.09765 3.677 4.60356 4.9719 3.56126 6.60468C2.51896 8.23745 1.97332 10.1378 1.99063 12.0748C2.00795 14.0118 2.58749 15.9021 3.65882 17.516" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10 8.56055C10 8.32095 10.267 8.17803 10.4664 8.31094L15.6256 11.7504C15.8037 11.8691 15.8037 12.1309 15.6256 12.2496L10.4664 15.6891C10.267 15.822 10 15.6791 10 15.4394V8.56055Z" fill="white" stroke="white" stroke-linecap="round" stroke-linejoin="round"/>
</g>
<defs>
<clipPath id="clip0_32_64">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>


View File

@@ -19,6 +19,7 @@
"escape": "menu::Cancel",
"ctrl-escape": "menu::Cancel",
"ctrl-c": "menu::Cancel",
"shift-enter": "picker::UseSelectedQuery",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"ctrl-alt-enter": ["picker::ConfirmInput", { "secondary": true }],
"ctrl-shift-w": "workspace::CloseWindow",
@@ -78,8 +79,10 @@
"shift-down": "editor::SelectDown",
"shift-left": "editor::SelectLeft",
"shift-right": "editor::SelectRight",
"ctrl-shift-left": "editor::SelectToPreviousWordStart", // cursorWordLeftSelect
"ctrl-shift-right": "editor::SelectToNextWordEnd", // cursorWordRightSelect
"ctrl-shift-left": "editor::SelectToPreviousWordStart",
"ctrl-shift-right": "editor::SelectToNextWordEnd",
"ctrl-shift-up": "editor::AddSelectionAbove",
"ctrl-shift-down": "editor::AddSelectionBelow",
"ctrl-shift-home": "editor::SelectToBeginning",
"ctrl-shift-end": "editor::SelectToEnd",
"ctrl-a": "editor::SelectAll",
@@ -97,7 +100,6 @@
"ctrl-k ctrl-r": "editor::RevertSelectedHunks",
"ctrl-'": "editor::ToggleHunkDiff",
"ctrl-\"": "editor::ExpandAllHunkDiffs",
"ctrl-i": "editor::ShowSignatureHelp",
"alt-g b": "editor::ToggleGitBlame"
}
},
@@ -107,6 +109,7 @@
"enter": "editor::Newline",
"shift-enter": "editor::Newline",
"ctrl-shift-enter": "editor::NewlineBelow",
"ctrl-enter": "editor::NewlineAbove",
"alt-z": "editor::ToggleSoftWrap",
"ctrl-f": "buffer_search::Deploy",
"ctrl-h": ["buffer_search::Deploy", { "replace_enabled": true }],
@@ -116,12 +119,6 @@
"ctrl-alt-e": "editor::SelectEnclosingSymbol"
}
},
{
"context": "Editor && mode == full && !jupyter",
"bindings": {
"ctrl-enter": "editor::NewlineAbove"
}
},
{
"context": "Editor && mode == full && inline_completion",
"bindings": {
@@ -210,7 +207,7 @@
}
},
{
"context": "ProjectSearchBar && in_replace > Editor",
"context": "ProjectSearchBar && in_replace",
"bindings": {
"enter": "search::ReplaceNext",
"ctrl-alt-enter": "search::ReplaceAll"
@@ -263,19 +260,20 @@
"bindings": {
"ctrl-[": "editor::Outdent",
"ctrl-]": "editor::Indent",
"shift-alt-up": "editor::AddSelectionAbove", // Insert Cursor Above
"shift-alt-down": "editor::AddSelectionBelow", // Insert Cursor Below
"shift-alt-up": "editor::AddSelectionAbove",
"shift-alt-down": "editor::AddSelectionBelow",
"ctrl-shift-k": "editor::DeleteLine",
"alt-up": "editor::MoveLineUp",
"alt-down": "editor::MoveLineDown",
"ctrl-alt-shift-up": "editor::DuplicateLineUp",
"ctrl-alt-shift-down": "editor::DuplicateLineDown",
"alt-shift-right": "editor::SelectLargerSyntaxNode", // Expand Selection
"alt-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink Selection
"ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
"ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word
"ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // Add selection to Next Find Match
"ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }],
"ctrl-shift-left": "editor::SelectToPreviousWordStart",
"ctrl-shift-right": "editor::SelectToNextWordEnd",
"ctrl-shift-up": "editor::SelectLargerSyntaxNode", //todo(linux) tmp keybinding
"ctrl-shift-down": "editor::SelectSmallerSyntaxNode", //todo(linux) tmp keybinding
"ctrl-d": ["editor::SelectNext", { "replace_newest": false }],
"ctrl-shift-l": "editor::SelectAllMatches",
"ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": false }],
"ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }],
"ctrl-k ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": true }],
"ctrl-k ctrl-i": "editor::Hover",
@@ -328,7 +326,7 @@
"alt-9": ["pane::ActivateItem", 8],
"alt-0": "pane::ActivateLastItem",
"ctrl-alt--": "pane::GoBack",
"ctrl-alt-_": "pane::GoForward",
"ctrl-alt-shift--": "pane::GoForward",
"ctrl-shift-t": "pane::ReopenClosedItem",
"f3": "search::SelectNextMatch",
"shift-f3": "search::SelectPrevMatch",
@@ -399,7 +397,7 @@
"bindings": {
"ctrl-shift-k": "editor::DeleteLine",
"ctrl-shift-d": "editor::DuplicateLineDown",
"ctrl-shift-j": "editor::JoinLines",
"ctrl-j": "editor::JoinLines",
"ctrl-alt-backspace": "editor::DeleteToPreviousSubwordStart",
"ctrl-alt-h": "editor::DeleteToPreviousSubwordStart",
"ctrl-alt-delete": "editor::DeleteToNextSubwordEnd",
@@ -482,12 +480,6 @@
"ctrl-enter": "assistant::InlineAssist"
}
},
{
"context": "Editor && jupyter && !ContextEditor",
"bindings": {
"ctrl-enter": "repl::Run"
}
},
{
"context": "ContextEditor > Editor",
"bindings": {
@@ -574,13 +566,6 @@
"tab": "channel_modal::ToggleMode"
}
},
{
"context": "Picker > Editor",
"bindings": {
"tab": "picker::ConfirmCompletion",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }]
}
},
{
"context": "ChannelModal > Picker > Editor",
"bindings": {
@@ -606,7 +591,6 @@
"ctrl-alt-space": "terminal::ShowCharacterPalette",
"shift-ctrl-c": "terminal::Copy",
"ctrl-insert": "terminal::Copy",
"ctrl-a": "editor::SelectAll",
"shift-ctrl-v": "terminal::Paste",
"shift-insert": "terminal::Paste",
"ctrl-enter": "assistant::InlineAssist",

View File

@@ -83,7 +83,7 @@
"ctrl-n": "editor::MoveDown",
"ctrl-b": "editor::MoveLeft",
"ctrl-f": "editor::MoveRight",
"ctrl-l": "editor::ScrollCursorCenter",
"ctrl-l": "editor::NextScreen",
"alt-left": "editor::MoveToPreviousWordStart",
"alt-b": "editor::MoveToPreviousWordStart",
"alt-right": "editor::MoveToNextWordEnd",
@@ -102,9 +102,9 @@
"ctrl-shift-b": "editor::SelectLeft",
"shift-right": "editor::SelectRight",
"ctrl-shift-f": "editor::SelectRight",
"alt-shift-left": "editor::SelectToPreviousWordStart", // cursorWordLeftSelect
"alt-shift-left": "editor::SelectToPreviousWordStart",
"alt-shift-b": "editor::SelectToPreviousWordStart",
"alt-shift-right": "editor::SelectToNextWordEnd", // cursorWordRightSelect
"alt-shift-right": "editor::SelectToNextWordEnd",
"alt-shift-f": "editor::SelectToNextWordEnd",
"ctrl-shift-up": "editor::SelectToStartOfParagraph",
"ctrl-shift-down": "editor::SelectToEndOfParagraph",
@@ -126,8 +126,7 @@
"cmd-alt-z": "editor::RevertSelectedHunks",
"cmd-'": "editor::ToggleHunkDiff",
"cmd-\"": "editor::ExpandAllHunkDiffs",
"cmd-alt-g b": "editor::ToggleGitBlame",
"cmd-i": "editor::ShowSignatureHelp"
"cmd-alt-g b": "editor::ToggleGitBlame"
}
},
{
@@ -180,12 +179,6 @@
"cmd-c": "markdown::Copy"
}
},
{
"context": "Editor && jupyter && !ContextEditor",
"bindings": {
"cmd-enter": "repl::Run"
}
},
{
"context": "AssistantPanel",
"bindings": {
@@ -261,7 +254,7 @@
}
},
{
"context": "ProjectSearchBar && in_replace > Editor",
"context": "ProjectSearchBar && in_replace",
"bindings": {
"enter": "search::ReplaceNext",
"cmd-enter": "search::ReplaceAll"
@@ -308,20 +301,19 @@
"bindings": {
"cmd-[": "editor::Outdent",
"cmd-]": "editor::Indent",
"cmd-alt-up": "editor::AddSelectionAbove", // Insert cursor above
"cmd-alt-up": "editor::AddSelectionAbove",
"cmd-ctrl-p": "editor::AddSelectionAbove",
"cmd-alt-down": "editor::AddSelectionBelow", // Insert cursor below
"cmd-alt-down": "editor::AddSelectionBelow",
"cmd-ctrl-n": "editor::AddSelectionBelow",
"cmd-shift-k": "editor::DeleteLine",
"alt-up": "editor::MoveLineUp",
"alt-down": "editor::MoveLineDown",
"alt-shift-up": "editor::DuplicateLineUp",
"alt-shift-down": "editor::DuplicateLineDown",
"ctrl-shift-right": "editor::SelectLargerSyntaxNode", // Expand Selection
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink Selection
"cmd-d": ["editor::SelectNext", { "replace_newest": false }], // Add selection to Next Find Match
"cmd-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
"cmd-f2": "editor::SelectAllMatches", // Select all occurrences of current word
"ctrl-shift-right": "editor::SelectLargerSyntaxNode",
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode",
"cmd-d": ["editor::SelectNext", { "replace_newest": false }],
"cmd-shift-l": "editor::SelectAllMatches",
"ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": false }],
"cmd-k cmd-d": ["editor::SelectNext", { "replace_newest": true }],
"cmd-k ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": true }],
@@ -576,6 +568,12 @@
"space": "project_panel::Open"
}
},
{
"context": "Editor && jupyter",
"bindings": {
"cmd-enter": "repl::Run"
}
},
{
"context": "CollabPanel && not_editing",
"bindings": {
@@ -595,14 +593,6 @@
"tab": "channel_modal::ToggleMode"
}
},
{
"context": "Picker > Editor",
"bindings": {
"tab": "picker::ConfirmCompletion",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"cmd-alt-enter": ["picker::ConfirmInput", { "secondary": true }]
}
},
{
"context": "ChannelModal > Picker > Editor",
"bindings": {
@@ -622,13 +612,20 @@
"ctrl-backspace": "tab_switcher::CloseSelectedItem"
}
},
{
"context": "Picker",
"bindings": {
"f2": "picker::UseSelectedQuery",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"cmd-alt-enter": ["picker::ConfirmInput", { "secondary": true }]
}
},
{
"context": "Terminal",
"bindings": {
"ctrl-cmd-space": "terminal::ShowCharacterPalette",
"cmd-c": "terminal::Copy",
"cmd-v": "terminal::Paste",
"cmd-a": "editor::SelectAll",
"cmd-k": "terminal::Clear",
"ctrl-enter": "assistant::InlineAssist",
// Some nice conveniences

View File

@@ -1,21 +0,0 @@
// Zed keymap
//
// For information on binding keys, see the Zed
// documentation: https://zed.dev/docs/key-bindings
//
// To see the default key bindings run `zed: Open Default Keymap`
// from the command palette.
[
{
"context": "Workspace",
"bindings": {
// "shift shift": "file_finder::Toggle"
}
},
{
"context": "Editor",
"bindings": {
// "j k": ["workspace::SendKeystrokes", "escape"]
}
}
]

View File

@@ -25,7 +25,6 @@
"ctrl-shift-d": "editor::DuplicateLineDown", // editor:duplicate-lines
"ctrl-up": "editor::MoveLineUp", // editor:move-line-up
"ctrl-down": "editor::MoveLineDown", // editor:move-line-down
"ctrl-\\": "workspace::ToggleLeftDock", // tree-view:toggle
"ctrl-shift-m": "markdown::OpenPreviewToTheSide" // markdown-preview:toggle
}
},

View File

@@ -13,7 +13,6 @@
"ctrl-shift-j": "editor::JoinLines",
"ctrl-d": "editor::DuplicateLineDown",
"ctrl-y": "editor::DeleteLine",
"ctrl-m": "editor::ScrollCursorCenter",
"ctrl-pagedown": "editor::MovePageDown",
"ctrl-pageup": "editor::MovePageUp",
// "ctrl-alt-shift-b": "editor::SelectToPreviousWordStart",

View File

@@ -3,10 +3,10 @@
"bindings": {
"ctrl-shift-[": "pane::ActivatePrevItem",
"ctrl-shift-]": "pane::ActivateNextItem",
"ctrl-pageup": "pane::ActivatePrevItem",
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivatePrevItem"
"ctrl-pagedown": "pane::ActivatePrevItem",
"ctrl-pageup": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivateNextItem",
"ctrl-tab": "pane::ActivatePrevItem"
}
},
{
@@ -24,8 +24,6 @@
"ctrl-shift-f12": "editor::FindAllReferences",
"ctrl-.": "editor::GoToHunk",
"ctrl-,": "editor::GoToPrevHunk",
"ctrl-k ctrl-u": "editor::ConvertToUpperCase",
"ctrl-k ctrl-l": "editor::ConvertToLowerCase",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
"ctrl-delete": "editor::DeleteToNextWordEnd"

View File

@@ -26,7 +26,6 @@
"cmd-shift-d": "editor::DuplicateLineDown",
"ctrl-cmd-up": "editor::MoveLineUp",
"ctrl-cmd-down": "editor::MoveLineDown",
"cmd-\\": "workspace::ToggleLeftDock",
"ctrl-shift-m": "markdown::OpenPreviewToTheSide"
}
},

View File

@@ -3,10 +3,10 @@
"bindings": {
"cmd-shift-[": "pane::ActivatePrevItem",
"cmd-shift-]": "pane::ActivateNextItem",
"ctrl-pageup": "pane::ActivatePrevItem",
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivatePrevItem"
"ctrl-pagedown": "pane::ActivatePrevItem",
"ctrl-pageup": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivateNextItem",
"ctrl-tab": "pane::ActivatePrevItem"
}
},
{
@@ -27,7 +27,6 @@
"ctrl-,": "editor::GoToPrevHunk",
"cmd-k cmd-u": "editor::ConvertToUpperCase",
"cmd-k cmd-l": "editor::ConvertToLowerCase",
"cmd-shift-j": "editor::JoinLines",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
"ctrl-delete": "editor::DeleteToNextWordEnd"

View File

@@ -1,6 +1,12 @@
[
{
"context": "VimControl && !menu",
"context": "ProjectPanel || Editor",
"bindings": {
"ctrl-6": "pane::AlternateFile"
}
},
{
"context": "Editor && VimControl && !VimWaiting && !menu",
"bindings": {
"i": ["vim::PushOperator", { "Object": { "around": false } }],
"a": ["vim::PushOperator", { "Object": { "around": true } }],
@@ -10,11 +16,7 @@
"backspace": "vim::Backspace",
"j": "vim::Down",
"down": "vim::Down",
"ctrl-j": "vim::Down",
"enter": "vim::NextLineStart",
"ctrl-m": "vim::NextLineStart",
"+": "vim::NextLineStart",
"-": "vim::PreviousLineStart",
"tab": "vim::Tab",
"shift-tab": "vim::Tab",
"k": "vim::Up",
@@ -195,20 +197,20 @@
"ctrl-w g shift-d": "editor::GoToTypeDefinitionSplit",
"ctrl-w space": "editor::OpenExcerptsSplit",
"ctrl-w g space": "editor::OpenExcerptsSplit",
"ctrl-6": "pane::AlternateFile"
"-": "pane::RevealInProjectPanel"
}
},
{
"context": "VimControl && VimCount",
"bindings": {
"0": ["vim::Number", 0]
}
},
{
"context": "vim_mode == normal",
// escape is in its own section so that it cancels a pending count.
"context": "Editor && vim_mode == normal && vim_operator == none && !VimWaiting",
"bindings": {
"escape": "editor::Cancel",
"ctrl-[": "editor::Cancel",
"ctrl-[": "editor::Cancel"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == none && !VimWaiting",
"bindings": {
".": "vim::Repeat",
"c": ["vim::PushOperator", "Change"],
"shift-c": "vim::ChangeToEndOfLine",
@@ -216,7 +218,7 @@
"shift-d": "vim::DeleteToEndOfLine",
"shift-j": "vim::JoinLines",
"y": ["vim::PushOperator", "Yank"],
"shift-y": "vim::YankToEndOfLine",
"shift-y": "vim::YankLine",
"i": "vim::InsertBefore",
"shift-i": "vim::InsertFirstNonWhitespace",
"a": "vim::InsertAfter",
@@ -252,12 +254,127 @@
"] d": "editor::GoToDiagnostic",
"[ d": "editor::GoToPrevDiagnostic",
"] c": "editor::GoToHunk",
"[ c": "editor::GoToPrevHunk",
"g c c": "vim::ToggleComments"
"[ c": "editor::GoToPrevHunk"
}
},
{
"context": "vim_mode == visual",
"context": "Editor && vim_mode == visual && vim_operator == none && !VimWaiting",
"bindings": {
"\"": ["vim::PushOperator", "Register"],
// tree-sitter related commands
"[ x": "editor::SelectLargerSyntaxNode",
"] x": "editor::SelectSmallerSyntaxNode"
}
},
{
"context": "Editor && VimCount && vim_mode != insert",
"bindings": {
"0": ["vim::Number", 0]
}
},
{
"context": "Editor && vim_operator == c",
"bindings": {
"c": "vim::CurrentLine",
"d": "editor::Rename" // zed specific
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == c",
"bindings": {
"s": ["vim::PushOperator", { "ChangeSurrounds": {} }]
}
},
{
"context": "Editor && vim_operator == d",
"bindings": {
"d": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == gu",
"bindings": {
"g u": "vim::CurrentLine",
"u": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == gU",
"bindings": {
"g shift-u": "vim::CurrentLine",
"shift-u": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == g~",
"bindings": {
"g ~": "vim::CurrentLine",
"~": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == d",
"bindings": {
"s": ["vim::PushOperator", "DeleteSurrounds"]
}
},
{
"context": "Editor && vim_operator == y",
"bindings": {
"y": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == y",
"bindings": {
"s": ["vim::PushOperator", { "AddSurrounds": {} }]
}
},
{
"context": "Editor && vim_operator == ys",
"bindings": {
"s": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == >",
"bindings": {
">": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_operator == <",
"bindings": {
"<": "vim::CurrentLine"
}
},
{
"context": "Editor && VimObject",
"bindings": {
"w": "vim::Word",
"shift-w": ["vim::Word", { "ignorePunctuation": true }],
"t": "vim::Tag",
"s": "vim::Sentence",
"p": "vim::Paragraph",
"'": "vim::Quotes",
"`": "vim::BackQuotes",
"\"": "vim::DoubleQuotes",
"|": "vim::VerticalBars",
"(": "vim::Parentheses",
")": "vim::Parentheses",
"b": "vim::Parentheses",
"[": "vim::SquareBrackets",
"]": "vim::SquareBrackets",
"{": "vim::CurlyBrackets",
"}": "vim::CurlyBrackets",
"shift-b": "vim::CurlyBrackets",
"<": "vim::AngleBrackets",
">": "vim::AngleBrackets",
"a": "vim::Argument"
}
},
{
"context": "Editor && vim_mode == visual && !VimWaiting && !VimObject",
"bindings": {
"u": "vim::ConvertToLowerCase",
"U": "vim::ConvertToUpperCase",
@@ -292,16 +409,23 @@
">": "vim::Indent",
"<": "vim::Outdent",
"i": ["vim::PushOperator", { "Object": { "around": false } }],
"a": ["vim::PushOperator", { "Object": { "around": true } }],
"g c": "vim::ToggleComments",
"\"": ["vim::PushOperator", "Register"],
// tree-sitter related commands
"[ x": "editor::SelectLargerSyntaxNode",
"] x": "editor::SelectSmallerSyntaxNode"
"a": ["vim::PushOperator", { "Object": { "around": true } }]
}
},
{
"context": "vim_mode == insert",
"context": "Editor && vim_mode == normal && !VimWaiting",
"bindings": {
"g c c": "vim::ToggleComments"
}
},
{
"context": "Editor && vim_mode == visual",
"bindings": {
"g c": "vim::ToggleComments"
}
},
{
"context": "Editor && vim_mode == insert",
"bindings": {
"escape": "vim::NormalBefore",
"ctrl-c": "vim::NormalBefore",
@@ -320,118 +444,30 @@
}
},
{
"context": "vim_mode == replace",
"context": "Editor && vim_mode == replace",
"bindings": {
"escape": "vim::NormalBefore",
"ctrl-c": "vim::NormalBefore",
"ctrl-[": "vim::NormalBefore",
"backspace": "vim::UndoReplace",
"tab": "vim::Tab",
"enter": "vim::Enter"
"enter": "vim::Enter",
"backspace": "vim::UndoReplace"
}
},
{
"context": "vim_mode == waiting",
"context": "Editor && vim_mode != replace && VimWaiting",
"bindings": {
"tab": "vim::Tab",
"enter": "vim::Enter",
"escape": "vim::ClearOperators",
"ctrl-c": "vim::ClearOperators",
"ctrl-[": "vim::ClearOperators"
"escape": ["vim::SwitchMode", "Normal"],
"ctrl-[": ["vim::SwitchMode", "Normal"]
}
},
{
"context": "vim_mode == operator",
"context": "Editor && vim_mode == insert && VimWaiting",
"bindings": {
"escape": "vim::ClearOperators",
"ctrl-c": "vim::ClearOperators",
"ctrl-[": "vim::ClearOperators"
}
},
{
"context": "vim_operator == a || vim_operator == i || vim_operator == cs",
"bindings": {
"w": "vim::Word",
"shift-w": ["vim::Word", { "ignorePunctuation": true }],
"t": "vim::Tag",
"s": "vim::Sentence",
"p": "vim::Paragraph",
"'": "vim::Quotes",
"`": "vim::BackQuotes",
"\"": "vim::DoubleQuotes",
"|": "vim::VerticalBars",
"(": "vim::Parentheses",
")": "vim::Parentheses",
"b": "vim::Parentheses",
"[": "vim::SquareBrackets",
"]": "vim::SquareBrackets",
"{": "vim::CurlyBrackets",
"}": "vim::CurlyBrackets",
"shift-b": "vim::CurlyBrackets",
"<": "vim::AngleBrackets",
">": "vim::AngleBrackets",
"a": "vim::Argument"
}
},
{
"context": "vim_operator == c",
"bindings": {
"c": "vim::CurrentLine",
"d": "editor::Rename", // zed specific
"s": ["vim::PushOperator", { "ChangeSurrounds": {} }]
}
},
{
"context": "vim_operator == d",
"bindings": {
"d": "vim::CurrentLine",
"s": ["vim::PushOperator", "DeleteSurrounds"]
}
},
{
"context": "vim_operator == gu",
"bindings": {
"g u": "vim::CurrentLine",
"u": "vim::CurrentLine"
}
},
{
"context": "vim_operator == gU",
"bindings": {
"g shift-u": "vim::CurrentLine",
"shift-u": "vim::CurrentLine"
}
},
{
"context": "vim_operator == g~",
"bindings": {
"g ~": "vim::CurrentLine",
"~": "vim::CurrentLine"
}
},
{
"context": "vim_operator == y",
"bindings": {
"y": "vim::CurrentLine",
"s": ["vim::PushOperator", { "AddSurrounds": {} }]
}
},
{
"context": "vim_operator == ys",
"bindings": {
"s": "vim::CurrentLine"
}
},
{
"context": "vim_operator == >",
"bindings": {
">": "vim::CurrentLine"
}
},
{
"context": "vim_operator == <",
"bindings": {
"<": "vim::CurrentLine"
"escape": "vim::NormalBefore",
"ctrl-[": "vim::NormalBefore"
}
},
{
@@ -471,8 +507,7 @@
"x": "project_panel::RevealInFileManager",
"shift-g": "menu::SelectLast",
"g g": "menu::SelectFirst",
"-": "project_panel::SelectParent",
"ctrl-6": "pane::AlternateFile"
"-": "project_panel::SelectParent"
}
},
{

View File

@@ -1,241 +0,0 @@
Your task is to map a step from the conversation above to operations on symbols inside the provided source files.
Guidelines:
- There's no need to describe *what* to do, just *where* to do it.
- If creating a file, assume any subsequent updates are included at the time of creation.
- Don't create and then update a file.
- We'll create it in one shot.
- Prefer updating symbols lower in the syntax tree if possible.
- Never include operations on a parent symbol and one of its children in the same <operations> block.
- Never nest an operation with another operation or include CDATA or other content. All operations are leaf nodes.
- Include a description attribute for each operation with a brief, one-line description of the change to perform.
- Descriptions are required for all operations except delete.
- When generating multiple operations, ensure the descriptions are specific to each individual operation.
- Avoid referring to the location in the description. Focus on the change to be made, not the location where it's made. That's implicit with the symbol you provide.
- Don't generate multiple operations at the same location. Instead, combine them together in a single operation with a succinct combined description.
The available operation types are:
1. <update>: Modify an existing symbol in a file.
2. <create_file>: Create a new file.
3. <insert_sibling_after>: Add a new symbol as sibling after an existing symbol in a file.
4. <append_child>: Add a new symbol as the last child of an existing symbol in a file.
5. <prepend_child>: Add a new symbol as the first child of an existing symbol in a file.
6. <delete>: Remove an existing symbol from a file. The `description` attribute is invalid for delete, but required for other ops.
All operations *require* a path.
Operations that *require* a symbol: <update>, <insert_sibling_after>, <delete>
Operations that don't allow a symbol: <create>
Operations that have an *optional* symbol: <prepend_child>, <append_child>
Example 1:
User:
```rs src/rectangle.rs
struct Rectangle {
width: f64,
height: f64,
}
impl Rectangle {
fn new(width: f64, height: f64) -> Self {
Rectangle { width, height }
}
}
```
Symbols for src/rectangle.rs:
- struct Rectangle
- impl Rectangle
- impl Rectangle fn new
<step>Add new methods 'calculate_area' and 'calculate_perimeter' to the Rectangle struct</step>
<step>Implement the 'Display' trait for the Rectangle struct</step>
What are the operations for the step: <step>Add a new method 'calculate_area' to the Rectangle struct</step>
Assistant (wrong):
<operations>
<append_child path="src/shapes.rs" symbol="impl Rectangle" description="Add calculate_area method" />
<append_child path="src/shapes.rs" symbol="impl Rectangle" description="Add calculate_perimeter method" />
</operations>
This demonstrates what NOT to do. NEVER append multiple children at the same location.
Assistant (corrected):
<operations>
<append_child path="src/shapes.rs" symbol="impl Rectangle" description="Add calculate area and perimeter methods" />
</operations>
User:
What are the operations for the step: <step>Implement the 'Display' trait for the Rectangle struct</step>
Assistant:
<operations>
<insert_sibling_after path="src/shapes.rs" symbol="impl Rectangle" description="Implement Display trait for Rectangle"/>
</operations>
Example 2:
User:
```rs src/user.rs
struct User {
pub name: String,
age: u32,
email: String,
}
impl User {
fn new(name: String, age: u32, email: String) -> Self {
User { name, age, email }
}
pub fn print_info(&self) {
println!("Name: {}, Age: {}, Email: {}", self.name, self.age, self.email);
}
}
```
Symbols for src/user.rs:
- struct User
- struct User pub name
- struct User age
- struct User email
- impl User
- impl User fn new
- impl User pub fn print_info
<step>Update the 'print_info' method to use formatted output</step>
<step>Remove the 'email' field from the User struct</step>
What are the operations for the step: <step>Update the 'print_info' method to use formatted output</step>
Assistant:
<operations>
<update path="src/user.rs" symbol="impl User fn print_info" description="Use formatted output" />
</operations>
User:
What are the operations for the step: <step>Remove the 'email' field from the User struct</step>
Assistant:
<operations>
<delete path="src/user.rs" symbol="struct User email" description="Remove the email field" />
</operations>
Example 3:
User:
```rs src/vehicle.rs
struct Vehicle {
make: String,
model: String,
year: u32,
}
impl Vehicle {
fn new(make: String, model: String, year: u32) -> Self {
Vehicle { make, model, year }
}
fn print_year(&self) {
println!("Year: {}", self.year);
}
}
```
Symbols for src/vehicle.rs:
- struct Vehicle
- struct Vehicle make
- struct Vehicle model
- struct Vehicle year
- impl Vehicle
- impl Vehicle fn new
- impl Vehicle fn print_year
<step>Add a 'use std::fmt;' statement at the beginning of the file</step>
<step>Add a new method 'start_engine' in the Vehicle impl block</step>
What are the operations for the step: <step>Add a 'use std::fmt;' statement at the beginning of the file</step>
Assistant:
<operations>
<prepend_child path="src/vehicle.rs" description="Add 'use std::fmt' statement" />
</operations>
User:
What are the operations for the step: <step>Add a new method 'start_engine' in the Vehicle impl block</step>
Assistant:
<operations>
<insert_sibling_after path="src/vehicle.rs" symbol="impl Vehicle fn new" description="Add start_engine method"/>
</operations>
Example 4:
User:
```rs src/employee.rs
struct Employee {
name: String,
position: String,
salary: u32,
department: String,
}
impl Employee {
fn new(name: String, position: String, salary: u32, department: String) -> Self {
Employee { name, position, salary, department }
}
fn print_details(&self) {
println!("Name: {}, Position: {}, Salary: {}, Department: {}",
self.name, self.position, self.salary, self.department);
}
fn give_raise(&mut self, amount: u32) {
self.salary += amount;
}
}
```
Symbols for src/employee.rs:
- struct Employee
- struct Employee name
- struct Employee position
- struct Employee salary
- struct Employee department
- impl Employee
- impl Employee fn new
- impl Employee fn print_details
- impl Employee fn give_raise
<step>Make salary an f32</step>
What are the operations for the step: <step>Make salary an f32</step>
A (wrong):
<operations>
<update path="src/employee.rs" symbol="struct Employee" description="Change the type of salary to an f32" />
<update path="src/employee.rs" symbol="struct Employee salary" description="Change the type to an f32" />
</operations>
This example demonstrates what not to do. `struct Employee salary` is a child of `struct Employee`.
A (corrected):
<operations>
<update path="src/employee.rs" symbol="struct Employee salary" description="Change the type to an f32" />
</operations>
User:
What are the correct operations for the step: <step>Remove the 'department' field and update the 'print_details' method</step>
A:
<operations>
<delete path="src/employee.rs" symbol="struct Employee department" />
<update path="src/employee.rs" symbol="impl Employee fn print_details" description="Don't print the 'department' field" />
</operations>
Now generate the operations for the following step.
Output only valid XML containing valid operations with their required attributes.
NEVER output code or any other text inside <operation> tags. If you do, you will replaced with another model.
Your response *MUST* begin with <operations> and end with </operations>:

View File

@@ -47,7 +47,7 @@
// },
"buffer_line_height": "comfortable",
// The name of a font to use for rendering text in the UI
// (On macOS) You can set this to ".SystemUIFont" to use the system font
// (On macOS) You can set this to ".SysmtemUIFont" to use the system font
"ui_font_family": "Zed Plex Sans",
// The OpenType features to enable for text in the UI
"ui_font_features": {
@@ -94,9 +94,6 @@
// 3. Never close the window
// "when_closing_with_no_tabs": "keep_window_open",
"when_closing_with_no_tabs": "platform_default",
// Whether to use the system provided dialogs for Open and Save As.
// When set to false, Zed will use the built-in keyboard-first pickers.
"use_system_path_prompts": true,
// Whether the cursor blinks in the editor.
"cursor_blink": true,
// How to highlight the current line in the editor.
@@ -119,11 +116,6 @@
// The debounce delay before re-querying the language server for completion
// documentation when not included in original completion list.
"completion_documentation_secondary_query_debounce": 300,
// Show method signatures in the editor, when inside parentheses.
"auto_signature_help": false,
/// Whether to show the signature help after completion or a bracket pair inserted.
/// If `auto_signature_help` is enabled, this setting will be treated as enabled also.
"show_signature_help_after_edits": true,
// Whether to show wrap guides (vertical rulers) in the editor.
// Setting this to true will show a guide at the 'preferred_line_length' value
// if softwrap is set to 'preferred_line_length', and will show any
@@ -136,7 +128,14 @@
// The default number of lines to expand excerpts in the multibuffer by.
"expand_excerpt_lines": 3,
// Globs to match against file paths to determine if a file is private.
"private_files": ["**/.env*", "**/*.pem", "**/*.key", "**/*.cert", "**/*.crt", "**/secrets.yml"],
"private_files": [
"**/.env*",
"**/*.pem",
"**/*.key",
"**/*.cert",
"**/*.crt",
"**/secrets.yml"
],
// Whether to use additional LSP queries to format (and amend) the code after
// every "trigger" symbol input, defined by LSP server capabilities.
"use_on_type_format": true,
@@ -263,8 +262,6 @@
// to both the horizontal and vertical delta values while scrolling.
"scroll_sensitivity": 1.0,
"relative_line_numbers": false,
// If 'search_wrap' is disabled, search result do not wrap around the end of the file.
"search_wrap": true,
// When to populate a new search's query based on the text under the cursor.
// This setting can take the following three values:
//
@@ -396,7 +393,6 @@
// 2. "gpt-4"
// 3. "gpt-4-turbo-preview"
// 4. "gpt-4o"
// 5. "gpt-4o-mini"
"default_model": "gpt-4o"
}
},
@@ -435,9 +431,7 @@
// Show git status colors in the editor tabs.
"git_status": false,
// Position of the close button on the editor tabs.
"close_position": "right",
// Whether to show the file icon for a tab.
"file_icons": false
"close_position": "right"
},
// Settings related to preview tabs.
"preview_tabs": {
@@ -545,14 +539,6 @@
// "delay_ms": 600
}
},
// Configuration for how direnv configuration should be loaded. May take 2 values:
// 1. Load direnv configuration through the shell hook, works for POSIX shells and fish.
// "load_direnv": "shell_hook"
// 2. Load direnv configuration using `direnv export json` directly.
// This can help with some shells that otherwise would not detect
// the direnv environment, such as nushell or elvish.
// "load_direnv": "direct"
"load_direnv": "shell_hook",
"inline_completions": {
// A list of globs representing files that inline completions should be disabled for.
"disabled_globs": [".env"]
@@ -705,7 +691,7 @@
//
"file_types": {
"JSON": ["flake.lock"],
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "tsconfig.json"]
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json"]
},
// The extensions that Zed should automatically install on startup.
//
@@ -728,12 +714,10 @@
}
},
"C": {
"format_on_save": "off",
"use_on_type_format": false
"format_on_save": "off"
},
"C++": {
"format_on_save": "off",
"use_on_type_format": false
"format_on_save": "off"
},
"CSS": {
"prettier": {
@@ -743,9 +727,6 @@
"Elixir": {
"language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."]
},
"Erlang": {
"language_servers": ["erlang-ls", "!elp", "..."]
},
"Go": {
"code_actions_on_format": {
"source.organizeImports": true
@@ -788,13 +769,11 @@
},
"Markdown": {
"format_on_save": "off",
"use_on_type_format": false,
"prettier": {
"allowed": true
}
},
"PHP": {
"language_servers": ["phpactor", "!intelephense", "..."],
"prettier": {
"allowed": true,
"plugins": ["@prettier/plugin-php"],
@@ -802,7 +781,7 @@
}
},
"Ruby": {
"language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "..."]
"language_servers": ["solargraph", "!ruby-lsp", "..."]
},
"SCSS": {
"prettier": {
@@ -939,13 +918,5 @@
// Examples:
// - "proxy": "socks5://localhost:10808"
// - "proxy": "http://127.0.0.1:10809"
"proxy": null,
// Set to configure aliases for the command palette.
// When typing a query which is a key of this object, the value will be used instead.
//
// Examples:
// {
// "W": "workspace::Save"
// }
"command_aliases": {}
"proxy": null
}

View File

@@ -4,8 +4,8 @@
// documentation: https://zed.dev/docs/configuring-zed
//
// To see all of Zed's default settings without changing your
// custom settings, run the `zed: Open Default Settings` command
// from the command palette
// custom settings, run the `open default settings` command
// from the command palette or from `Zed` application menu.
{
"ui_font_size": 16,
"buffer_font_size": 16,

View File

@@ -38,7 +38,6 @@
"icon.accent": "#10a793ff",
"status_bar.background": "#262933ff",
"title_bar.background": "#262933ff",
"title_bar.inactive_background": "#21242bff",
"toolbar.background": "#1e2025ff",
"tab_bar.background": "#21242bff",
"tab.inactive_background": "#21242bff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#566ddaff",
"status_bar.background": "#3a353fff",
"title_bar.background": "#3a353fff",
"title_bar.inactive_background": "#221f26ff",
"toolbar.background": "#19171cff",
"tab_bar.background": "#221f26ff",
"tab.inactive_background": "#221f26ff",
@@ -423,7 +422,6 @@
"icon.accent": "#586cdaff",
"status_bar.background": "#bfbcc5ff",
"title_bar.background": "#bfbcc5ff",
"title_bar.inactive_background": "#e6e3ebff",
"toolbar.background": "#efecf4ff",
"tab_bar.background": "#e6e3ebff",
"tab.inactive_background": "#e6e3ebff",
@@ -808,7 +806,6 @@
"icon.accent": "#6684e0ff",
"status_bar.background": "#45433bff",
"title_bar.background": "#45433bff",
"title_bar.inactive_background": "#262622ff",
"toolbar.background": "#20201dff",
"tab_bar.background": "#262622ff",
"tab.inactive_background": "#262622ff",
@@ -1193,7 +1190,6 @@
"icon.accent": "#6684dfff",
"status_bar.background": "#cecab4ff",
"title_bar.background": "#cecab4ff",
"title_bar.inactive_background": "#eeebd7ff",
"toolbar.background": "#fefbecff",
"tab_bar.background": "#eeebd7ff",
"tab.inactive_background": "#eeebd7ff",
@@ -1578,7 +1574,6 @@
"icon.accent": "#36a165ff",
"status_bar.background": "#424136ff",
"title_bar.background": "#424136ff",
"title_bar.inactive_background": "#2c2b23ff",
"toolbar.background": "#22221bff",
"tab_bar.background": "#2c2b23ff",
"tab.inactive_background": "#2c2b23ff",
@@ -1963,7 +1958,6 @@
"icon.accent": "#37a165ff",
"status_bar.background": "#c5c4b9ff",
"title_bar.background": "#c5c4b9ff",
"title_bar.inactive_background": "#ebeae3ff",
"toolbar.background": "#f4f3ecff",
"tab_bar.background": "#ebeae3ff",
"tab.inactive_background": "#ebeae3ff",
@@ -2348,7 +2342,6 @@
"icon.accent": "#407ee6ff",
"status_bar.background": "#443c39ff",
"title_bar.background": "#443c39ff",
"title_bar.inactive_background": "#27211eff",
"toolbar.background": "#1b1918ff",
"tab_bar.background": "#27211eff",
"tab.inactive_background": "#27211eff",
@@ -2733,7 +2726,6 @@
"icon.accent": "#407ee6ff",
"status_bar.background": "#ccc7c5ff",
"title_bar.background": "#ccc7c5ff",
"title_bar.inactive_background": "#e9e6e4ff",
"toolbar.background": "#f0eeedff",
"tab_bar.background": "#e9e6e4ff",
"tab.inactive_background": "#e9e6e4ff",
@@ -3118,7 +3110,6 @@
"icon.accent": "#5169ebff",
"status_bar.background": "#433a43ff",
"title_bar.background": "#433a43ff",
"title_bar.inactive_background": "#252025ff",
"toolbar.background": "#1b181bff",
"tab_bar.background": "#252025ff",
"tab.inactive_background": "#252025ff",
@@ -3503,7 +3494,6 @@
"icon.accent": "#5169ebff",
"status_bar.background": "#c6b8c6ff",
"title_bar.background": "#c6b8c6ff",
"title_bar.inactive_background": "#e0d5e0ff",
"toolbar.background": "#f7f3f7ff",
"tab_bar.background": "#e0d5e0ff",
"tab.inactive_background": "#e0d5e0ff",
@@ -3888,7 +3878,6 @@
"icon.accent": "#267eadff",
"status_bar.background": "#33444dff",
"title_bar.background": "#33444dff",
"title_bar.inactive_background": "#1c2529ff",
"toolbar.background": "#161b1dff",
"tab_bar.background": "#1c2529ff",
"tab.inactive_background": "#1c2529ff",
@@ -4273,7 +4262,6 @@
"icon.accent": "#267eadff",
"status_bar.background": "#a6cadcff",
"title_bar.background": "#a6cadcff",
"title_bar.inactive_background": "#cdeaf9ff",
"toolbar.background": "#ebf8ffff",
"tab_bar.background": "#cdeaf9ff",
"tab.inactive_background": "#cdeaf9ff",
@@ -4658,7 +4646,6 @@
"icon.accent": "#7272caff",
"status_bar.background": "#3b3535ff",
"title_bar.background": "#3b3535ff",
"title_bar.inactive_background": "#252020ff",
"toolbar.background": "#1b1818ff",
"tab_bar.background": "#252020ff",
"tab.inactive_background": "#252020ff",
@@ -5043,7 +5030,6 @@
"icon.accent": "#7272caff",
"status_bar.background": "#c1bbbbff",
"title_bar.background": "#c1bbbbff",
"title_bar.inactive_background": "#ebe3e3ff",
"toolbar.background": "#f4ececff",
"tab_bar.background": "#ebe3e3ff",
"tab.inactive_background": "#ebe3e3ff",
@@ -5428,7 +5414,6 @@
"icon.accent": "#468b8fff",
"status_bar.background": "#353f39ff",
"title_bar.background": "#353f39ff",
"title_bar.inactive_background": "#1f2621ff",
"toolbar.background": "#171c19ff",
"tab_bar.background": "#1f2621ff",
"tab.inactive_background": "#1f2621ff",
@@ -5813,7 +5798,6 @@
"icon.accent": "#488b90ff",
"status_bar.background": "#bcc5bfff",
"title_bar.background": "#bcc5bfff",
"title_bar.inactive_background": "#e3ebe6ff",
"toolbar.background": "#ecf4eeff",
"tab_bar.background": "#e3ebe6ff",
"tab.inactive_background": "#e3ebe6ff",
@@ -6198,7 +6182,6 @@
"icon.accent": "#3e62f4ff",
"status_bar.background": "#3b453bff",
"title_bar.background": "#3b453bff",
"title_bar.inactive_background": "#1f231fff",
"toolbar.background": "#131513ff",
"tab_bar.background": "#1f231fff",
"tab.inactive_background": "#1f231fff",
@@ -6583,7 +6566,6 @@
"icon.accent": "#3e61f4ff",
"status_bar.background": "#b4ceb4ff",
"title_bar.background": "#b4ceb4ff",
"title_bar.inactive_background": "#daeedaff",
"toolbar.background": "#f3faf3ff",
"tab_bar.background": "#daeedaff",
"tab.inactive_background": "#daeedaff",
@@ -6968,7 +6950,6 @@
"icon.accent": "#3e8ed0ff",
"status_bar.background": "#3e4769ff",
"title_bar.background": "#3e4769ff",
"title_bar.inactive_background": "#262f51ff",
"toolbar.background": "#202646ff",
"tab_bar.background": "#262f51ff",
"tab.inactive_background": "#262f51ff",
@@ -7353,7 +7334,6 @@
"icon.accent": "#3e8fd0ff",
"status_bar.background": "#c1c5d8ff",
"title_bar.background": "#c1c5d8ff",
"title_bar.inactive_background": "#e5e8f5ff",
"toolbar.background": "#f5f7ffff",
"tab_bar.background": "#e5e8f5ff",
"tab.inactive_background": "#e5e8f5ff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#5ac1feff",
"status_bar.background": "#313337ff",
"title_bar.background": "#313337ff",
"title_bar.inactive_background": "#1f2127ff",
"toolbar.background": "#0d1016ff",
"tab_bar.background": "#1f2127ff",
"tab.inactive_background": "#1f2127ff",
@@ -408,7 +407,6 @@
"icon.accent": "#3b9ee5ff",
"status_bar.background": "#dcdddeff",
"title_bar.background": "#dcdddeff",
"title_bar.inactive_background": "#ececedff",
"toolbar.background": "#fcfcfcff",
"tab_bar.background": "#ececedff",
"tab.inactive_background": "#ececedff",
@@ -778,7 +776,6 @@
"icon.accent": "#72cffeff",
"status_bar.background": "#464a52ff",
"title_bar.background": "#464a52ff",
"title_bar.inactive_background": "#353944ff",
"toolbar.background": "#242835ff",
"tab_bar.background": "#353944ff",
"tab.inactive_background": "#353944ff",

View File

@@ -47,7 +47,6 @@
"icon.accent": "#83a598ff",
"status_bar.background": "#4c4642ff",
"title_bar.background": "#4c4642ff",
"title_bar.inactive_background": "#3a3735ff",
"toolbar.background": "#282828ff",
"tab_bar.background": "#3a3735ff",
"tab.inactive_background": "#3a3735ff",
@@ -431,7 +430,6 @@
"icon.accent": "#83a598ff",
"status_bar.background": "#4c4642ff",
"title_bar.background": "#4c4642ff",
"title_bar.inactive_background": "#393634ff",
"toolbar.background": "#1d2021ff",
"tab_bar.background": "#393634ff",
"tab.inactive_background": "#393634ff",
@@ -815,7 +813,6 @@
"icon.accent": "#83a598ff",
"status_bar.background": "#4c4642ff",
"title_bar.background": "#4c4642ff",
"title_bar.inactive_background": "#3b3735ff",
"toolbar.background": "#32302fff",
"tab_bar.background": "#3b3735ff",
"tab.inactive_background": "#3b3735ff",
@@ -1199,7 +1196,6 @@
"icon.accent": "#0b6678ff",
"status_bar.background": "#d9c8a4ff",
"title_bar.background": "#d9c8a4ff",
"title_bar.inactive_background": "#ecddb4ff",
"toolbar.background": "#fbf1c7ff",
"tab_bar.background": "#ecddb4ff",
"tab.inactive_background": "#ecddb4ff",
@@ -1583,7 +1579,6 @@
"icon.accent": "#0b6678ff",
"status_bar.background": "#d9c8a4ff",
"title_bar.background": "#d9c8a4ff",
"title_bar.inactive_background": "#ecddb5ff",
"toolbar.background": "#f9f5d7ff",
"tab_bar.background": "#ecddb5ff",
"tab.inactive_background": "#ecddb5ff",
@@ -1967,7 +1962,6 @@
"icon.accent": "#0b6678ff",
"status_bar.background": "#d9c8a4ff",
"title_bar.background": "#d9c8a4ff",
"title_bar.inactive_background": "#ecdcb3ff",
"toolbar.background": "#f2e5bcff",
"tab_bar.background": "#ecdcb3ff",
"tab.inactive_background": "#ecdcb3ff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#74ade8ff",
"status_bar.background": "#3b414dff",
"title_bar.background": "#3b414dff",
"title_bar.inactive_background": "#2e343eff",
"toolbar.background": "#282c33ff",
"tab_bar.background": "#2f343eff",
"tab.inactive_background": "#2f343eff",
@@ -413,7 +412,6 @@
"icon.accent": "#5c78e2ff",
"status_bar.background": "#dcdcddff",
"title_bar.background": "#dcdcddff",
"title_bar.inactive_background": "#ebebecff",
"toolbar.background": "#fafafaff",
"tab_bar.background": "#ebebecff",
"tab.inactive_background": "#ebebecff",
@@ -493,7 +491,7 @@
"info": "#5c78e2ff",
"info.background": "#e2e2faff",
"info.border": "#cbcdf6ff",
"modified": "#a47a23ff",
"modified": "#dec184ff",
"modified.background": "#faf2e6ff",
"modified.border": "#f4e7d1ff",
"predictive": "#9b9ec6ff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#9bced6ff",
"status_bar.background": "#292738ff",
"title_bar.background": "#292738ff",
"title_bar.inactive_background": "#1c1b2aff",
"toolbar.background": "#191724ff",
"tab_bar.background": "#1c1b2aff",
"tab.inactive_background": "#1c1b2aff",
@@ -418,7 +417,6 @@
"icon.accent": "#57949fff",
"status_bar.background": "#dcd8d8ff",
"title_bar.background": "#dcd8d8ff",
"title_bar.inactive_background": "#fef9f2ff",
"toolbar.background": "#faf4edff",
"tab_bar.background": "#fef9f2ff",
"tab.inactive_background": "#fef9f2ff",
@@ -798,7 +796,6 @@
"icon.accent": "#9bced6ff",
"status_bar.background": "#38354eff",
"title_bar.background": "#38354eff",
"title_bar.inactive_background": "#28253cff",
"toolbar.background": "#232136ff",
"tab_bar.background": "#28253cff",
"tab.inactive_background": "#28253cff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#518b8bff",
"status_bar.background": "#333944ff",
"title_bar.background": "#333944ff",
"title_bar.inactive_background": "#2b3038ff",
"toolbar.background": "#282c33ff",
"tab_bar.background": "#2b3038ff",
"tab.inactive_background": "#2b3038ff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#278ad1ff",
"status_bar.background": "#073743ff",
"title_bar.background": "#073743ff",
"title_bar.inactive_background": "#04313bff",
"toolbar.background": "#002a35ff",
"tab_bar.background": "#04313bff",
"tab.inactive_background": "#04313bff",
@@ -408,7 +407,6 @@
"icon.accent": "#288bd1ff",
"status_bar.background": "#cfd0c4ff",
"title_bar.background": "#cfd0c4ff",
"title_bar.inactive_background": "#f3eddaff",
"toolbar.background": "#fdf6e3ff",
"tab_bar.background": "#f3eddaff",
"tab.inactive_background": "#f3eddaff",

View File

@@ -38,7 +38,6 @@
"icon.accent": "#499befff",
"status_bar.background": "#2a261cff",
"title_bar.background": "#2a261cff",
"title_bar.inactive_background": "#231f16ff",
"toolbar.background": "#1b1810ff",
"tab_bar.background": "#231f16ff",
"tab.inactive_background": "#231f16ff",

View File

@@ -107,7 +107,6 @@ impl ActivityIndicator {
Editor::for_buffer(buffer, Some(project.clone()), cx)
})),
None,
true,
cx,
);
})?;

View File

@@ -20,12 +20,6 @@ pub enum Model {
Claude3Sonnet,
#[serde(alias = "claude-3-haiku", rename = "claude-3-haiku-20240307")]
Claude3Haiku,
#[serde(rename = "custom")]
Custom {
name: String,
#[serde(default)]
max_tokens: Option<usize>,
},
}
impl Model {
@@ -39,41 +33,30 @@ impl Model {
} else if id.starts_with("claude-3-haiku") {
Ok(Self::Claude3Haiku)
} else {
Ok(Self::Custom {
name: id.to_string(),
max_tokens: None,
})
Err(anyhow!("Invalid model id: {}", id))
}
}
pub fn id(&self) -> &str {
pub fn id(&self) -> &'static str {
match self {
Model::Claude3_5Sonnet => "claude-3-5-sonnet-20240620",
Model::Claude3Opus => "claude-3-opus-20240229",
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
Model::Claude3Haiku => "claude-3-haiku-20240307",
Model::Custom { name, .. } => name,
}
}
pub fn display_name(&self) -> &str {
pub fn display_name(&self) -> &'static str {
match self {
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet",
Self::Claude3Haiku => "Claude 3 Haiku",
Self::Custom { name, .. } => name,
}
}
pub fn max_token_count(&self) -> usize {
match self {
Self::Claude3_5Sonnet
| Self::Claude3Opus
| Self::Claude3Sonnet
| Self::Claude3Haiku => 200_000,
Self::Custom { max_tokens, .. } => max_tokens.unwrap_or(200_000),
}
200_000
}
}
@@ -107,7 +90,6 @@ impl From<Role> for String {
#[derive(Debug, Serialize)]
pub struct Request {
#[serde(serialize_with = "serialize_request_model")]
pub model: Model,
pub messages: Vec<RequestMessage>,
pub stream: bool,
@@ -115,13 +97,6 @@ pub struct Request {
pub max_tokens: u32,
}
fn serialize_request_model<S>(model: &Model, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&model.id())
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct RequestMessage {
pub role: Role,
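
Illustrative sketch (not taken from the repository) of the serialize_with pattern used above, where the model enum is written out as its id string inside the request payload; names here are assumptions, and only serde (with derive) and serde_json are required.

use serde::{Serialize, Serializer};

#[derive(Debug, Clone)]
enum Model {
    Sonnet,
    Haiku,
}

impl Model {
    fn id(&self) -> &'static str {
        match self {
            Model::Sonnet => "claude-3-sonnet-20240229",
            Model::Haiku => "claude-3-haiku-20240307",
        }
    }
}

// Serialize the enum as its id string rather than as a variant name.
fn serialize_model<S: Serializer>(model: &Model, serializer: S) -> Result<S::Ok, S::Error> {
    serializer.serialize_str(model.id())
}

#[derive(Serialize)]
struct Request {
    #[serde(serialize_with = "serialize_model")]
    model: Model,
    max_tokens: u32,
}

fn main() {
    let request = Request { model: Model::Haiku, max_tokens: 1024 };
    // Prints: {"model":"claude-3-haiku-20240307","max_tokens":1024}
    println!("{}", serde_json::to_string(&request).unwrap());
}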

View File

@@ -12,28 +12,17 @@ workspace = true
path = "src/assistant.rs"
doctest = false
[features]
test-support = [
"editor/test-support",
"language/test-support",
"project/test-support",
"text/test-support",
]
[dependencies]
anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
assets.workspace = true
assistant_slash_command.workspace = true
async-watch.workspace = true
breadcrumbs.workspace = true
cargo_toml.workspace = true
chrono.workspace = true
client.workspace = true
clock.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
completion.workspace = true
editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
@@ -46,7 +35,6 @@ http.workspace = true
indexed_docs.workspace = true
indoc.workspace = true
language.workspace = true
language_model.workspace = true
log.workspace = true
menu.workspace = true
multi_buffer.workspace = true
@@ -66,26 +54,25 @@ serde_json.workspace = true
settings.workspace = true
similar.workspace = true
smol.workspace = true
strsim = "0.11"
strum.workspace = true
telemetry_events.workspace = true
terminal.workspace = true
terminal_view.workspace = true
theme.workspace = true
tiktoken-rs.workspace = true
toml.workspace = true
ui.workspace = true
util.workspace = true
uuid.workspace = true
workspace.workspace = true
picker.workspace = true
roxmltree = "0.20.0"
[dev-dependencies]
completion = { workspace = true, features = ["test-support"] }
ctor.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
language = { workspace = true, features = ["test-support"] }
log.workspace = true
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
text = { workspace = true, features = ["test-support"] }
unindent.workspace = true

View File

@@ -1,40 +1,40 @@
pub mod assistant_panel;
pub mod assistant_settings;
mod context;
pub mod context_store;
mod completion_provider;
mod context_store;
mod inline_assistant;
mod model_selector;
mod prompt_library;
mod prompts;
mod search;
mod slash_command;
mod streaming_diff;
mod terminal_inline_assistant;
pub use assistant_panel::{AssistantPanel, AssistantPanelEvent};
use assistant_settings::AssistantSettings;
use assistant_settings::{AnthropicModel, AssistantSettings, CloudModel, OllamaModel, OpenAiModel};
use assistant_slash_command::SlashCommandRegistry;
use client::{proto, Client};
use command_palette_hooks::CommandPaletteFilter;
use completion::CompletionProvider;
pub use context::*;
pub use context_store::*;
pub(crate) use completion_provider::*;
pub(crate) use context_store::*;
use fs::Fs;
use gpui::{
actions, impl_actions, AppContext, BorrowAppContext, Global, SharedString, UpdateGlobal,
};
use gpui::{actions, AppContext, Global, SharedString, UpdateGlobal};
use indexed_docs::IndexedDocsRegistry;
pub(crate) use inline_assistant::*;
use language_model::LanguageModelResponseMessage;
pub(crate) use model_selector::*;
use semantic_index::{CloudEmbeddingProvider, SemanticIndex};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use slash_command::{
active_command, default_command, diagnostics_command, docs_command, fetch_command,
file_command, now_command, project_command, prompt_command, search_command, symbols_command,
tabs_command, term_command,
file_command, now_command, project_command, prompt_command, search_command, tabs_command,
term_command,
};
use std::{
fmt::{self, Display},
sync::Arc,
};
use std::sync::Arc;
pub(crate) use streaming_diff::*;
actions!(
@@ -47,31 +47,167 @@ actions!(
InsertIntoEditor,
ToggleFocus,
ResetKey,
InlineAssist,
InsertActivePrompt,
DeployHistory,
DeployPromptLibrary,
ApplyEdit,
ConfirmCommand,
ToggleModelSelector,
DebugEditSteps
ToggleModelSelector
]
);
#[derive(Clone, Default, Deserialize, PartialEq)]
pub struct InlineAssist {
prompt: Option<String>,
#[derive(
Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize,
)]
struct MessageId(usize);
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
User,
Assistant,
System,
}
impl_actions!(assistant, [InlineAssist]);
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct MessageId(clock::Lamport);
impl MessageId {
pub fn as_u64(self) -> u64 {
self.0.as_u64()
impl Role {
pub fn cycle(&mut self) {
*self = match self {
Role::User => Role::Assistant,
Role::Assistant => Role::System,
Role::System => Role::User,
}
}
}
impl Display for Role {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Role::User => write!(f, "user"),
Role::Assistant => write!(f, "assistant"),
Role::System => write!(f, "system"),
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub enum LanguageModel {
Cloud(CloudModel),
OpenAi(OpenAiModel),
Anthropic(AnthropicModel),
Ollama(OllamaModel),
}
impl Default for LanguageModel {
fn default() -> Self {
LanguageModel::Cloud(CloudModel::default())
}
}
impl LanguageModel {
pub fn telemetry_id(&self) -> String {
match self {
LanguageModel::OpenAi(model) => format!("openai/{}", model.id()),
LanguageModel::Anthropic(model) => format!("anthropic/{}", model.id()),
LanguageModel::Cloud(model) => format!("zed.dev/{}", model.id()),
LanguageModel::Ollama(model) => format!("ollama/{}", model.id()),
}
}
pub fn display_name(&self) -> String {
match self {
LanguageModel::OpenAi(model) => model.display_name().into(),
LanguageModel::Anthropic(model) => model.display_name().into(),
LanguageModel::Cloud(model) => model.display_name().into(),
LanguageModel::Ollama(model) => model.display_name().into(),
}
}
pub fn max_token_count(&self) -> usize {
match self {
LanguageModel::OpenAi(model) => model.max_token_count(),
LanguageModel::Anthropic(model) => model.max_token_count(),
LanguageModel::Cloud(model) => model.max_token_count(),
LanguageModel::Ollama(model) => model.max_token_count(),
}
}
pub fn id(&self) -> &str {
match self {
LanguageModel::OpenAi(model) => model.id(),
LanguageModel::Anthropic(model) => model.id(),
LanguageModel::Cloud(model) => model.id(),
LanguageModel::Ollama(model) => model.id(),
}
}
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelRequestMessage {
pub role: Role,
pub content: String,
}
impl LanguageModelRequestMessage {
pub fn to_proto(&self) -> proto::LanguageModelRequestMessage {
proto::LanguageModelRequestMessage {
role: match self.role {
Role::User => proto::LanguageModelRole::LanguageModelUser,
Role::Assistant => proto::LanguageModelRole::LanguageModelAssistant,
Role::System => proto::LanguageModelRole::LanguageModelSystem,
} as i32,
content: self.content.clone(),
tool_calls: Vec::new(),
tool_call_id: None,
}
}
}
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct LanguageModelRequest {
pub model: LanguageModel,
pub messages: Vec<LanguageModelRequestMessage>,
pub stop: Vec<String>,
pub temperature: f32,
}
impl LanguageModelRequest {
pub fn to_proto(&self) -> proto::CompleteWithLanguageModel {
proto::CompleteWithLanguageModel {
model: self.model.id().to_string(),
messages: self.messages.iter().map(|m| m.to_proto()).collect(),
stop: self.stop.clone(),
temperature: self.temperature,
tool_choice: None,
tools: Vec::new(),
}
}
/// Before we send the request to the server, we can perform fixups on it appropriate to the model.
pub fn preprocess(&mut self) {
match &self.model {
LanguageModel::OpenAi(_) => {}
LanguageModel::Anthropic(_) => {}
LanguageModel::Ollama(_) => {}
LanguageModel::Cloud(model) => match model {
CloudModel::Claude3Opus
| CloudModel::Claude3Sonnet
| CloudModel::Claude3Haiku
| CloudModel::Claude3_5Sonnet => {
preprocess_anthropic_request(self);
}
_ => {}
},
}
}
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelResponseMessage {
pub role: Option<Role>,
pub content: Option<String>,
}
#[derive(Deserialize, Debug)]
pub struct LanguageModelUsage {
pub prompt_tokens: u32,
@@ -86,48 +222,19 @@ pub struct LanguageModelChoiceDelta {
pub finish_reason: Option<String>,
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub enum MessageStatus {
#[derive(Clone, Debug, Serialize, Deserialize)]
struct MessageMetadata {
role: Role,
status: MessageStatus,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
enum MessageStatus {
Pending,
Done,
Error(SharedString),
}
impl MessageStatus {
pub fn from_proto(status: proto::ContextMessageStatus) -> MessageStatus {
match status.variant {
Some(proto::context_message_status::Variant::Pending(_)) => MessageStatus::Pending,
Some(proto::context_message_status::Variant::Done(_)) => MessageStatus::Done,
Some(proto::context_message_status::Variant::Error(error)) => {
MessageStatus::Error(error.message.into())
}
None => MessageStatus::Pending,
}
}
pub fn to_proto(&self) -> proto::ContextMessageStatus {
match self {
MessageStatus::Pending => proto::ContextMessageStatus {
variant: Some(proto::context_message_status::Variant::Pending(
proto::context_message_status::Pending {},
)),
},
MessageStatus::Done => proto::ContextMessageStatus {
variant: Some(proto::context_message_status::Variant::Done(
proto::context_message_status::Done {},
)),
},
MessageStatus::Error(message) => proto::ContextMessageStatus {
variant: Some(proto::context_message_status::Variant::Error(
proto::context_message_status::Error {
message: message.to_string(),
},
)),
},
}
}
}
/// The state pertaining to the Assistant.
#[derive(Default)]
struct Assistant {
@@ -180,9 +287,8 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
})
.detach();
context_store::init(&client);
prompt_library::init(cx);
init_completion_provider(Arc::clone(&client), cx);
completion_provider::init(client.clone(), cx);
assistant_slash_command::init(cx);
register_slash_commands(cx);
assistant_panel::init(cx);
@@ -207,25 +313,10 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
.detach();
}
fn init_completion_provider(client: Arc<Client>, cx: &mut AppContext) {
let provider = assistant_settings::create_provider_from_settings(client.clone(), 0, cx);
cx.set_global(CompletionProvider::new(provider, Some(client)));
let mut settings_version = 0;
cx.observe_global::<SettingsStore>(move |cx| {
settings_version += 1;
cx.update_global::<CompletionProvider, _>(|provider, cx| {
assistant_settings::update_completion_provider_settings(provider, settings_version, cx);
})
})
.detach();
}
fn register_slash_commands(cx: &mut AppContext) {
let slash_command_registry = SlashCommandRegistry::global(cx);
slash_command_registry.register_command(file_command::FileSlashCommand, true);
slash_command_registry.register_command(active_command::ActiveSlashCommand, true);
slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true);
slash_command_registry.register_command(tabs_command::TabsSlashCommand, true);
slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
slash_command_registry.register_command(search_command::SearchSlashCommand, true);

File diff suppressed because it is too large

View File

@@ -1,19 +1,154 @@
use std::{sync::Arc, time::Duration};
use std::fmt;
use anthropic::Model as AnthropicModel;
use client::Client;
use completion::{
AnthropicCompletionProvider, CloudCompletionProvider, CompletionProvider,
LanguageModelCompletionProvider, OllamaCompletionProvider, OpenAiCompletionProvider,
use crate::{preprocess_anthropic_request, LanguageModel, LanguageModelRequest};
pub use anthropic::Model as AnthropicModel;
use gpui::Pixels;
pub use ollama::Model as OllamaModel;
pub use open_ai::Model as OpenAiModel;
use schemars::{
schema::{InstanceType, Metadata, Schema, SchemaObject},
JsonSchema,
};
use serde::{
de::{self, Visitor},
Deserialize, Deserializer, Serialize, Serializer,
};
use gpui::{AppContext, Pixels};
use language_model::{CloudModel, LanguageModel};
use ollama::Model as OllamaModel;
use open_ai::Model as OpenAiModel;
use parking_lot::RwLock;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use strum::{EnumIter, IntoEnumIterator};
#[derive(Clone, Debug, Default, PartialEq, EnumIter)]
pub enum CloudModel {
Gpt3Point5Turbo,
Gpt4,
Gpt4Turbo,
#[default]
Gpt4Omni,
Claude3_5Sonnet,
Claude3Opus,
Claude3Sonnet,
Claude3Haiku,
Custom(String),
}
impl Serialize for CloudModel {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.id())
}
}
impl<'de> Deserialize<'de> for CloudModel {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct ZedDotDevModelVisitor;
impl<'de> Visitor<'de> for ZedDotDevModelVisitor {
type Value = CloudModel;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a string for a ZedDotDevModel variant or a custom model")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
let model = CloudModel::iter()
.find(|model| model.id() == value)
.unwrap_or_else(|| CloudModel::Custom(value.to_string()));
Ok(model)
}
}
deserializer.deserialize_str(ZedDotDevModelVisitor)
}
}
impl JsonSchema for CloudModel {
fn schema_name() -> String {
"ZedDotDevModel".to_owned()
}
fn json_schema(_generator: &mut schemars::gen::SchemaGenerator) -> Schema {
let variants = CloudModel::iter()
.filter_map(|model| {
let id = model.id();
if id.is_empty() {
None
} else {
Some(id.to_string())
}
})
.collect::<Vec<_>>();
Schema::Object(SchemaObject {
instance_type: Some(InstanceType::String.into()),
enum_values: Some(variants.iter().map(|s| s.clone().into()).collect()),
metadata: Some(Box::new(Metadata {
title: Some("ZedDotDevModel".to_owned()),
default: Some(CloudModel::default().id().into()),
examples: variants.into_iter().map(Into::into).collect(),
..Default::default()
})),
..Default::default()
})
}
}
impl CloudModel {
pub fn id(&self) -> &str {
match self {
Self::Gpt3Point5Turbo => "gpt-3.5-turbo",
Self::Gpt4 => "gpt-4",
Self::Gpt4Turbo => "gpt-4-turbo-preview",
Self::Gpt4Omni => "gpt-4o",
Self::Claude3_5Sonnet => "claude-3-5-sonnet",
Self::Claude3Opus => "claude-3-opus",
Self::Claude3Sonnet => "claude-3-sonnet",
Self::Claude3Haiku => "claude-3-haiku",
Self::Custom(id) => id,
}
}
pub fn display_name(&self) -> &str {
match self {
Self::Gpt3Point5Turbo => "GPT 3.5 Turbo",
Self::Gpt4 => "GPT 4",
Self::Gpt4Turbo => "GPT 4 Turbo",
Self::Gpt4Omni => "GPT 4 Omni",
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet",
Self::Claude3Haiku => "Claude 3 Haiku",
Self::Custom(id) => id.as_str(),
}
}
pub fn max_token_count(&self) -> usize {
match self {
Self::Gpt3Point5Turbo => 2048,
Self::Gpt4 => 4096,
Self::Gpt4Turbo | Self::Gpt4Omni => 128000,
Self::Claude3_5Sonnet
| Self::Claude3Opus
| Self::Claude3Sonnet
| Self::Claude3Haiku => 200000,
Self::Custom(_) => 4096, // TODO: Make this configurable
}
}
pub fn preprocess_request(&self, request: &mut LanguageModelRequest) {
match self {
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => {
preprocess_anthropic_request(request)
}
_ => {}
}
}
}
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
@@ -473,124 +608,6 @@ fn merge<T>(target: &mut T, value: Option<T>) {
}
}
pub fn update_completion_provider_settings(
provider: &mut CompletionProvider,
version: usize,
cx: &mut AppContext,
) {
let updated = match &AssistantSettings::get_global(cx).provider {
AssistantProvider::ZedDotDev { model } => provider
.update_current_as::<_, CloudCompletionProvider>(|provider| {
provider.update(model.clone(), version);
}),
AssistantProvider::OpenAi {
model,
api_url,
low_speed_timeout_in_seconds,
available_models,
} => provider.update_current_as::<_, OpenAiCompletionProvider>(|provider| {
provider.update(
choose_openai_model(&model, &available_models),
api_url.clone(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
version,
);
}),
AssistantProvider::Anthropic {
model,
api_url,
low_speed_timeout_in_seconds,
} => provider.update_current_as::<_, AnthropicCompletionProvider>(|provider| {
provider.update(
model.clone(),
api_url.clone(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
version,
);
}),
AssistantProvider::Ollama {
model,
api_url,
low_speed_timeout_in_seconds,
} => provider.update_current_as::<_, OllamaCompletionProvider>(|provider| {
provider.update(
model.clone(),
api_url.clone(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
version,
cx,
);
}),
};
// Previously configured provider was changed to another one
if updated.is_none() {
provider.update_provider(|client| create_provider_from_settings(client, version, cx));
}
}
pub(crate) fn create_provider_from_settings(
client: Arc<Client>,
settings_version: usize,
cx: &mut AppContext,
) -> Arc<RwLock<dyn LanguageModelCompletionProvider>> {
match &AssistantSettings::get_global(cx).provider {
AssistantProvider::ZedDotDev { model } => Arc::new(RwLock::new(
CloudCompletionProvider::new(model.clone(), client.clone(), settings_version, cx),
)),
AssistantProvider::OpenAi {
model,
api_url,
low_speed_timeout_in_seconds,
available_models,
} => Arc::new(RwLock::new(OpenAiCompletionProvider::new(
choose_openai_model(&model, &available_models),
api_url.clone(),
client.http_client(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
settings_version,
available_models.clone(),
))),
AssistantProvider::Anthropic {
model,
api_url,
low_speed_timeout_in_seconds,
} => Arc::new(RwLock::new(AnthropicCompletionProvider::new(
model.clone(),
api_url.clone(),
client.http_client(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
settings_version,
))),
AssistantProvider::Ollama {
model,
api_url,
low_speed_timeout_in_seconds,
} => Arc::new(RwLock::new(OllamaCompletionProvider::new(
model.clone(),
api_url.clone(),
client.http_client(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
settings_version,
cx,
))),
}
}
/// Choose which model to use for openai provider.
/// If the model is not available, try to use the first available model, or fallback to the original model.
fn choose_openai_model(
model: &::open_ai::Model,
available_models: &[::open_ai::Model],
) -> ::open_ai::Model {
available_models
.iter()
.find(|&m| m == model)
.or_else(|| available_models.first())
.unwrap_or_else(|| model)
.clone()
}
#[cfg(test)]
mod tests {
use gpui::{AppContext, UpdateGlobal};
@@ -668,11 +685,7 @@ mod tests {
"version": "1",
"provider": {
"name": "zed.dev",
"default_model": {
"custom": {
"name": "custom-provider"
}
}
"default_model": "custom"
}
}
}"#,
@@ -683,10 +696,7 @@ mod tests {
assert_eq!(
AssistantSettings::get_global(cx).provider,
AssistantProvider::ZedDotDev {
model: CloudModel::Custom {
name: "custom-provider".into(),
max_tokens: None
}
model: CloudModel::Custom("custom".into())
}
);
}
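
A minimal sketch of the deserialization strategy shown above: a known model id maps to its enum variant, and anything else falls back to a Custom variant. The known-id list is hand-written here instead of using strum's EnumIter, just to keep the example self-contained; it assumes serde and serde_json.

use serde::{de, Deserialize, Deserializer};
use std::fmt;

#[derive(Debug, PartialEq)]
enum CloudModel {
    Gpt4Omni,
    Claude3_5Sonnet,
    Custom(String),
}

impl CloudModel {
    fn from_id(id: &str) -> Self {
        match id {
            "gpt-4o" => CloudModel::Gpt4Omni,
            "claude-3-5-sonnet" => CloudModel::Claude3_5Sonnet,
            other => CloudModel::Custom(other.to_string()),
        }
    }
}

impl<'de> Deserialize<'de> for CloudModel {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        struct ModelVisitor;

        impl<'de> de::Visitor<'de> for ModelVisitor {
            type Value = CloudModel;

            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("a model id string")
            }

            fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> {
                // Unknown ids fall back to the Custom variant instead of erroring.
                Ok(CloudModel::from_id(value))
            }
        }

        deserializer.deserialize_str(ModelVisitor)
    }
}

fn main() {
    let known: CloudModel = serde_json::from_str("\"gpt-4o\"").unwrap();
    assert_eq!(known, CloudModel::Gpt4Omni);

    let custom: CloudModel = serde_json::from_str("\"my-private-model\"").unwrap();
    assert_eq!(custom, CloudModel::Custom("my-private-model".into()));
}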

View File

@@ -0,0 +1,373 @@
mod anthropic;
mod cloud;
#[cfg(test)]
mod fake;
mod ollama;
mod open_ai;
pub use anthropic::*;
pub use cloud::*;
#[cfg(test)]
pub use fake::*;
pub use ollama::*;
pub use open_ai::*;
use parking_lot::RwLock;
use smol::lock::{Semaphore, SemaphoreGuardArc};
use crate::{
assistant_settings::{AssistantProvider, AssistantSettings},
LanguageModel, LanguageModelRequest,
};
use anyhow::Result;
use client::Client;
use futures::{future::BoxFuture, stream::BoxStream};
use gpui::{AnyView, AppContext, BorrowAppContext, Task, WindowContext};
use settings::{Settings, SettingsStore};
use std::time::Duration;
use std::{any::Any, sync::Arc};
/// Choose which model to use for openai provider.
/// If the model is not available, try to use the first available model, or fallback to the original model.
fn choose_openai_model(
model: &::open_ai::Model,
available_models: &[::open_ai::Model],
) -> ::open_ai::Model {
available_models
.iter()
.find(|&m| m == model)
.or_else(|| available_models.first())
.unwrap_or_else(|| model)
.clone()
}
pub fn init(client: Arc<Client>, cx: &mut AppContext) {
let provider = create_provider_from_settings(client.clone(), 0, cx);
cx.set_global(CompletionProvider::new(provider, Some(client)));
let mut settings_version = 0;
cx.observe_global::<SettingsStore>(move |cx| {
settings_version += 1;
cx.update_global::<CompletionProvider, _>(|provider, cx| {
provider.update_settings(settings_version, cx);
})
})
.detach();
}
pub struct CompletionResponse {
pub inner: BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>,
_lock: SemaphoreGuardArc,
}
pub trait LanguageModelCompletionProvider: Send + Sync {
fn available_models(&self, cx: &AppContext) -> Vec<LanguageModel>;
fn settings_version(&self) -> usize;
fn is_authenticated(&self) -> bool;
fn authenticate(&self, cx: &AppContext) -> Task<Result<()>>;
fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView;
fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>>;
fn model(&self) -> LanguageModel;
fn count_tokens(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>>;
fn complete(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;
fn as_any_mut(&mut self) -> &mut dyn Any;
}
const MAX_CONCURRENT_COMPLETION_REQUESTS: usize = 4;
pub struct CompletionProvider {
provider: Arc<RwLock<dyn LanguageModelCompletionProvider>>,
client: Option<Arc<Client>>,
request_limiter: Arc<Semaphore>,
}
impl CompletionProvider {
pub fn new(
provider: Arc<RwLock<dyn LanguageModelCompletionProvider>>,
client: Option<Arc<Client>>,
) -> Self {
Self {
provider,
client,
request_limiter: Arc::new(Semaphore::new(MAX_CONCURRENT_COMPLETION_REQUESTS)),
}
}
pub fn available_models(&self, cx: &AppContext) -> Vec<LanguageModel> {
self.provider.read().available_models(cx)
}
pub fn settings_version(&self) -> usize {
self.provider.read().settings_version()
}
pub fn is_authenticated(&self) -> bool {
self.provider.read().is_authenticated()
}
pub fn authenticate(&self, cx: &AppContext) -> Task<Result<()>> {
self.provider.read().authenticate(cx)
}
pub fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView {
self.provider.read().authentication_prompt(cx)
}
pub fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>> {
self.provider.read().reset_credentials(cx)
}
pub fn model(&self) -> LanguageModel {
self.provider.read().model()
}
pub fn count_tokens(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
self.provider.read().count_tokens(request, cx)
}
pub fn complete(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> Task<CompletionResponse> {
let rate_limiter = self.request_limiter.clone();
let provider = self.provider.clone();
cx.background_executor().spawn(async move {
let lock = rate_limiter.acquire_arc().await;
let response = provider.read().complete(request);
CompletionResponse {
inner: response,
_lock: lock,
}
})
}
}
impl gpui::Global for CompletionProvider {}
impl CompletionProvider {
pub fn global(cx: &AppContext) -> &Self {
cx.global::<Self>()
}
pub fn update_current_as<R, T: LanguageModelCompletionProvider + 'static>(
&mut self,
update: impl FnOnce(&mut T) -> R,
) -> Option<R> {
let mut provider = self.provider.write();
if let Some(provider) = provider.as_any_mut().downcast_mut::<T>() {
Some(update(provider))
} else {
None
}
}
pub fn update_settings(&mut self, version: usize, cx: &mut AppContext) {
let updated = match &AssistantSettings::get_global(cx).provider {
AssistantProvider::ZedDotDev { model } => self
.update_current_as::<_, CloudCompletionProvider>(|provider| {
provider.update(model.clone(), version);
}),
AssistantProvider::OpenAi {
model,
api_url,
low_speed_timeout_in_seconds,
available_models,
} => self.update_current_as::<_, OpenAiCompletionProvider>(|provider| {
provider.update(
choose_openai_model(&model, &available_models),
api_url.clone(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
version,
);
}),
AssistantProvider::Anthropic {
model,
api_url,
low_speed_timeout_in_seconds,
} => self.update_current_as::<_, AnthropicCompletionProvider>(|provider| {
provider.update(
model.clone(),
api_url.clone(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
version,
);
}),
AssistantProvider::Ollama {
model,
api_url,
low_speed_timeout_in_seconds,
} => self.update_current_as::<_, OllamaCompletionProvider>(|provider| {
provider.update(
model.clone(),
api_url.clone(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
version,
cx,
);
}),
};
// Previously configured provider was changed to another one
if updated.is_none() {
if let Some(client) = self.client.clone() {
self.provider = create_provider_from_settings(client, version, cx);
} else {
log::warn!("completion provider cannot be created because client is not set");
}
}
}
}
fn create_provider_from_settings(
client: Arc<Client>,
settings_version: usize,
cx: &mut AppContext,
) -> Arc<RwLock<dyn LanguageModelCompletionProvider>> {
match &AssistantSettings::get_global(cx).provider {
AssistantProvider::ZedDotDev { model } => Arc::new(RwLock::new(
CloudCompletionProvider::new(model.clone(), client.clone(), settings_version, cx),
)),
AssistantProvider::OpenAi {
model,
api_url,
low_speed_timeout_in_seconds,
available_models,
} => Arc::new(RwLock::new(OpenAiCompletionProvider::new(
choose_openai_model(&model, &available_models),
api_url.clone(),
client.http_client(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
settings_version,
))),
AssistantProvider::Anthropic {
model,
api_url,
low_speed_timeout_in_seconds,
} => Arc::new(RwLock::new(AnthropicCompletionProvider::new(
model.clone(),
api_url.clone(),
client.http_client(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
settings_version,
))),
AssistantProvider::Ollama {
model,
api_url,
low_speed_timeout_in_seconds,
} => Arc::new(RwLock::new(OllamaCompletionProvider::new(
model.clone(),
api_url.clone(),
client.http_client(),
low_speed_timeout_in_seconds.map(Duration::from_secs),
settings_version,
cx,
))),
}
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use gpui::AppContext;
use parking_lot::RwLock;
use settings::SettingsStore;
use smol::stream::StreamExt;
use crate::{
completion_provider::MAX_CONCURRENT_COMPLETION_REQUESTS, CompletionProvider,
FakeCompletionProvider, LanguageModelRequest,
};
#[gpui::test]
fn test_rate_limiting(cx: &mut AppContext) {
SettingsStore::test(cx);
let fake_provider = FakeCompletionProvider::setup_test(cx);
let provider = CompletionProvider::new(Arc::new(RwLock::new(fake_provider.clone())), None);
// Enqueue some requests
for i in 0..MAX_CONCURRENT_COMPLETION_REQUESTS * 2 {
let response = provider.complete(
LanguageModelRequest {
temperature: i as f32 / 10.0,
..Default::default()
},
cx,
);
cx.background_executor()
.spawn(async move {
let response = response.await;
let mut stream = response.inner.await.unwrap();
while let Some(message) = stream.next().await {
message.unwrap();
}
})
.detach();
}
cx.background_executor().run_until_parked();
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS
);
// Get the first completion request that is in flight and mark it as completed.
let completion = fake_provider
.running_completions()
.into_iter()
.next()
.unwrap();
fake_provider.finish_completion(&completion);
// Ensure that the number of in-flight completion requests is reduced.
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS - 1
);
cx.background_executor().run_until_parked();
// Ensure that another completion request was allowed to acquire the lock.
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS
);
// Mark all completion requests as finished that are in flight.
for request in fake_provider.running_completions() {
fake_provider.finish_completion(&request);
}
assert_eq!(fake_provider.completion_count(), 0);
// Wait until the background tasks acquire the lock again.
cx.background_executor().run_until_parked();
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS - 1
);
// Finish all remaining completion requests.
for request in fake_provider.running_completions() {
fake_provider.finish_completion(&request);
}
cx.background_executor().run_until_parked();
assert_eq!(fake_provider.completion_count(), 0);
}
}
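
A standalone sketch of the semaphore-based cap on concurrent completion requests used by CompletionProvider above; it assumes only the smol crate, and the simulated requests are timers rather than real completions.

use smol::lock::Semaphore;
use smol::Timer;
use std::sync::Arc;
use std::time::Duration;

const MAX_CONCURRENT_REQUESTS: usize = 4;

fn main() {
    let limiter = Arc::new(Semaphore::new(MAX_CONCURRENT_REQUESTS));

    smol::block_on(async {
        let mut tasks = Vec::new();
        for i in 0..8 {
            let limiter = limiter.clone();
            tasks.push(smol::spawn(async move {
                // At most MAX_CONCURRENT_REQUESTS tasks hold a permit at once;
                // the guard releases its permit when dropped.
                let _permit = limiter.acquire_arc().await;
                println!("request {i} started");
                Timer::after(Duration::from_millis(50)).await;
                println!("request {i} finished");
            }));
        }
        for task in tasks {
            task.await;
        }
    });
}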

View File

@@ -1,12 +1,14 @@
use crate::{count_open_ai_tokens, LanguageModelCompletionProvider};
use crate::{CompletionProvider, LanguageModel, LanguageModelRequest};
use anthropic::{stream_completion, Model as AnthropicModel, Request, RequestMessage};
use crate::{
assistant_settings::AnthropicModel, CompletionProvider, LanguageModel, LanguageModelRequest,
Role,
};
use crate::{count_open_ai_tokens, LanguageModelCompletionProvider, LanguageModelRequestMessage};
use anthropic::{stream_completion, Request, RequestMessage};
use anyhow::{anyhow, Result};
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
use gpui::{AnyView, AppContext, Task, TextStyle, View};
use gpui::{AnyView, AppContext, FontStyle, Task, TextStyle, View, WhiteSpace};
use http::HttpClient;
use language_model::Role;
use settings::Settings;
use std::time::Duration;
use std::{env, sync::Arc};
@@ -25,7 +27,7 @@ pub struct AnthropicCompletionProvider {
}
impl LanguageModelCompletionProvider for AnthropicCompletionProvider {
fn available_models(&self) -> Vec<LanguageModel> {
fn available_models(&self, _cx: &AppContext) -> Vec<LanguageModel> {
AnthropicModel::iter()
.map(LanguageModel::Anthropic)
.collect()
@@ -92,7 +94,7 @@ impl LanguageModelCompletionProvider for AnthropicCompletionProvider {
count_open_ai_tokens(request, cx.background_executor())
}
fn stream_completion(
fn complete(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
@@ -174,7 +176,7 @@ impl AnthropicCompletionProvider {
}
fn to_anthropic_request(&self, mut request: LanguageModelRequest) -> Request {
request.preprocess_anthropic();
preprocess_anthropic_request(&mut request);
let model = match request.model {
LanguageModel::Anthropic(model) => model,
@@ -211,6 +213,49 @@ impl AnthropicCompletionProvider {
}
}
pub fn preprocess_anthropic_request(request: &mut LanguageModelRequest) {
let mut new_messages: Vec<LanguageModelRequestMessage> = Vec::new();
let mut system_message = String::new();
for message in request.messages.drain(..) {
if message.content.is_empty() {
continue;
}
match message.role {
Role::User | Role::Assistant => {
if let Some(last_message) = new_messages.last_mut() {
if last_message.role == message.role {
last_message.content.push_str("\n\n");
last_message.content.push_str(&message.content);
continue;
}
}
new_messages.push(message);
}
Role::System => {
if !system_message.is_empty() {
system_message.push_str("\n\n");
}
system_message.push_str(&message.content);
}
}
}
if !system_message.is_empty() {
new_messages.insert(
0,
LanguageModelRequestMessage {
role: Role::System,
content: system_message,
},
);
}
request.messages = new_messages;
}
struct AuthenticationPrompt {
api_key: View<Editor>,
api_url: String,
@@ -257,8 +302,12 @@ impl AuthenticationPrompt {
font_features: settings.ui_font.features.clone(),
font_size: rems(0.875).into(),
font_weight: settings.ui_font.weight,
font_style: FontStyle::Normal,
line_height: relative(1.3),
..Default::default()
background_color: None,
underline: None,
strikethrough: None,
white_space: WhiteSpace::Normal,
};
EditorElement::new(
&self.api_key,
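
A trimmed-down sketch of the preprocessing above: empty messages are dropped, consecutive user/assistant messages with the same role are merged, and system messages are concatenated into a single leading system message. The types are reduced to just what the function needs.

#[derive(Clone, Copy, PartialEq, Debug)]
enum Role {
    User,
    Assistant,
    System,
}

struct Message {
    role: Role,
    content: String,
}

fn preprocess(messages: Vec<Message>) -> Vec<Message> {
    let mut merged: Vec<Message> = Vec::new();
    let mut system = String::new();

    for message in messages {
        if message.content.is_empty() {
            continue;
        }
        match message.role {
            Role::User | Role::Assistant => {
                if let Some(last) = merged.last_mut() {
                    if last.role == message.role {
                        last.content.push_str("\n\n");
                        last.content.push_str(&message.content);
                        continue;
                    }
                }
                merged.push(message);
            }
            Role::System => {
                if !system.is_empty() {
                    system.push_str("\n\n");
                }
                system.push_str(&message.content);
            }
        }
    }

    if !system.is_empty() {
        merged.insert(0, Message { role: Role::System, content: system });
    }
    merged
}

fn main() {
    let messages = vec![
        Message { role: Role::System, content: "Be terse.".into() },
        Message { role: Role::User, content: "First question".into() },
        Message { role: Role::User, content: "Second question".into() },
        Message { role: Role::Assistant, content: "".into() },
    ];
    // Yields: System "Be terse.", then User "First question\n\nSecond question".
    for message in preprocess(messages) {
        println!("{:?}: {}", message.role, message.content);
    }
}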

View File

@@ -1,12 +1,11 @@
use crate::{
count_open_ai_tokens, CompletionProvider, LanguageModel, LanguageModelCompletionProvider,
LanguageModelRequest,
assistant_settings::CloudModel, count_open_ai_tokens, CompletionProvider, LanguageModel,
LanguageModelCompletionProvider, LanguageModelRequest,
};
use anyhow::{anyhow, Result};
use client::{proto, Client};
use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt, TryFutureExt};
use gpui::{AnyView, AppContext, Task};
use language_model::CloudModel;
use std::{future, sync::Arc};
use strum::IntoEnumIterator;
use ui::prelude::*;
@@ -53,16 +52,16 @@ impl CloudCompletionProvider {
}
impl LanguageModelCompletionProvider for CloudCompletionProvider {
fn available_models(&self) -> Vec<LanguageModel> {
let mut custom_model = if matches!(self.model, CloudModel::Custom { .. }) {
Some(self.model.clone())
fn available_models(&self, _cx: &AppContext) -> Vec<LanguageModel> {
let mut custom_model = if let CloudModel::Custom(custom_model) = self.model.clone() {
Some(custom_model)
} else {
None
};
CloudModel::iter()
.filter_map(move |model| {
if let CloudModel::Custom { .. } = model {
custom_model.take()
if let CloudModel::Custom(_) = model {
Some(CloudModel::Custom(custom_model.take()?))
} else {
Some(model)
}
@@ -117,9 +116,9 @@ impl LanguageModelCompletionProvider for CloudCompletionProvider {
// Can't find a tokenizer for Claude 3, so for now just use the same as OpenAI's as an approximation.
count_open_ai_tokens(request, cx.background_executor())
}
LanguageModel::Cloud(CloudModel::Custom { name, .. }) => {
LanguageModel::Cloud(CloudModel::Custom(model)) => {
let request = self.client.request(proto::CountTokensWithLanguageModel {
model: name,
model,
messages: request
.messages
.iter()
@@ -136,7 +135,7 @@ impl LanguageModelCompletionProvider for CloudCompletionProvider {
}
}
fn stream_completion(
fn complete(
&self,
mut request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {

View File

@@ -13,6 +13,7 @@ pub struct FakeCompletionProvider {
}
impl FakeCompletionProvider {
#[cfg(test)]
pub fn setup_test(cx: &mut AppContext) -> Self {
use crate::CompletionProvider;
use parking_lot::RwLock;
@@ -23,7 +24,7 @@ impl FakeCompletionProvider {
this
}
pub fn pending_completions(&self) -> Vec<LanguageModelRequest> {
pub fn running_completions(&self) -> Vec<LanguageModelRequest> {
self.current_completion_txs
.lock()
.keys()
@@ -35,7 +36,7 @@ impl FakeCompletionProvider {
self.current_completion_txs.lock().len()
}
pub fn send_completion_chunk(&self, request: &LanguageModelRequest, chunk: String) {
pub fn send_completion(&self, request: &LanguageModelRequest, chunk: String) {
let json = serde_json::to_string(request).unwrap();
self.current_completion_txs
.lock()
@@ -45,24 +46,15 @@ impl FakeCompletionProvider {
.unwrap();
}
pub fn send_last_completion_chunk(&self, chunk: String) {
self.send_completion_chunk(self.pending_completions().last().unwrap(), chunk);
}
pub fn finish_completion(&self, request: &LanguageModelRequest) {
self.current_completion_txs
.lock()
.remove(&serde_json::to_string(request).unwrap())
.unwrap();
}
pub fn finish_last_completion(&self) {
self.finish_completion(self.pending_completions().last().unwrap());
.remove(&serde_json::to_string(request).unwrap());
}
}
impl LanguageModelCompletionProvider for FakeCompletionProvider {
fn available_models(&self) -> Vec<LanguageModel> {
fn available_models(&self, _cx: &AppContext) -> Vec<LanguageModel> {
vec![LanguageModel::default()]
}
@@ -98,7 +90,7 @@ impl LanguageModelCompletionProvider for FakeCompletionProvider {
futures::future::ready(Ok(0)).boxed()
}
fn stream_completion(
fn complete(
&self,
_request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {

View File

@@ -1,14 +1,15 @@
use crate::LanguageModelCompletionProvider;
use crate::{CompletionProvider, LanguageModel, LanguageModelRequest};
use crate::{
assistant_settings::OllamaModel, CompletionProvider, LanguageModel, LanguageModelRequest, Role,
};
use anyhow::Result;
use futures::StreamExt as _;
use futures::{future::BoxFuture, stream::BoxStream, FutureExt};
use gpui::{AnyView, AppContext, Task};
use http::HttpClient;
use language_model::Role;
use ollama::Model as OllamaModel;
use ollama::{
get_models, preload_model, stream_chat_completion, ChatMessage, ChatOptions, ChatRequest,
Role as OllamaRole,
};
use std::sync::Arc;
use std::time::Duration;
@@ -27,7 +28,7 @@ pub struct OllamaCompletionProvider {
}
impl LanguageModelCompletionProvider for OllamaCompletionProvider {
fn available_models(&self) -> Vec<LanguageModel> {
fn available_models(&self, _cx: &AppContext) -> Vec<LanguageModel> {
self.available_models
.iter()
.map(|m| LanguageModel::Ollama(m.clone()))
@@ -90,7 +91,7 @@ impl LanguageModelCompletionProvider for OllamaCompletionProvider {
async move { Ok(token_count) }.boxed()
}
fn stream_completion(
fn complete(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
@@ -261,6 +262,16 @@ impl OllamaCompletionProvider {
}
}
impl From<Role> for ollama::Role {
fn from(val: Role) -> Self {
match val {
Role::User => OllamaRole::User,
Role::Assistant => OllamaRole::Assistant,
Role::System => OllamaRole::System,
}
}
}
struct DownloadOllamaMessage {
retry_connection: Box<dyn Fn(&mut WindowContext) -> Task<Result<()>>>,
}

View File

@@ -1,13 +1,15 @@
use crate::CompletionProvider;
use crate::assistant_settings::CloudModel;
use crate::assistant_settings::{AssistantProvider, AssistantSettings};
use crate::LanguageModelCompletionProvider;
use crate::{
assistant_settings::OpenAiModel, CompletionProvider, LanguageModel, LanguageModelRequest, Role,
};
use anyhow::{anyhow, Result};
use editor::{Editor, EditorElement, EditorStyle};
use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
use gpui::{AnyView, AppContext, Task, TextStyle, View};
use gpui::{AnyView, AppContext, FontStyle, Task, TextStyle, View, WhiteSpace};
use http::HttpClient;
use language_model::{CloudModel, LanguageModel, LanguageModelRequest, Role};
use open_ai::Model as OpenAiModel;
use open_ai::{stream_completion, Request, RequestMessage};
use open_ai::{stream_completion, Request, RequestMessage, Role as OpenAiRole};
use settings::Settings;
use std::time::Duration;
use std::{env, sync::Arc};
@@ -23,7 +25,6 @@ pub struct OpenAiCompletionProvider {
http_client: Arc<dyn HttpClient>,
low_speed_timeout: Option<Duration>,
settings_version: usize,
available_models_from_settings: Vec<OpenAiModel>,
}
impl OpenAiCompletionProvider {
@@ -33,7 +34,6 @@ impl OpenAiCompletionProvider {
http_client: Arc<dyn HttpClient>,
low_speed_timeout: Option<Duration>,
settings_version: usize,
available_models_from_settings: Vec<OpenAiModel>,
) -> Self {
Self {
api_key: None,
@@ -42,7 +42,6 @@ impl OpenAiCompletionProvider {
http_client,
low_speed_timeout,
settings_version,
available_models_from_settings,
}
}
@@ -93,26 +92,30 @@ impl OpenAiCompletionProvider {
}
impl LanguageModelCompletionProvider for OpenAiCompletionProvider {
fn available_models(&self) -> Vec<LanguageModel> {
if self.available_models_from_settings.is_empty() {
let available_models = if matches!(self.model, OpenAiModel::Custom { .. }) {
vec![self.model.clone()]
} else {
OpenAiModel::iter()
.filter(|model| !matches!(model, OpenAiModel::Custom { .. }))
.collect()
};
available_models
.into_iter()
.map(LanguageModel::OpenAi)
.collect()
} else {
self.available_models_from_settings
.iter()
.cloned()
.map(LanguageModel::OpenAi)
.collect()
fn available_models(&self, cx: &AppContext) -> Vec<LanguageModel> {
if let AssistantProvider::OpenAi {
available_models, ..
} = &AssistantSettings::get_global(cx).provider
{
if !available_models.is_empty() {
return available_models
.iter()
.cloned()
.map(LanguageModel::OpenAi)
.collect();
}
}
let available_models = if matches!(self.model, OpenAiModel::Custom { .. }) {
vec![self.model.clone()]
} else {
OpenAiModel::iter()
.filter(|model| !matches!(model, OpenAiModel::Custom { .. }))
.collect()
};
available_models
.into_iter()
.map(LanguageModel::OpenAi)
.collect()
}
fn settings_version(&self) -> usize {
@@ -176,7 +179,7 @@ impl LanguageModelCompletionProvider for OpenAiCompletionProvider {
count_open_ai_tokens(request, cx.background_executor())
}
fn stream_completion(
fn complete(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
@@ -241,7 +244,6 @@ pub fn count_open_ai_tokens(
| LanguageModel::Cloud(CloudModel::Claude3Opus)
| LanguageModel::Cloud(CloudModel::Claude3Sonnet)
| LanguageModel::Cloud(CloudModel::Claude3Haiku)
| LanguageModel::Cloud(CloudModel::Custom { .. })
| LanguageModel::OpenAi(OpenAiModel::Custom { .. }) => {
// Tiktoken doesn't yet support these models, so we manually use the
// same tokenizer as GPT-4.
@@ -253,6 +255,16 @@ pub fn count_open_ai_tokens(
.boxed()
}
impl From<Role> for open_ai::Role {
fn from(val: Role) -> Self {
match val {
Role::User => OpenAiRole::User,
Role::Assistant => OpenAiRole::Assistant,
Role::System => OpenAiRole::System,
}
}
}
struct AuthenticationPrompt {
api_key: View<Editor>,
api_url: String,
@@ -299,8 +311,12 @@ impl AuthenticationPrompt {
font_features: settings.ui_font.features.clone(),
font_size: rems(0.875).into(),
font_weight: settings.ui_font.weight,
font_style: FontStyle::Normal,
line_height: relative(1.3),
..Default::default()
background_color: None,
underline: None,
strikethrough: None,
white_space: WhiteSpace::Normal,
};
EditorElement::new(
&self.api_key,

File diff suppressed because it is too large

View File

@@ -1,117 +1,97 @@
use crate::{
Context, ContextEvent, ContextId, ContextOperation, ContextVersion, SavedContext,
SavedContextMetadata,
};
use anyhow::{anyhow, Context as _, Result};
use client::{proto, telemetry::Telemetry, Client, TypedEnvelope};
use clock::ReplicaId;
use crate::{assistant_settings::OpenAiModel, MessageId, MessageMetadata};
use anyhow::{anyhow, Result};
use assistant_slash_command::SlashCommandOutputSection;
use collections::HashMap;
use fs::Fs;
use futures::StreamExt;
use fuzzy::StringMatchCandidate;
use gpui::{AppContext, AsyncAppContext, Context as _, Model, ModelContext, Task, WeakModel};
use language::LanguageRegistry;
use gpui::{AppContext, Model, ModelContext, Task};
use paths::contexts_dir;
use project::Project;
use regex::Regex;
use std::{
cmp::Reverse,
ffi::OsStr,
mem,
path::{Path, PathBuf},
sync::Arc,
time::Duration,
};
use serde::{Deserialize, Serialize};
use std::{cmp::Reverse, ffi::OsStr, path::PathBuf, sync::Arc, time::Duration};
use ui::Context;
use util::{ResultExt, TryFutureExt};
pub fn init(client: &Arc<Client>) {
client.add_model_message_handler(ContextStore::handle_advertise_contexts);
client.add_model_request_handler(ContextStore::handle_open_context);
client.add_model_message_handler(ContextStore::handle_update_context);
client.add_model_request_handler(ContextStore::handle_synchronize_contexts);
#[derive(Serialize, Deserialize)]
pub struct SavedMessage {
pub id: MessageId,
pub start: usize,
}
#[derive(Serialize, Deserialize)]
pub struct SavedContext {
pub id: Option<String>,
pub zed: String,
pub version: String,
pub text: String,
pub messages: Vec<SavedMessage>,
pub message_metadata: HashMap<MessageId, MessageMetadata>,
pub summary: String,
pub slash_command_output_sections: Vec<SlashCommandOutputSection<usize>>,
}
impl SavedContext {
pub const VERSION: &'static str = "0.3.0";
}
#[derive(Serialize, Deserialize)]
pub struct SavedContextV0_2_0 {
pub id: Option<String>,
pub zed: String,
pub version: String,
pub text: String,
pub messages: Vec<SavedMessage>,
pub message_metadata: HashMap<MessageId, MessageMetadata>,
pub summary: String,
}
#[derive(Serialize, Deserialize)]
struct SavedContextV0_1_0 {
id: Option<String>,
zed: String,
version: String,
text: String,
messages: Vec<SavedMessage>,
message_metadata: HashMap<MessageId, MessageMetadata>,
summary: String,
api_url: Option<String>,
model: OpenAiModel,
}
#[derive(Clone)]
pub struct RemoteContextMetadata {
pub id: ContextId,
pub summary: Option<String>,
pub struct SavedContextMetadata {
pub title: String,
pub path: PathBuf,
pub mtime: chrono::DateTime<chrono::Local>,
}
pub struct ContextStore {
contexts: Vec<ContextHandle>,
contexts_metadata: Vec<SavedContextMetadata>,
host_contexts: Vec<RemoteContextMetadata>,
fs: Arc<dyn Fs>,
languages: Arc<LanguageRegistry>,
telemetry: Arc<Telemetry>,
_watch_updates: Task<Option<()>>,
client: Arc<Client>,
project: Model<Project>,
project_is_shared: bool,
client_subscription: Option<client::Subscription>,
_project_subscriptions: Vec<gpui::Subscription>,
}
enum ContextHandle {
Weak(WeakModel<Context>),
Strong(Model<Context>),
}
impl ContextHandle {
fn upgrade(&self) -> Option<Model<Context>> {
match self {
ContextHandle::Weak(weak) => weak.upgrade(),
ContextHandle::Strong(strong) => Some(strong.clone()),
}
}
fn downgrade(&self) -> WeakModel<Context> {
match self {
ContextHandle::Weak(weak) => weak.clone(),
ContextHandle::Strong(strong) => strong.downgrade(),
}
}
}
impl ContextStore {
pub fn new(project: Model<Project>, cx: &mut AppContext) -> Task<Result<Model<Self>>> {
let fs = project.read(cx).fs().clone();
let languages = project.read(cx).languages().clone();
let telemetry = project.read(cx).client().telemetry().clone();
pub fn new(fs: Arc<dyn Fs>, cx: &mut AppContext) -> Task<Result<Model<Self>>> {
cx.spawn(|mut cx| async move {
const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100);
let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await;
let this = cx.new_model(|cx: &mut ModelContext<Self>| {
let mut this = Self {
contexts: Vec::new(),
contexts_metadata: Vec::new(),
host_contexts: Vec::new(),
fs,
languages,
telemetry,
_watch_updates: cx.spawn(|this, mut cx| {
async move {
while events.next().await.is_some() {
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
.log_err();
}
anyhow::Ok(())
let this = cx.new_model(|cx: &mut ModelContext<Self>| Self {
contexts_metadata: Vec::new(),
fs,
_watch_updates: cx.spawn(|this, mut cx| {
async move {
while events.next().await.is_some() {
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
.log_err();
}
.log_err()
}),
client_subscription: None,
_project_subscriptions: vec![
cx.observe(&project, Self::handle_project_changed),
cx.subscribe(&project, Self::handle_project_event),
],
project_is_shared: false,
client: project.read(cx).client(),
project: project.clone(),
};
this.handle_project_changed(project, cx);
this.synchronize_contexts(cx);
this
anyhow::Ok(())
}
.log_err()
}),
})?;
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
@@ -120,431 +100,52 @@ impl ContextStore {
})
}
async fn handle_advertise_contexts(
this: Model<Self>,
envelope: TypedEnvelope<proto::AdvertiseContexts>,
mut cx: AsyncAppContext,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
this.host_contexts = envelope
.payload
.contexts
.into_iter()
.map(|context| RemoteContextMetadata {
id: ContextId::from_proto(context.context_id),
summary: context.summary,
})
.collect();
cx.notify();
})
}
async fn handle_open_context(
this: Model<Self>,
envelope: TypedEnvelope<proto::OpenContext>,
mut cx: AsyncAppContext,
) -> Result<proto::OpenContextResponse> {
let context_id = ContextId::from_proto(envelope.payload.context_id);
let operations = this.update(&mut cx, |this, cx| {
if this.project.read(cx).is_remote() {
return Err(anyhow!("only the host contexts can be opened"));
}
let context = this
.loaded_context_for_id(&context_id, cx)
.context("context not found")?;
if context.read(cx).replica_id() != ReplicaId::default() {
return Err(anyhow!("context must be opened via the host"));
}
anyhow::Ok(
context
.read(cx)
.serialize_ops(&ContextVersion::default(), cx),
)
})??;
let operations = operations.await;
Ok(proto::OpenContextResponse {
context: Some(proto::Context { operations }),
})
}
async fn handle_update_context(
this: Model<Self>,
envelope: TypedEnvelope<proto::UpdateContext>,
mut cx: AsyncAppContext,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
let context_id = ContextId::from_proto(envelope.payload.context_id);
if let Some(context) = this.loaded_context_for_id(&context_id, cx) {
let operation_proto = envelope.payload.operation.context("invalid operation")?;
let operation = ContextOperation::from_proto(operation_proto)?;
context.update(cx, |context, cx| context.apply_ops([operation], cx))?;
}
Ok(())
})?
}
async fn handle_synchronize_contexts(
this: Model<Self>,
envelope: TypedEnvelope<proto::SynchronizeContexts>,
mut cx: AsyncAppContext,
) -> Result<proto::SynchronizeContextsResponse> {
this.update(&mut cx, |this, cx| {
if this.project.read(cx).is_remote() {
return Err(anyhow!("only the host can synchronize contexts"));
}
let mut local_versions = Vec::new();
for remote_version_proto in envelope.payload.contexts {
let remote_version = ContextVersion::from_proto(&remote_version_proto);
let context_id = ContextId::from_proto(remote_version_proto.context_id);
if let Some(context) = this.loaded_context_for_id(&context_id, cx) {
let context = context.read(cx);
let operations = context.serialize_ops(&remote_version, cx);
local_versions.push(context.version(cx).to_proto(context_id.clone()));
let client = this.client.clone();
let project_id = envelope.payload.project_id;
cx.background_executor()
.spawn(async move {
let operations = operations.await;
for operation in operations {
client.send(proto::UpdateContext {
project_id,
context_id: context_id.to_proto(),
operation: Some(operation),
})?;
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
}
this.advertise_contexts(cx);
anyhow::Ok(proto::SynchronizeContextsResponse {
contexts: local_versions,
})
})?
}
fn handle_project_changed(&mut self, _: Model<Project>, cx: &mut ModelContext<Self>) {
let is_shared = self.project.read(cx).is_shared();
let was_shared = mem::replace(&mut self.project_is_shared, is_shared);
if is_shared == was_shared {
return;
}
if is_shared {
self.contexts.retain_mut(|context| {
if let Some(strong_context) = context.upgrade() {
*context = ContextHandle::Strong(strong_context);
true
} else {
false
}
});
let remote_id = self.project.read(cx).remote_id().unwrap();
self.client_subscription = self
.client
.subscribe_to_entity(remote_id)
.log_err()
.map(|subscription| subscription.set_model(&cx.handle(), &mut cx.to_async()));
self.advertise_contexts(cx);
} else {
self.client_subscription = None;
}
}
fn handle_project_event(
&mut self,
_: Model<Project>,
event: &project::Event,
cx: &mut ModelContext<Self>,
) {
match event {
project::Event::Reshared => {
self.advertise_contexts(cx);
}
project::Event::HostReshared | project::Event::Rejoined => {
self.synchronize_contexts(cx);
}
project::Event::DisconnectedFromHost => {
self.contexts.retain_mut(|context| {
if let Some(strong_context) = context.upgrade() {
*context = ContextHandle::Weak(context.downgrade());
strong_context.update(cx, |context, cx| {
if context.replica_id() != ReplicaId::default() {
context.set_capability(language::Capability::ReadOnly, cx);
}
});
true
} else {
false
}
});
self.host_contexts.clear();
cx.notify();
}
_ => {}
}
}
pub fn create(&mut self, cx: &mut ModelContext<Self>) -> Model<Context> {
let context = cx.new_model(|cx| {
Context::local(self.languages.clone(), Some(self.telemetry.clone()), cx)
});
self.register_context(&context, cx);
context
}
pub fn open_local_context(
&mut self,
path: PathBuf,
cx: &ModelContext<Self>,
) -> Task<Result<Model<Context>>> {
if let Some(existing_context) = self.loaded_context_for_path(&path, cx) {
return Task::ready(Ok(existing_context));
}
pub fn load(&self, path: PathBuf, cx: &AppContext) -> Task<Result<SavedContext>> {
let fs = self.fs.clone();
let languages = self.languages.clone();
let telemetry = self.telemetry.clone();
let load = cx.background_executor().spawn({
let path = path.clone();
async move {
let saved_context = fs.load(&path).await?;
SavedContext::from_json(&saved_context)
}
});
cx.spawn(|this, mut cx| async move {
let saved_context = load.await?;
let context = cx.new_model(|cx| {
Context::deserialize(saved_context, path.clone(), languages, Some(telemetry), cx)
})?;
this.update(&mut cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_path(&path, cx) {
existing_context
} else {
this.register_context(&context, cx);
context
}
})
})
}
fn loaded_context_for_path(&self, path: &Path, cx: &AppContext) -> Option<Model<Context>> {
self.contexts.iter().find_map(|context| {
let context = context.upgrade()?;
if context.read(cx).path() == Some(path) {
Some(context)
} else {
None
}
})
}
fn loaded_context_for_id(&self, id: &ContextId, cx: &AppContext) -> Option<Model<Context>> {
self.contexts.iter().find_map(|context| {
let context = context.upgrade()?;
if context.read(cx).id() == id {
Some(context)
} else {
None
}
})
}
pub fn open_remote_context(
&mut self,
context_id: ContextId,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Context>>> {
let project = self.project.read(cx);
let Some(project_id) = project.remote_id() else {
return Task::ready(Err(anyhow!("project was not remote")));
};
if project.is_local() {
return Task::ready(Err(anyhow!("cannot open remote contexts as the host")));
}
if let Some(context) = self.loaded_context_for_id(&context_id, cx) {
return Task::ready(Ok(context));
}
let replica_id = project.replica_id();
let capability = project.capability();
let language_registry = self.languages.clone();
let telemetry = self.telemetry.clone();
let request = self.client.request(proto::OpenContext {
project_id,
context_id: context_id.to_proto(),
});
cx.spawn(|this, mut cx| async move {
let response = request.await?;
let context_proto = response.context.context("invalid context")?;
let context = cx.new_model(|cx| {
Context::new(
context_id.clone(),
replica_id,
capability,
language_registry,
Some(telemetry),
cx,
)
})?;
let operations = cx
.background_executor()
.spawn(async move {
context_proto
.operations
.into_iter()
.map(|op| ContextOperation::from_proto(op))
.collect::<Result<Vec<_>>>()
})
.await?;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??;
this.update(&mut cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context
} else {
this.register_context(&context, cx);
this.synchronize_contexts(cx);
context
}
})
})
}
fn register_context(&mut self, context: &Model<Context>, cx: &mut ModelContext<Self>) {
let handle = if self.project_is_shared {
ContextHandle::Strong(context.clone())
} else {
ContextHandle::Weak(context.downgrade())
};
self.contexts.push(handle);
self.advertise_contexts(cx);
cx.subscribe(context, Self::handle_context_event).detach();
}
fn handle_context_event(
&mut self,
context: Model<Context>,
event: &ContextEvent,
cx: &mut ModelContext<Self>,
) {
let Some(project_id) = self.project.read(cx).remote_id() else {
return;
};
match event {
ContextEvent::SummaryChanged => {
self.advertise_contexts(cx);
}
ContextEvent::Operation(operation) => {
let context_id = context.read(cx).id().to_proto();
let operation = operation.to_proto();
self.client
.send(proto::UpdateContext {
project_id,
context_id,
operation: Some(operation),
})
.log_err();
}
_ => {}
}
}
fn advertise_contexts(&self, cx: &AppContext) {
let Some(project_id) = self.project.read(cx).remote_id() else {
return;
};
// For now, only the host can advertise their open contexts.
if self.project.read(cx).is_remote() {
return;
}
let contexts = self
.contexts
.iter()
.rev()
.filter_map(|context| {
let context = context.upgrade()?.read(cx);
if context.replica_id() == ReplicaId::default() {
Some(proto::ContextMetadata {
context_id: context.id().to_proto(),
summary: context.summary().map(|summary| summary.text.clone()),
})
} else {
None
}
})
.collect();
self.client
.send(proto::AdvertiseContexts {
project_id,
contexts,
})
.ok();
}
fn synchronize_contexts(&mut self, cx: &mut ModelContext<Self>) {
let Some(project_id) = self.project.read(cx).remote_id() else {
return;
};
let contexts = self
.contexts
.iter()
.filter_map(|context| {
let context = context.upgrade()?.read(cx);
if context.replica_id() != ReplicaId::default() {
Some(context.version(cx).to_proto(context.id().clone()))
} else {
None
}
})
.collect();
let client = self.client.clone();
let request = self.client.request(proto::SynchronizeContexts {
project_id,
contexts,
});
cx.spawn(|this, cx| async move {
let response = request.await?;
let mut context_ids = Vec::new();
let mut operations = Vec::new();
this.read_with(&cx, |this, cx| {
for context_version_proto in response.contexts {
let context_version = ContextVersion::from_proto(&context_version_proto);
let context_id = ContextId::from_proto(context_version_proto.context_id);
if let Some(context) = this.loaded_context_for_id(&context_id, cx) {
context_ids.push(context_id);
operations.push(context.read(cx).serialize_ops(&context_version, cx));
}
}
})?;
let operations = futures::future::join_all(operations).await;
for (context_id, operations) in context_ids.into_iter().zip(operations) {
for operation in operations {
client.send(proto::UpdateContext {
project_id,
context_id: context_id.to_proto(),
operation: Some(operation),
})?;
}
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
cx.background_executor().spawn(async move {
let saved_context = fs.load(&path).await?;
let saved_context_json = serde_json::from_str::<serde_json::Value>(&saved_context)?;
match saved_context_json
.get("version")
.ok_or_else(|| anyhow!("version not found"))?
{
serde_json::Value::String(version) => match version.as_str() {
SavedContext::VERSION => {
Ok(serde_json::from_value::<SavedContext>(saved_context_json)?)
}
"0.2.0" => {
let saved_context =
serde_json::from_value::<SavedContextV0_2_0>(saved_context_json)?;
Ok(SavedContext {
id: saved_context.id,
zed: saved_context.zed,
version: saved_context.version,
text: saved_context.text,
messages: saved_context.messages,
message_metadata: saved_context.message_metadata,
summary: saved_context.summary,
slash_command_output_sections: Vec::new(),
})
}
"0.1.0" => {
let saved_context =
serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
Ok(SavedContext {
id: saved_context.id,
zed: saved_context.zed,
version: saved_context.version,
text: saved_context.text,
messages: saved_context.messages,
message_metadata: saved_context.message_metadata,
summary: saved_context.summary,
slash_command_output_sections: Vec::new(),
})
}
_ => Err(anyhow!("unrecognized saved context version: {}", version)),
},
_ => Err(anyhow!("version not found on saved context")),
}
})
pub fn search(&self, query: String, cx: &AppContext) -> Task<Vec<SavedContextMetadata>> {
@@ -577,10 +178,6 @@ impl ContextStore {
})
}
pub fn host_contexts(&self) -> &[RemoteContextMetadata] {
&self.host_contexts
}
fn reload(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
let fs = self.fs.clone();
cx.spawn(|this, mut cx| async move {

View File

@@ -1,6 +1,7 @@
use crate::{
assistant_settings::AssistantSettings, humanize_token_count, prompts::generate_content_prompt,
AssistantPanel, AssistantPanelEvent, CompletionProvider, Hunk, StreamingDiff,
AssistantPanel, AssistantPanelEvent, CompletionProvider, Hunk, LanguageModelRequest,
LanguageModelRequestMessage, Role, StreamingDiff,
};
use anyhow::{anyhow, Context as _, Result};
use client::telemetry::Telemetry;
@@ -8,36 +9,27 @@ use collections::{hash_map, HashMap, HashSet, VecDeque};
use editor::{
actions::{MoveDown, MoveUp, SelectAll},
display_map::{
BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock,
ToDisplayPoint,
},
Anchor, AnchorRangeExt, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle,
ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
};
use fs::Fs;
use futures::{
channel::mpsc,
future::LocalBoxFuture,
stream::{self, BoxStream},
SinkExt, Stream, StreamExt,
};
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
use gpui::{
point, AppContext, EventEmitter, FocusHandle, FocusableView, Global, HighlightStyle, Model,
ModelContext, Subscription, Task, TextStyle, UpdateGlobal, View, ViewContext, WeakView,
WindowContext,
point, AppContext, EventEmitter, FocusHandle, FocusableView, FontStyle, Global, HighlightStyle,
Model, ModelContext, Subscription, Task, TextStyle, UpdateGlobal, View, ViewContext, WeakView,
WhiteSpace, WindowContext,
};
use language::{Buffer, Point, Selection, TransactionId};
use language_model::{LanguageModelRequest, LanguageModelRequestMessage, Role};
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
use rope::Rope;
use settings::{update_settings_file, Settings};
use similar::TextDiff;
use smol::future::FutureExt;
use std::{
cmp,
future::Future,
mem,
cmp, mem,
ops::{Range, RangeInclusive},
pin::Pin,
sync::Arc,
@@ -45,7 +37,7 @@ use std::{
time::{Duration, Instant},
};
use theme::ThemeSettings;
use ui::{prelude::*, ContextMenu, IconButtonShape, PopoverMenu, Tooltip};
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
use util::RangeExt;
use workspace::{notifications::NotificationId, Toast, Workspace};
@@ -87,7 +79,6 @@ impl InlineAssistant {
editor: &View<Editor>,
workspace: Option<WeakView<Workspace>>,
assistant_panel: Option<&View<AssistantPanel>>,
initial_prompt: Option<String>,
cx: &mut WindowContext,
) {
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
@@ -139,11 +130,11 @@ impl InlineAssistant {
}
let assist_group_id = self.next_assist_group_id.post_inc();
let prompt_buffer =
cx.new_model(|cx| Buffer::local(initial_prompt.unwrap_or_default(), cx));
let prompt_buffer = cx.new_model(|cx| Buffer::local("", cx));
let prompt_buffer = cx.new_model(|cx| MultiBuffer::singleton(prompt_buffer, cx));
let mut assists = Vec::new();
let mut assist_blocks = Vec::new();
let mut assist_to_focus = None;
for range in codegen_ranges {
let assist_id = self.next_assist_id.post_inc();
@@ -151,7 +142,6 @@ impl InlineAssistant {
Codegen::new(
editor.read(cx).buffer().clone(),
range.clone(),
None,
self.telemetry.clone(),
cx,
)
@@ -184,18 +174,42 @@ impl InlineAssistant {
}
}
let [prompt_block_id, end_block_id] =
self.insert_assist_blocks(editor, &range, &prompt_editor, cx);
assists.push((assist_id, prompt_editor, prompt_block_id, end_block_id));
assist_blocks.push(BlockProperties {
style: BlockStyle::Sticky,
position: range.start,
height: prompt_editor.read(cx).height_in_lines,
render: build_assist_editor_renderer(&prompt_editor),
disposition: BlockDisposition::Above,
});
assist_blocks.push(BlockProperties {
style: BlockStyle::Sticky,
position: range.end,
height: 1,
render: Box::new(|cx| {
v_flex()
.h_full()
.w_full()
.border_t_1()
.border_color(cx.theme().status().info_border)
.into_any_element()
}),
disposition: BlockDisposition::Below,
});
assists.push((assist_id, prompt_editor));
}
let assist_block_ids = editor.update(cx, |editor, cx| {
editor.insert_blocks(assist_blocks, None, cx)
});
let editor_assists = self
.assists_by_editor
.entry(editor.downgrade())
.or_insert_with(|| EditorInlineAssists::new(&editor, cx));
let mut assist_group = InlineAssistGroup::new();
for (assist_id, prompt_editor, prompt_block_id, end_block_id) in assists {
for ((assist_id, prompt_editor), block_ids) in
assists.into_iter().zip(assist_block_ids.chunks_exact(2))
{
self.assists.insert(
assist_id,
InlineAssist::new(
@@ -204,8 +218,8 @@ impl InlineAssistant {
assistant_panel.is_some(),
editor,
&prompt_editor,
prompt_block_id,
end_block_id,
block_ids[0],
block_ids[1],
prompt_editor.read(cx).codegen.clone(),
workspace.clone(),
cx,
@@ -221,128 +235,6 @@ impl InlineAssistant {
}
}
#[allow(clippy::too_many_arguments)]
pub fn suggest_assist(
&mut self,
editor: &View<Editor>,
mut range: Range<Anchor>,
initial_prompt: String,
initial_insertion: Option<String>,
workspace: Option<WeakView<Workspace>>,
assistant_panel: Option<&View<AssistantPanel>>,
cx: &mut WindowContext,
) -> InlineAssistId {
let assist_group_id = self.next_assist_group_id.post_inc();
let prompt_buffer = cx.new_model(|cx| Buffer::local(&initial_prompt, cx));
let prompt_buffer = cx.new_model(|cx| MultiBuffer::singleton(prompt_buffer, cx));
let assist_id = self.next_assist_id.post_inc();
let buffer = editor.read(cx).buffer().clone();
let prepend_transaction_id = initial_insertion.and_then(|initial_insertion| {
buffer.update(cx, |buffer, cx| {
buffer.start_transaction(cx);
buffer.edit([(range.start..range.start, initial_insertion)], None, cx);
buffer.end_transaction(cx)
})
});
range.start = range.start.bias_left(&buffer.read(cx).read(cx));
range.end = range.end.bias_right(&buffer.read(cx).read(cx));
let codegen = cx.new_model(|cx| {
Codegen::new(
editor.read(cx).buffer().clone(),
range.clone(),
prepend_transaction_id,
self.telemetry.clone(),
cx,
)
});
let gutter_dimensions = Arc::new(Mutex::new(GutterDimensions::default()));
let prompt_editor = cx.new_view(|cx| {
PromptEditor::new(
assist_id,
gutter_dimensions.clone(),
self.prompt_history.clone(),
prompt_buffer.clone(),
codegen.clone(),
editor,
assistant_panel,
workspace.clone(),
self.fs.clone(),
cx,
)
});
let [prompt_block_id, end_block_id] =
self.insert_assist_blocks(editor, &range, &prompt_editor, cx);
let editor_assists = self
.assists_by_editor
.entry(editor.downgrade())
.or_insert_with(|| EditorInlineAssists::new(&editor, cx));
let mut assist_group = InlineAssistGroup::new();
self.assists.insert(
assist_id,
InlineAssist::new(
assist_id,
assist_group_id,
assistant_panel.is_some(),
editor,
&prompt_editor,
prompt_block_id,
end_block_id,
prompt_editor.read(cx).codegen.clone(),
workspace.clone(),
cx,
),
);
assist_group.assist_ids.push(assist_id);
editor_assists.assist_ids.push(assist_id);
self.assist_groups.insert(assist_group_id, assist_group);
assist_id
}
fn insert_assist_blocks(
&self,
editor: &View<Editor>,
range: &Range<Anchor>,
prompt_editor: &View<PromptEditor>,
cx: &mut WindowContext,
) -> [CustomBlockId; 2] {
let assist_blocks = vec![
BlockProperties {
style: BlockStyle::Sticky,
position: range.start,
height: prompt_editor.read(cx).height_in_lines,
render: build_assist_editor_renderer(prompt_editor),
disposition: BlockDisposition::Above,
},
BlockProperties {
style: BlockStyle::Sticky,
position: range.end,
height: 1,
render: Box::new(|cx| {
v_flex()
.h_full()
.w_full()
.border_t_1()
.border_color(cx.theme().status().info_border)
.into_any_element()
}),
disposition: BlockDisposition::Below,
},
];
editor.update(cx, |editor, cx| {
let block_ids = editor.insert_blocks(assist_blocks, None, cx);
[block_ids[0], block_ids[1]]
})
}
fn handle_prompt_editor_focus_in(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
let assist = &self.assists[&assist_id];
let Some(decorations) = assist.decorations.as_ref() else {
@@ -487,14 +379,6 @@ impl InlineAssistant {
cx.propagate();
}
fn handle_editor_release(&mut self, editor: WeakView<Editor>, cx: &mut WindowContext) {
if let Some(editor_assists) = self.assists_by_editor.get_mut(&editor) {
for assist_id in editor_assists.assist_ids.clone() {
self.finish_assist(assist_id, true, cx);
}
}
}
fn handle_editor_change(&mut self, editor: View<Editor>, cx: &mut WindowContext) {
let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) else {
return;
@@ -814,7 +698,7 @@ impl InlineAssistant {
assist_group.assist_ids.clone()
}
pub fn start_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
fn start_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
let assist = if let Some(assist) = self.assists.get_mut(&assist_id) {
assist
} else {
@@ -843,26 +727,16 @@ impl InlineAssistant {
self.prompt_history.pop_front();
}
assist.codegen.update(cx, |codegen, cx| codegen.undo(cx));
let codegen = assist.codegen.clone();
let telemetry_id = CompletionProvider::global(cx).model().telemetry_id();
let chunks: LocalBoxFuture<Result<BoxStream<Result<String>>>> =
if user_prompt.trim().to_lowercase() == "delete" {
async { Ok(stream::empty().boxed()) }.boxed_local()
} else {
let request = self.request_for_inline_assist(assist_id, cx);
let mut cx = cx.to_async();
async move {
let request = request.await?;
let chunks = cx
.update(|cx| CompletionProvider::global(cx).stream_completion(request, cx))?
.await?;
Ok(chunks.boxed())
}
.boxed_local()
};
codegen.update(cx, |codegen, cx| {
codegen.start(telemetry_id, chunks, cx);
});
let request = self.request_for_inline_assist(assist_id, cx);
cx.spawn(|mut cx| async move {
let request = request.await?;
codegen.update(&mut cx, |codegen, cx| codegen.start(request, cx))?;
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
fn request_for_inline_assist(
@@ -981,7 +855,7 @@ impl InlineAssistant {
})
}
pub fn stop_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
fn stop_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
let assist = if let Some(assist) = self.assists.get_mut(&assist_id) {
assist
} else {
@@ -1200,14 +1074,6 @@ impl EditorInlineAssists {
}
}),
_subscriptions: vec![
cx.observe_release(editor, {
let editor = editor.downgrade();
|_, cx| {
InlineAssistant::update_global(cx, |this, cx| {
this.handle_editor_release(editor, cx);
})
}
}),
cx.observe(editor, move |editor, cx| {
InlineAssistant::update_global(cx, |this, cx| {
this.handle_editor_change(editor, cx)
@@ -1272,7 +1138,7 @@ fn build_assist_editor_renderer(editor: &View<PromptEditor>) -> RenderBlock {
}
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)]
pub struct InlineAssistId(usize);
struct InlineAssistId(usize);
impl InlineAssistId {
fn post_inc(&mut self) -> InlineAssistId {
@@ -1432,7 +1298,8 @@ impl Render for PromptEditor {
PopoverMenu::new("model-switcher")
.menu(move |cx| {
ContextMenu::build(cx, |mut menu, cx| {
for model in CompletionProvider::global(cx).available_models() {
for model in CompletionProvider::global(cx).available_models(cx)
{
menu = menu.custom_entry(
{
let model = model.clone();
@@ -1461,7 +1328,7 @@ impl Render for PromptEditor {
})
.trigger(
IconButton::new("context", IconName::Settings)
.shape(IconButtonShape::Square)
.size(ButtonSize::None)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.tooltip(move |cx| {
@@ -1473,7 +1340,7 @@ impl Render for PromptEditor {
.display_name()
),
None,
"Change Model",
"Click to Change Model",
cx,
)
}),
@@ -1864,8 +1731,12 @@ impl PromptEditor {
font_features: settings.ui_font.features.clone(),
font_size: rems(0.875).into(),
font_weight: settings.ui_font.weight,
font_style: FontStyle::Normal,
line_height: relative(1.3),
..Default::default()
background_color: None,
underline: None,
strikethrough: None,
white_space: WhiteSpace::Normal,
};
EditorElement::new(
&self.editor,
@@ -1897,8 +1768,8 @@ impl InlineAssist {
include_context: bool,
editor: &View<Editor>,
prompt_editor: &View<PromptEditor>,
prompt_block_id: CustomBlockId,
end_block_id: CustomBlockId,
prompt_block_id: BlockId,
end_block_id: BlockId,
codegen: Model<Codegen>,
workspace: Option<WeakView<Workspace>>,
cx: &mut WindowContext,
@@ -1992,10 +1863,10 @@ impl InlineAssist {
}
struct InlineAssistDecorations {
prompt_block_id: CustomBlockId,
prompt_block_id: BlockId,
prompt_editor: View<PromptEditor>,
removed_line_block_ids: HashSet<CustomBlockId>,
end_block_id: CustomBlockId,
removed_line_block_ids: HashSet<BlockId>,
end_block_id: BlockId,
}
#[derive(Debug)]
@@ -2011,8 +1882,7 @@ pub struct Codegen {
range: Range<Anchor>,
edit_position: Anchor,
last_equal_ranges: Vec<Range<Anchor>>,
prepend_transaction_id: Option<TransactionId>,
generation_transaction_id: Option<TransactionId>,
transaction_id: Option<TransactionId>,
status: CodegenStatus,
generation: Task<()>,
diff: Diff,
@@ -2041,7 +1911,6 @@ impl Codegen {
pub fn new(
buffer: Model<MultiBuffer>,
range: Range<Anchor>,
prepend_transaction_id: Option<TransactionId>,
telemetry: Option<Arc<Telemetry>>,
cx: &mut ModelContext<Self>,
) -> Self {
@@ -2074,8 +1943,7 @@ impl Codegen {
range,
snapshot,
last_equal_ranges: Default::default(),
prepend_transaction_id,
generation_transaction_id: None,
transaction_id: Default::default(),
status: CodegenStatus::Idle,
generation: Task::ready(()),
diff: Diff::default(),
@@ -2091,13 +1959,8 @@ impl Codegen {
cx: &mut ModelContext<Self>,
) {
if let multi_buffer::Event::TransactionUndone { transaction_id } = event {
if self.generation_transaction_id == Some(*transaction_id) {
self.generation_transaction_id = None;
self.generation = Task::ready(());
cx.emit(CodegenEvent::Undone);
} else if self.prepend_transaction_id == Some(*transaction_id) {
self.prepend_transaction_id = None;
self.generation_transaction_id = None;
if self.transaction_id == Some(*transaction_id) {
self.transaction_id = None;
self.generation = Task::ready(());
cx.emit(CodegenEvent::Undone);
}
@@ -2108,12 +1971,7 @@ impl Codegen {
&self.last_equal_ranges
}
pub fn start(
&mut self,
telemetry_id: String,
stream: impl 'static + Future<Output = Result<BoxStream<'static, Result<String>>>>,
cx: &mut ModelContext<Self>,
) {
pub fn start(&mut self, prompt: LanguageModelRequest, cx: &mut ModelContext<Self>) {
let range = self.range.clone();
let snapshot = self.snapshot.clone();
let selected_text = snapshot
@@ -2127,17 +1985,15 @@ impl Codegen {
.next()
.unwrap_or_else(|| snapshot.indent_size_for_line(MultiBufferRow(selection_start.row)));
let model_telemetry_id = prompt.model.telemetry_id();
let response = CompletionProvider::global(cx).complete(prompt, cx);
let telemetry = self.telemetry.clone();
self.edit_position = range.start;
self.diff = Diff::default();
self.status = CodegenStatus::Pending;
if let Some(transaction_id) = self.generation_transaction_id.take() {
self.buffer
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
}
self.generation = cx.spawn(|this, mut cx| {
async move {
let chunks = stream.await;
let response = response.await;
let generate = async {
let mut edit_start = range.start.to_offset(&snapshot);
@@ -2147,7 +2003,7 @@ impl Codegen {
let mut response_latency = None;
let request_start = Instant::now();
let diff = async {
let chunks = StripInvalidSpans::new(chunks?);
let chunks = StripInvalidSpans::new(response.inner.await?);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
@@ -2230,7 +2086,7 @@ impl Codegen {
telemetry.report_assistant_event(
None,
telemetry_events::AssistantKind::Inline,
telemetry_id,
model_telemetry_id,
response_latency,
error_message,
);
@@ -2280,7 +2136,7 @@ impl Codegen {
});
if let Some(transaction) = transaction {
if let Some(first_transaction) = this.generation_transaction_id {
if let Some(first_transaction) = this.transaction_id {
// Group all assistant edits into the first transaction.
this.buffer.update(cx, |buffer, cx| {
buffer.merge_transactions(
@@ -2290,7 +2146,7 @@ impl Codegen {
)
});
} else {
this.generation_transaction_id = Some(transaction);
this.transaction_id = Some(transaction);
this.buffer.update(cx, |buffer, cx| {
buffer.finalize_last_transaction(cx)
});
@@ -2333,12 +2189,7 @@ impl Codegen {
}
pub fn undo(&mut self, cx: &mut ModelContext<Self>) {
if let Some(transaction_id) = self.prepend_transaction_id.take() {
self.buffer
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
}
if let Some(transaction_id) = self.generation_transaction_id.take() {
if let Some(transaction_id) = self.transaction_id.take() {
self.buffer
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
}
@@ -2600,8 +2451,11 @@ fn merge_ranges(ranges: &mut Vec<Range<Anchor>>, buffer: &MultiBufferSnapshot) {
#[cfg(test)]
mod tests {
use std::sync::Arc;
use crate::FakeCompletionProvider;
use super::*;
use completion::FakeCompletionProvider;
use futures::stream::{self};
use gpui::{Context, TestAppContext};
use indoc::indoc;
@@ -2612,7 +2466,6 @@ mod tests {
use rand::prelude::*;
use serde::Serialize;
use settings::SettingsStore;
use std::{future, sync::Arc};
#[derive(Serialize)]
pub struct DummyCompletionRequest {
@@ -2622,7 +2475,7 @@ mod tests {
#[gpui::test(iterations = 10)]
async fn test_transform_autoindent(cx: &mut TestAppContext, mut rng: StdRng) {
cx.set_global(cx.update(SettingsStore::test));
cx.update(|cx| FakeCompletionProvider::setup_test(cx));
let provider = cx.update(|cx| FakeCompletionProvider::setup_test(cx));
cx.update(language_settings::init);
let text = indoc! {"
@@ -2640,17 +2493,14 @@ mod tests {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5))
});
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, None, cx));
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, cx));
let (chunks_tx, chunks_rx) = mpsc::unbounded();
codegen.update(cx, |codegen, cx| {
codegen.start(
String::new(),
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
cx,
)
codegen.start(LanguageModelRequest::default(), cx)
});
cx.background_executor.run_until_parked();
let mut new_text = concat!(
" let mut x = 0;\n",
" while x < 10 {\n",
@@ -2661,11 +2511,11 @@ mod tests {
let max_len = cmp::min(new_text.len(), 10);
let len = rng.gen_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
chunks_tx.unbounded_send(chunk.to_string()).unwrap();
provider.send_completion(&LanguageModelRequest::default(), chunk.into());
new_text = suffix;
cx.background_executor.run_until_parked();
}
drop(chunks_tx);
provider.finish_completion(&LanguageModelRequest::default());
cx.background_executor.run_until_parked();
assert_eq!(
@@ -2686,6 +2536,7 @@ mod tests {
cx: &mut TestAppContext,
mut rng: StdRng,
) {
let provider = cx.update(|cx| FakeCompletionProvider::setup_test(cx));
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
@@ -2701,16 +2552,10 @@ mod tests {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 6))..snapshot.anchor_after(Point::new(1, 6))
});
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, None, cx));
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, cx));
let (chunks_tx, chunks_rx) = mpsc::unbounded();
codegen.update(cx, |codegen, cx| {
codegen.start(
String::new(),
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
cx,
)
});
let request = LanguageModelRequest::default();
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
cx.background_executor.run_until_parked();
@@ -2724,11 +2569,11 @@ mod tests {
let max_len = cmp::min(new_text.len(), 10);
let len = rng.gen_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
chunks_tx.unbounded_send(chunk.to_string()).unwrap();
provider.send_completion(&LanguageModelRequest::default(), chunk.into());
new_text = suffix;
cx.background_executor.run_until_parked();
}
drop(chunks_tx);
provider.finish_completion(&LanguageModelRequest::default());
cx.background_executor.run_until_parked();
assert_eq!(
@@ -2749,7 +2594,7 @@ mod tests {
cx: &mut TestAppContext,
mut rng: StdRng,
) {
cx.update(|cx| FakeCompletionProvider::setup_test(cx));
let provider = cx.update(|cx| FakeCompletionProvider::setup_test(cx));
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
@@ -2765,16 +2610,10 @@ mod tests {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 2))..snapshot.anchor_after(Point::new(1, 2))
});
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, None, cx));
let codegen = cx.new_model(|cx| Codegen::new(buffer.clone(), range, None, cx));
let (chunks_tx, chunks_rx) = mpsc::unbounded();
codegen.update(cx, |codegen, cx| {
codegen.start(
String::new(),
future::ready(Ok(chunks_rx.map(|chunk| Ok(chunk)).boxed())),
cx,
)
});
let request = LanguageModelRequest::default();
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
cx.background_executor.run_until_parked();
@@ -2788,11 +2627,11 @@ mod tests {
let max_len = cmp::min(new_text.len(), 10);
let len = rng.gen_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
chunks_tx.unbounded_send(chunk.to_string()).unwrap();
provider.send_completion(&LanguageModelRequest::default(), chunk.into());
new_text = suffix;
cx.background_executor.run_until_parked();
}
drop(chunks_tx);
provider.finish_completion(&LanguageModelRequest::default());
cx.background_executor.run_until_parked();
assert_eq!(

View File

@@ -23,7 +23,7 @@ impl RenderOnce for ModelSelector {
.with_handle(self.handle)
.menu(move |cx| {
ContextMenu::build(cx, |mut menu, cx| {
for model in CompletionProvider::global(cx).available_models() {
for model in CompletionProvider::global(cx).available_models(cx) {
menu = menu.custom_entry(
{
let model = model.clone();

View File

@@ -1,9 +1,9 @@
use crate::{
slash_command::SlashCommandCompletionProvider, AssistantPanel, CompletionProvider,
InlineAssist, InlineAssistant,
InlineAssist, InlineAssistant, LanguageModelRequest, LanguageModelRequestMessage, Role,
};
use anyhow::{anyhow, Result};
use assets::Assets;
use assistant_slash_command::SlashCommandRegistry;
use chrono::{DateTime, Utc};
use collections::{HashMap, HashSet};
use editor::{actions::Tab, CurrentLineHighlight, Editor, EditorElement, EditorEvent, EditorStyle};
@@ -13,13 +13,12 @@ use futures::{
};
use fuzzy::StringMatchCandidate;
use gpui::{
actions, point, size, transparent_black, AppContext, AssetSource, BackgroundExecutor, Bounds,
EventEmitter, Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
actions, point, size, transparent_black, AppContext, BackgroundExecutor, Bounds, EventEmitter,
Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
TitlebarOptions, UpdateGlobal, View, WindowBounds, WindowHandle, WindowOptions,
};
use heed::{types::SerdeBincode, Database, RoTxn};
use language::{language_settings::SoftWrap, Buffer, LanguageRegistry};
use language_model::{LanguageModelRequest, LanguageModelRequestMessage, Role};
use parking_lot::RwLock;
use picker::{Picker, PickerDelegate};
use rope::Rope;
@@ -270,7 +269,7 @@ impl PickerDelegate for PromptPickerDelegate {
.flex_none()
.py_1()
.px_2()
.mx_1()
.mx_2()
.child(editor.clone())
}
}
@@ -449,6 +448,7 @@ impl PromptLibrary {
self.set_active_prompt(Some(prompt_id), cx);
} else if let Some(prompt_metadata) = self.store.metadata(prompt_id) {
let language_registry = self.language_registry.clone();
let commands = SlashCommandRegistry::global(cx);
let prompt = self.store.load(prompt_id);
self.pending_load = cx.spawn(|this, mut cx| async move {
let prompt = prompt.await;
@@ -477,7 +477,7 @@ impl PromptLibrary {
editor.set_use_modal_editing(false);
editor.set_current_line_highlight(Some(CurrentLineHighlight::None));
editor.set_completion_provider(Box::new(
SlashCommandCompletionProvider::new(None, None),
SlashCommandCompletionProvider::new(commands, None, None),
));
if focus {
editor.focus(cx);
@@ -629,7 +629,7 @@ impl PromptLibrary {
self.picker.update(cx, |picker, cx| picker.focus(cx));
}
pub fn inline_assist(&mut self, action: &InlineAssist, cx: &mut ViewContext<Self>) {
pub fn inline_assist(&mut self, _: &InlineAssist, cx: &mut ViewContext<Self>) {
let Some(active_prompt_id) = self.active_prompt_id else {
cx.propagate();
return;
@@ -637,10 +637,9 @@ impl PromptLibrary {
let prompt_editor = &self.prompt_editors[&active_prompt_id].body_editor;
let provider = CompletionProvider::global(cx);
let initial_prompt = action.prompt.clone();
if provider.is_authenticated() {
InlineAssistant::update_global(cx, |assistant, cx| {
assistant.assist(&prompt_editor, None, None, initial_prompt, cx)
assistant.assist(&prompt_editor, None, None, cx)
})
} else {
for window in cx.windows() {
@@ -769,7 +768,6 @@ impl PromptLibrary {
.capture_action(cx.listener(Self::focus_active_prompt))
.bg(cx.theme().colors().panel_background)
.h_full()
.px_1()
.w_1_3()
.overflow_x_hidden()
.child(
@@ -1299,17 +1297,6 @@ impl PromptStore {
fn first(&self) -> Option<PromptMetadata> {
self.metadata_cache.read().metadata.first().cloned()
}
pub fn operations_prompt(&self) -> String {
String::from_utf8(
Assets
.load("prompts/operations.md")
.unwrap()
.unwrap()
.to_vec(),
)
.unwrap()
}
}
/// Wraps a shared future to a prompt store so it can be assigned as a context global.

View File

@@ -0,0 +1,171 @@
use language::Rope;
use std::ops::Range;
/// Search the given buffer for the given substring, ignoring any differences
/// in line indentation between the query and the buffer.
///
/// Returns the range of byte offsets in the buffer covering the entire lines
/// of the best match, if one is found.
pub fn fuzzy_search_lines(haystack: &Rope, needle: &str) -> Option<Range<usize>> {
const SIMILARITY_THRESHOLD: f64 = 0.8;
let mut best_match: Option<(Range<usize>, f64)> = None; // (range, score)
let mut haystack_lines = haystack.chunks().lines();
let mut haystack_line_start = 0;
while let Some(mut haystack_line) = haystack_lines.next() {
let next_haystack_line_start = haystack_line_start + haystack_line.len() + 1;
let mut advanced_to_next_haystack_line = false;
let mut matched = true;
let match_start = haystack_line_start;
let mut match_end = next_haystack_line_start;
let mut match_score = 0.0;
let mut needle_lines = needle.lines().peekable();
while let Some(needle_line) = needle_lines.next() {
let similarity = line_similarity(haystack_line, needle_line);
if similarity >= SIMILARITY_THRESHOLD {
match_end = haystack_lines.offset();
match_score += similarity;
if needle_lines.peek().is_some() {
if let Some(next_haystack_line) = haystack_lines.next() {
advanced_to_next_haystack_line = true;
haystack_line = next_haystack_line;
} else {
matched = false;
break;
}
} else {
break;
}
} else {
matched = false;
break;
}
}
if matched
&& best_match
.as_ref()
.map(|(_, best_score)| match_score > *best_score)
.unwrap_or(true)
{
best_match = Some((match_start..match_end, match_score));
}
if advanced_to_next_haystack_line {
haystack_lines.seek(next_haystack_line_start);
}
haystack_line_start = next_haystack_line_start;
}
best_match.map(|(range, _)| range)
}
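// Illustrative sketch only (not part of this change): a minimal call, assuming
// `Rope` implements `From<&str>`.
fn _fuzzy_search_example() {
    let haystack = Rope::from("fn main() {\n    println!(\"hi\");\n}\n");
    // The needle carries no indentation, but the match still succeeds because each
    // line pair only needs a trimmed Jaro-Winkler similarity of at least 0.8.
    let range = fuzzy_search_lines(&haystack, "println!(\"hi\");");
    // The returned range covers the whole matching line, indentation included.
    assert!(range.is_some());
}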
/// Calculates the similarity between two lines, ignoring leading and trailing whitespace,
/// using the Jaro-Winkler distance.
///
/// Returns a value between 0.0 and 1.0, where 1.0 indicates an exact match.
fn line_similarity(line1: &str, line2: &str) -> f64 {
strsim::jaro_winkler(line1.trim(), line2.trim())
}
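// Illustrative sketch only (not part of this change): a rough feel for the 0.8
// threshold used above. A lightly misspelled line still clears it (the `asst_eq!`
// case in the tests below depends on this), while an unrelated line scores well
// under it.
#[cfg(test)]
#[test]
fn _line_similarity_threshold_intuition() {
    assert!(line_similarity("  assert_eq!(", "asst_eq!(") >= 0.8);
    assert!(line_similarity("fn main() {", "assert_eq!(") < 0.8);
}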
#[cfg(test)]
mod test {
use super::*;
use gpui::{AppContext, Context as _};
use language::Buffer;
use unindent::Unindent as _;
use util::test::marked_text_ranges;
#[gpui::test]
fn test_fuzzy_search_lines(cx: &mut AppContext) {
let (text, expected_ranges) = marked_text_ranges(
&r#"
fn main() {
if a() {
assert_eq!(
1 + 2,
does_not_match,
);
}
println!("hi");
assert_eq!(
1 + 2,
3,
); // this last line does not match
« assert_eq!(
1 + 2,
3,
);
»
« assert_eq!(
"something",
"else",
);
»
}
"#
.unindent(),
false,
);
let buffer = cx.new_model(|cx| Buffer::local(&text, cx));
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
let actual_range = fuzzy_search_lines(
snapshot.as_rope(),
&"
assert_eq!(
1 + 2,
3,
);
"
.unindent(),
)
.unwrap();
assert_eq!(actual_range, expected_ranges[0]);
let actual_range = fuzzy_search_lines(
snapshot.as_rope(),
&"
assert_eq!(
1 + 2,
3,
);
"
.unindent(),
)
.unwrap();
assert_eq!(actual_range, expected_ranges[0]);
let actual_range = fuzzy_search_lines(
snapshot.as_rope(),
&"
asst_eq!(
\"something\",
\"els\"
)
"
.unindent(),
)
.unwrap();
assert_eq!(actual_range, expected_ranges[1]);
let actual_range = fuzzy_search_lines(
snapshot.as_rope(),
&"
assert_eq!(
2 + 1,
3,
);
"
.unindent(),
);
assert_eq!(actual_range, None);
}
}

View File

@@ -27,11 +27,11 @@ pub mod now_command;
pub mod project_command;
pub mod prompt_command;
pub mod search_command;
pub mod symbols_command;
pub mod tabs_command;
pub mod term_command;
pub(crate) struct SlashCommandCompletionProvider {
commands: Arc<SlashCommandRegistry>,
cancel_flag: Mutex<Arc<AtomicBool>>,
editor: Option<WeakView<ContextEditor>>,
workspace: Option<WeakView<Workspace>>,
@@ -46,12 +46,14 @@ pub(crate) struct SlashCommandLine {
impl SlashCommandCompletionProvider {
pub fn new(
commands: Arc<SlashCommandRegistry>,
editor: Option<WeakView<ContextEditor>>,
workspace: Option<WeakView<Workspace>>,
) -> Self {
Self {
cancel_flag: Mutex::new(Arc::new(AtomicBool::new(false))),
editor,
commands,
workspace,
}
}
@@ -63,8 +65,8 @@ impl SlashCommandCompletionProvider {
name_range: Range<Anchor>,
cx: &mut WindowContext,
) -> Task<Result<Vec<project::Completion>>> {
let commands = SlashCommandRegistry::global(cx);
let candidates = commands
let candidates = self
.commands
.command_names()
.into_iter()
.enumerate()
@@ -74,6 +76,7 @@ impl SlashCommandCompletionProvider {
char_bag: def.as_ref().into(),
})
.collect::<Vec<_>>();
let commands = self.commands.clone();
let command_name = command_name.to_string();
let editor = self.editor.clone();
let workspace = self.workspace.clone();
@@ -152,8 +155,7 @@ impl SlashCommandCompletionProvider {
flag.store(true, SeqCst);
*flag = new_cancel_flag.clone();
let commands = SlashCommandRegistry::global(cx);
if let Some(command) = commands.command(command_name) {
if let Some(command) = self.commands.command(command_name) {
let completions = command.complete_argument(
argument,
new_cancel_flag.clone(),

View File

@@ -1,16 +1,14 @@
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::time::Duration;
use anyhow::{anyhow, bail, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
};
use gpui::{AppContext, BackgroundExecutor, Model, Task, WeakView};
use gpui::{AppContext, Model, Task, WeakView};
use indexed_docs::{
DocsDotRsProvider, IndexedDocsRegistry, IndexedDocsStore, LocalRustdocProvider, PackageName,
ProviderId,
IndexedDocsRegistry, IndexedDocsStore, LocalProvider, PackageName, ProviderId, RustdocIndexer,
};
use language::LspAdapterDelegate;
use project::{Project, ProjectPath};
@@ -36,22 +34,22 @@ impl DocsSlashCommand {
))
}
/// Ensures that the indexed doc providers for Rust are registered.
/// Ensures that the rustdoc provider is registered.
///
/// Ideally we would do this sooner, but we need to wait until we're able to
/// access the workspace so we can read the project.
fn ensure_rust_doc_providers_are_registered(
fn ensure_rustdoc_provider_is_registered(
&self,
workspace: Option<WeakView<Workspace>>,
cx: &mut AppContext,
) {
let indexed_docs_registry = IndexedDocsRegistry::global(cx);
if indexed_docs_registry
.get_provider_store(LocalRustdocProvider::id())
.get_provider_store(ProviderId::rustdoc())
.is_none()
{
let index_provider_deps = maybe!({
let workspace = workspace.clone().ok_or_else(|| anyhow!("no workspace"))?;
let workspace = workspace.ok_or_else(|| anyhow!("no workspace"))?;
let workspace = workspace
.upgrade()
.ok_or_else(|| anyhow!("workspace was dropped"))?;
@@ -65,80 +63,11 @@ impl DocsSlashCommand {
});
if let Some((fs, cargo_workspace_root)) = index_provider_deps.log_err() {
indexed_docs_registry.register_provider(Box::new(LocalRustdocProvider::new(
fs,
cargo_workspace_root,
)));
indexed_docs_registry.register_provider(Box::new(RustdocIndexer::new(Box::new(
LocalProvider::new(fs, cargo_workspace_root),
))));
}
}
if indexed_docs_registry
.get_provider_store(DocsDotRsProvider::id())
.is_none()
{
let http_client = maybe!({
let workspace = workspace.ok_or_else(|| anyhow!("no workspace"))?;
let workspace = workspace
.upgrade()
.ok_or_else(|| anyhow!("workspace was dropped"))?;
let project = workspace.read(cx).project().clone();
anyhow::Ok(project.read(cx).client().http_client().clone())
});
if let Some(http_client) = http_client.log_err() {
indexed_docs_registry
.register_provider(Box::new(DocsDotRsProvider::new(http_client)));
}
}
}
/// Runs just-in-time indexing for a given package, in case the slash command
/// is run without any entries existing in the index.
fn run_just_in_time_indexing(
store: Arc<IndexedDocsStore>,
key: String,
package: PackageName,
executor: BackgroundExecutor,
) -> Task<()> {
executor.clone().spawn(async move {
let (prefix, needs_full_index) = if let Some((prefix, _)) = key.split_once('*') {
// If we have a wildcard in the search, we want to wait until
// we've completely finished indexing so we get a full set of
// results for the wildcard.
(prefix.to_string(), true)
} else {
(key, false)
};
// If we already have some entries, we assume that we've indexed the package before
// and don't need to do it again.
let has_any_entries = store
.any_with_prefix(prefix.clone())
.await
.unwrap_or_default();
if has_any_entries {
return ();
};
let index_task = store.clone().index(package.clone());
if needs_full_index {
_ = index_task.await;
} else {
loop {
executor.timer(Duration::from_millis(200)).await;
if store
.any_with_prefix(prefix.clone())
.await
.unwrap_or_default()
|| !store.is_indexing(&package)
{
break;
}
}
}
})
}
}
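// Illustrative sketch only (not part of this change): how the wildcard check above
// classifies a key. `split_once('*')` keeps everything before the first `*` as the
// prefix, and a full index is only awaited when a wildcard is present. The example
// keys are hypothetical.
fn _classify_docs_key(key: &str) -> (String, bool) {
    match key.split_once('*') {
        Some((prefix, _)) => (prefix.to_string(), true), // e.g. "serde::de::*" -> ("serde::de::", true)
        None => (key.to_string(), false),                // e.g. "serde::Deserialize" -> ("serde::Deserialize", false)
    }
}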
@@ -166,7 +95,7 @@ impl SlashCommand for DocsSlashCommand {
workspace: Option<WeakView<Workspace>>,
cx: &mut AppContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
self.ensure_rust_doc_providers_are_registered(workspace, cx);
self.ensure_rustdoc_provider_is_registered(workspace, cx);
let indexed_docs_registry = IndexedDocsRegistry::global(cx);
let args = DocsSlashCommandArgs::parse(&query);
@@ -192,14 +121,6 @@ impl SlashCommand for DocsSlashCommand {
match args {
DocsSlashCommandArgs::NoProvider => {
let providers = indexed_docs_registry.list_providers();
if providers.is_empty() {
return Ok(vec![ArgumentCompletion {
label: "No available docs providers.".to_string(),
new_text: String::new(),
run_command: false,
}]);
}
Ok(providers
.into_iter()
.map(|provider| ArgumentCompletion {
@@ -250,72 +171,46 @@ impl SlashCommand for DocsSlashCommand {
};
let args = DocsSlashCommandArgs::parse(argument);
let executor = cx.background_executor().clone();
let task = cx.background_executor().spawn({
let text = cx.background_executor().spawn({
let store = args
.provider()
.ok_or_else(|| anyhow!("no docs provider specified"))
.and_then(|provider| IndexedDocsStore::try_global(provider, cx));
async move {
let (provider, key) = match args.clone() {
match args {
DocsSlashCommandArgs::NoProvider => bail!("no docs provider specified"),
DocsSlashCommandArgs::SearchPackageDocs {
provider, package, ..
} => (provider, package),
} => {
let store = store?;
let item_docs = store.load(package.clone()).await?;
anyhow::Ok((provider, package, item_docs.to_string()))
}
DocsSlashCommandArgs::SearchItemDocs {
provider,
item_path,
..
} => (provider, item_path),
};
} => {
let store = store?;
let item_docs = store.load(item_path.clone()).await?;
let store = store?;
if let Some(package) = args.package() {
Self::run_just_in_time_indexing(store.clone(), key.clone(), package, executor)
.await;
}
let (text, ranges) = if let Some((prefix, _)) = key.split_once('*') {
let docs = store.load_many_by_prefix(prefix.to_string()).await?;
let mut text = String::new();
let mut ranges = Vec::new();
for (key, docs) in docs {
let prev_len = text.len();
text.push_str(&docs.0);
text.push_str("\n");
ranges.push((key, prev_len..text.len()));
text.push_str("\n");
anyhow::Ok((provider, item_path, item_docs.to_string()))
}
(text, ranges)
} else {
let item_docs = store.load(key.clone()).await?;
let text = item_docs.to_string();
let range = 0..text.len();
(text, vec![(key, range)])
};
anyhow::Ok((provider, text, ranges))
}
}
});
cx.foreground_executor().spawn(async move {
let (provider, text, ranges) = task.await?;
let (provider, path, text) = text.await?;
let range = 0..text.len();
Ok(SlashCommandOutput {
text,
sections: ranges
.into_iter()
.map(|(key, range)| SlashCommandOutputSection {
range,
icon: IconName::FileDoc,
label: format!("docs ({provider}): {key}",).into(),
})
.collect(),
sections: vec![SlashCommandOutputSection {
range,
icon: IconName::FileRust,
label: format!("docs ({provider}): {path}",).into(),
}],
run_commands_in_text: false,
})
})
@@ -326,7 +221,7 @@ fn is_item_path_delimiter(char: char) -> bool {
!char.is_alphanumeric() && char != '-' && char != '_'
}
#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, PartialEq)]
pub(crate) enum DocsSlashCommandArgs {
NoProvider,
SearchPackageDocs {

View File

@@ -27,7 +27,7 @@ pub(crate) struct FetchSlashCommand;
impl FetchSlashCommand {
async fn build_message(http_client: Arc<HttpClientWithUrl>, url: &str) -> Result<String> {
let mut url = url.to_owned();
if !url.starts_with("https://") && !url.starts_with("http://") {
if !url.starts_with("https://") {
url = format!("https://{url}");
}

View File

@@ -23,7 +23,7 @@ impl SlashCommand for NowSlashCommand {
}
fn menu_text(&self) -> String {
"Insert Current Date and Time".into()
"Insert current date and time".into()
}
fn requires_argument(&self) -> bool {

View File

@@ -1,89 +0,0 @@
use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use editor::Editor;
use gpui::{AppContext, Task, WeakView};
use language::LspAdapterDelegate;
use std::sync::Arc;
use std::{path::Path, sync::atomic::AtomicBool};
use ui::{IconName, WindowContext};
use workspace::Workspace;
pub(crate) struct OutlineSlashCommand;
impl SlashCommand for OutlineSlashCommand {
fn name(&self) -> String {
"symbols".into()
}
fn description(&self) -> String {
"insert symbols for active tab".into()
}
fn menu_text(&self) -> String {
"Insert Symbols for Active Tab".into()
}
fn complete_argument(
self: Arc<Self>,
_query: String,
_cancel: Arc<AtomicBool>,
_workspace: Option<WeakView<Workspace>>,
_cx: &mut AppContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Err(anyhow!("this command does not require argument")))
}
fn requires_argument(&self) -> bool {
false
}
fn run(
self: Arc<Self>,
_argument: Option<&str>,
workspace: WeakView<Workspace>,
_delegate: Arc<dyn LspAdapterDelegate>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let output = workspace.update(cx, |workspace, cx| {
let Some(active_item) = workspace.active_item(cx) else {
return Task::ready(Err(anyhow!("no active tab")));
};
let Some(buffer) = active_item
.downcast::<Editor>()
.and_then(|editor| editor.read(cx).buffer().read(cx).as_singleton())
else {
return Task::ready(Err(anyhow!("active tab is not an editor")));
};
let snapshot = buffer.read(cx).snapshot();
let path = snapshot.resolve_file_path(cx, true);
cx.background_executor().spawn(async move {
let outline = snapshot
.outline(None)
.context("no symbols for active tab")?;
let path = path.as_deref().unwrap_or(Path::new("untitled"));
let mut outline_text = format!("Symbols for {}:\n", path.display());
for item in &outline.path_candidates {
outline_text.push_str("- ");
outline_text.push_str(&item.string);
outline_text.push('\n');
}
Ok(SlashCommandOutput {
sections: vec![SlashCommandOutputSection {
range: 0..outline_text.len(),
icon: IconName::ListTree,
label: path.to_string_lossy().to_string().into(),
}],
text: outline_text,
run_commands_in_text: false,
})
})
});
output.unwrap_or_else(|error| Task::ready(Err(error)))
}
}
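// Illustrative sketch only (not part of this change): the text produced by the
// command above for a hypothetical file with two symbols, mirroring the loop in
// `run`.
fn _outline_text_for(path: &str, symbols: &[&str]) -> String {
    let mut text = format!("Symbols for {}:\n", path);
    for symbol in symbols {
        text.push_str("- ");
        text.push_str(symbol);
        text.push('\n');
    }
    text
}
// _outline_text_for("src/lib.rs", &["struct Foo", "fn bar"]) produces:
// "Symbols for src/lib.rs:\n- struct Foo\n- fn bar\n"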

View File

@@ -31,7 +31,7 @@ impl SlashCommand for TermSlashCommand {
}
fn menu_text(&self) -> String {
"Insert Terminal Output".into()
"Insert terminal output".into()
}
fn requires_argument(&self) -> bool {

View File

@@ -1,7 +1,7 @@
use crate::{
assistant_settings::AssistantSettings, humanize_token_count,
prompts::generate_terminal_assistant_prompt, AssistantPanel, AssistantPanelEvent,
CompletionProvider,
CompletionProvider, LanguageModelRequest, LanguageModelRequestMessage, Role,
};
use anyhow::{Context as _, Result};
use client::telemetry::Telemetry;
@@ -13,11 +13,10 @@ use editor::{
use fs::Fs;
use futures::{channel::mpsc, SinkExt, StreamExt};
use gpui::{
AppContext, Context, EventEmitter, FocusHandle, FocusableView, Global, Model, ModelContext,
Subscription, Task, TextStyle, UpdateGlobal, View, WeakView,
AppContext, Context, EventEmitter, FocusHandle, FocusableView, FontStyle, FontWeight, Global,
Model, ModelContext, Subscription, Task, TextStyle, UpdateGlobal, View, WeakView, WhiteSpace,
};
use language::Buffer;
use language_model::{LanguageModelRequest, LanguageModelRequestMessage, Role};
use settings::{update_settings_file, Settings};
use std::{
cmp,
@@ -27,7 +26,7 @@ use std::{
use terminal::Terminal;
use terminal_view::TerminalView;
use theme::ThemeSettings;
use ui::{prelude::*, ContextMenu, IconButtonShape, PopoverMenu, Tooltip};
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
use util::ResultExt;
use workspace::{notifications::NotificationId, Toast, Workspace};
@@ -74,13 +73,11 @@ impl TerminalInlineAssistant {
terminal_view: &View<TerminalView>,
workspace: Option<WeakView<Workspace>>,
assistant_panel: Option<&View<AssistantPanel>>,
initial_prompt: Option<String>,
cx: &mut WindowContext,
) {
let terminal = terminal_view.read(cx).terminal().clone();
let assist_id = self.next_assist_id.post_inc();
let prompt_buffer =
cx.new_model(|cx| Buffer::local(initial_prompt.unwrap_or_default(), cx));
let prompt_buffer = cx.new_model(|cx| Buffer::local("", cx));
let prompt_buffer = cx.new_model(|cx| MultiBuffer::singleton(prompt_buffer, cx));
let codegen = cx.new_model(|_| Codegen::new(terminal, self.telemetry.clone()));
@@ -559,7 +556,8 @@ impl Render for PromptEditor {
PopoverMenu::new("model-switcher")
.menu(move |cx| {
ContextMenu::build(cx, |mut menu, cx| {
for model in CompletionProvider::global(cx).available_models() {
for model in CompletionProvider::global(cx).available_models(cx)
{
menu = menu.custom_entry(
{
let model = model.clone();
@@ -588,7 +586,7 @@ impl Render for PromptEditor {
})
.trigger(
IconButton::new("context", IconName::Settings)
.shape(IconButtonShape::Square)
.size(ButtonSize::None)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.tooltip(move |cx| {
@@ -600,7 +598,7 @@ impl Render for PromptEditor {
.display_name()
),
None,
"Change Model",
"Click to Change Model",
cx,
)
}),
@@ -945,9 +943,13 @@ impl PromptEditor {
font_family: settings.ui_font.family.clone(),
font_features: settings.ui_font.features.clone(),
font_size: rems(0.875).into(),
font_weight: settings.ui_font.weight,
font_weight: FontWeight::NORMAL,
font_style: FontStyle::Normal,
line_height: relative(1.3),
..Default::default()
background_color: None,
underline: None,
strikethrough: None,
white_space: WhiteSpace::Normal,
};
EditorElement::new(
&self.editor,
@@ -1024,7 +1026,7 @@ impl Codegen {
let telemetry = self.telemetry.clone();
let model_telemetry_id = prompt.model.telemetry_id();
let response = CompletionProvider::global(cx).stream_completion(prompt, cx);
let response = CompletionProvider::global(cx).complete(prompt, cx);
self.generation = cx.spawn(|this, mut cx| async move {
let response = response.await;
@@ -1035,8 +1037,8 @@ impl Codegen {
let mut response_latency = None;
let request_start = Instant::now();
let task = async {
let mut chunks = response?;
while let Some(chunk) = chunks.next().await {
let mut response = response.inner.await?;
while let Some(chunk) = response.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}

View File

@@ -67,7 +67,7 @@ pub struct SlashCommandOutput {
pub run_commands_in_text: bool,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Clone, Serialize, Deserialize)]
pub struct SlashCommandOutputSection<T> {
pub range: Range<T>,
pub icon: IconName,

View File

@@ -23,7 +23,6 @@ isahc.workspace = true
log.workspace = true
markdown_preview.workspace = true
menu.workspace = true
paths.workspace = true
release_channel.workspace = true
schemars.workspace = true
serde.workspace = true

View File

@@ -1,7 +1,7 @@
mod update_notification;
use anyhow::{anyhow, Context, Result};
use client::{Client, TelemetrySettings};
use client::{Client, TelemetrySettings, ZED_APP_PATH};
use db::kvp::KEY_VALUE_STORE;
use db::RELEASE_CHANNEL;
use editor::{Editor, MultiBuffer};
@@ -28,7 +28,7 @@ use std::{
consts::{ARCH, OS},
},
ffi::OsString,
path::{Path, PathBuf},
path::PathBuf,
sync::Arc,
time::Duration,
};
@@ -288,12 +288,7 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Wo
Some(tab_description),
cx,
);
workspace.add_item_to_active_pane(
Box::new(view.clone()),
None,
true,
cx,
);
workspace.add_item_to_active_pane(Box::new(view.clone()), None, cx);
cx.notify();
})
.log_err();
@@ -359,6 +354,7 @@ impl AutoUpdater {
return;
}
self.status = AutoUpdateStatus::Checking;
cx.notify();
self.pending_poll = Some(cx.spawn(|this, mut cx| async move {
@@ -384,65 +380,29 @@ impl AutoUpdater {
cx.notify();
}
pub async fn get_latest_remote_server_release(
os: &str,
arch: &str,
mut release_channel: ReleaseChannel,
cx: &mut AsyncAppContext,
) -> Result<PathBuf> {
let this = cx.update(|cx| {
cx.default_global::<GlobalAutoUpdate>()
.0
.clone()
.ok_or_else(|| anyhow!("auto-update not initialized"))
})??;
async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
let (client, current_version) = this.read_with(&cx, |this, _| {
(this.http_client.clone(), this.current_version)
})?;
if release_channel == ReleaseChannel::Dev {
release_channel = ReleaseChannel::Nightly;
}
let asset = match OS {
"linux" => format!("zed-linux-{}.tar.gz", ARCH),
"macos" => "Zed.dmg".into(),
_ => return Err(anyhow!("auto-update not supported for OS {:?}", OS)),
};
let release = Self::get_latest_release(
&this,
"zed-remote-server",
os,
arch,
Some(release_channel),
cx,
)
.await?;
let servers_dir = paths::remote_servers_dir();
let channel_dir = servers_dir.join(release_channel.dev_name());
let platform_dir = channel_dir.join(format!("{}-{}", os, arch));
let version_path = platform_dir.join(format!("{}.gz", release.version));
smol::fs::create_dir_all(&platform_dir).await.ok();
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
if smol::fs::metadata(&version_path).await.is_err() {
log::info!("downloading zed-remote-server {os} {arch}");
download_remote_server_binary(&version_path, release, client, cx).await?;
}
Ok(version_path)
}
async fn get_latest_release(
this: &Model<Self>,
asset: &str,
os: &str,
arch: &str,
release_channel: Option<ReleaseChannel>,
cx: &mut AsyncAppContext,
) -> Result<JsonRelease> {
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
let mut url_string = client.build_url(&format!(
"/api/releases/latest?asset={}&os={}&arch={}",
asset, os, arch
asset, OS, ARCH
));
if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
url_string += "&";
url_string += param;
}
cx.update(|cx| {
if let Some(param) = ReleaseChannel::try_global(cx)
.and_then(|release_channel| release_channel.release_query_param())
{
url_string += "&";
url_string += param;
}
})?;
let mut response = client.get(&url_string, Default::default(), true).await?;
@@ -453,34 +413,8 @@ impl AutoUpdater {
.await
.context("error reading release")?;
if !response.status().is_success() {
Err(anyhow!(
"failed to fetch release: {:?}",
String::from_utf8_lossy(&body),
))?;
}
serde_json::from_slice(body.as_slice()).with_context(|| {
format!(
"error deserializing release {:?}",
String::from_utf8_lossy(&body),
)
})
}
async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
let (client, current_version, release_channel) = this.update(&mut cx, |this, cx| {
this.status = AutoUpdateStatus::Checking;
cx.notify();
(
this.http_client.clone(),
this.current_version,
ReleaseChannel::try_global(cx),
)
})?;
let release =
Self::get_latest_release(&this, "zed", OS, ARCH, release_channel, &mut cx).await?;
let release: JsonRelease =
serde_json::from_slice(body.as_slice()).context("error deserializing release")?;
let should_download = match *RELEASE_CHANNEL {
ReleaseChannel::Nightly => cx
@@ -507,21 +441,19 @@ impl AutoUpdater {
let temp_dir = tempfile::Builder::new()
.prefix("zed-auto-update")
.tempdir()?;
let filename = match OS {
"macos" => Ok("Zed.dmg"),
"linux" => Ok("zed.tar.gz"),
_ => Err(anyhow!("not supported: {:?}", OS)),
}?;
let downloaded_asset = temp_dir.path().join(filename);
download_release(&downloaded_asset, release, client, &cx).await?;
let downloaded_asset = download_release(&temp_dir, release, &asset, client, &cx).await?;
this.update(&mut cx, |this, cx| {
this.status = AutoUpdateStatus::Installing;
cx.notify();
})?;
let binary_path = match OS {
// We store the path of our current binary, before we install, since installation might
// delete it. Once deleted, it's hard to get the path to our binary on Linux.
// So we cache it here, which allows us to then restart later on.
let binary_path = cx.update(|cx| cx.app_path())??;
match OS {
"macos" => install_release_macos(&temp_dir, downloaded_asset, &cx).await,
"linux" => install_release_linux(&temp_dir, downloaded_asset, &cx).await,
_ => Err(anyhow!("not supported: {:?}", OS)),
@@ -568,38 +500,14 @@ impl AutoUpdater {
}
}
async fn download_remote_server_binary(
target_path: &PathBuf,
release: JsonRelease,
client: Arc<HttpClientWithUrl>,
cx: &AsyncAppContext,
) -> Result<()> {
let mut target_file = File::create(&target_path).await?;
let (installation_id, release_channel, telemetry) = cx.update(|cx| {
let installation_id = Client::global(cx).telemetry().installation_id();
let release_channel =
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
let telemetry = TelemetrySettings::get_global(cx).metrics;
(installation_id, release_channel, telemetry)
})?;
let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
installation_id,
release_channel,
telemetry,
})?);
let mut response = client.get(&release.url, request_body, true).await?;
smol::io::copy(response.body_mut(), &mut target_file).await?;
Ok(())
}
async fn download_release(
target_path: &Path,
temp_dir: &tempfile::TempDir,
release: JsonRelease,
target_filename: &str,
client: Arc<HttpClientWithUrl>,
cx: &AsyncAppContext,
) -> Result<()> {
) -> Result<PathBuf> {
let target_path = temp_dir.path().join(target_filename);
let mut target_file = File::create(&target_path).await?;
let (installation_id, release_channel, telemetry) = cx.update(|cx| {
@@ -621,17 +529,16 @@ async fn download_release(
smol::io::copy(response.body_mut(), &mut target_file).await?;
log::info!("downloaded update. path:{:?}", target_path);
Ok(())
Ok(target_path)
}
async fn install_release_linux(
temp_dir: &tempfile::TempDir,
downloaded_tar_gz: PathBuf,
cx: &AsyncAppContext,
) -> Result<PathBuf> {
) -> Result<()> {
let channel = cx.update(|cx| ReleaseChannel::global(cx).dev_name())?;
let home_dir = PathBuf::from(env::var("HOME").context("no HOME env var set")?);
let running_app_path = cx.update(|cx| cx.app_path())??;
let extracted = temp_dir.path().join("zed");
fs::create_dir_all(&extracted)
@@ -662,16 +569,7 @@ async fn install_release_linux(
let app_folder_name = format!("zed{}.app", suffix);
let from = extracted.join(&app_folder_name);
let mut to = home_dir.join(".local");
let expected_suffix = format!("{}/libexec/zed-editor", app_folder_name);
if let Some(prefix) = running_app_path
.to_str()
.and_then(|str| str.strip_suffix(&expected_suffix))
{
to = PathBuf::from(prefix);
}
let to = home_dir.join(".local");
let output = Command::new("rsync")
.args(&["-av", "--delete"])
@@ -688,15 +586,17 @@ async fn install_release_linux(
String::from_utf8_lossy(&output.stderr)
);
Ok(to.join(expected_suffix))
Ok(())
}
async fn install_release_macos(
temp_dir: &tempfile::TempDir,
downloaded_dmg: PathBuf,
cx: &AsyncAppContext,
) -> Result<PathBuf> {
let running_app_path = cx.update(|cx| cx.app_path())??;
) -> Result<()> {
let running_app_path = ZED_APP_PATH
.clone()
.map_or_else(|| cx.update(|cx| cx.app_path())?, Ok)?;
let running_app_filename = running_app_path
.file_name()
.ok_or_else(|| anyhow!("invalid running app path"))?;
@@ -744,5 +644,5 @@ async fn install_release_macos(
String::from_utf8_lossy(&output.stderr)
);
Ok(running_app_path)
Ok(())
}
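
As an aside, not part of this diff: a minimal, self-contained sketch of the platform dispatch the updater performs above, deriving the release asset name from the compile-time OS and architecture and rejecting unsupported platforms. The helper name is illustrative.

```rust
use std::env::consts::{ARCH, OS};

// Stand-alone illustration of the `match OS` dispatch seen in the updater:
// pick the download asset name for the current platform, or report that
// auto-update is unsupported.
fn release_asset_name() -> Result<String, String> {
    match OS {
        "linux" => Ok(format!("zed-linux-{}.tar.gz", ARCH)),
        "macos" => Ok("Zed.dmg".to_string()),
        other => Err(format!("auto-update not supported for OS {:?}", other)),
    }
}

fn main() {
    match release_asset_name() {
        Ok(asset) => println!("would download {asset}"),
        Err(err) => eprintln!("{err}"),
    }
}
```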

View File

@@ -72,7 +72,7 @@ impl Render for Breadcrumbs {
.into_any()
});
let breadcrumbs = Itertools::intersperse_with(highlighted_segments, || {
Label::new("").color(Color::Placeholder).into_any_element()
Label::new("").color(Color::Muted).into_any_element()
});
let breadcrumbs_stack = h_flex().gap_1().children(breadcrumbs);
@@ -83,7 +83,7 @@ impl Render for Breadcrumbs {
Some(editor) => element.child(
ButtonLike::new("toggle outline view")
.child(breadcrumbs_stack)
.style(ButtonStyle::Transparent)
.style(ButtonStyle::Subtle)
.on_click(move |_, cx| {
if let Some(editor) = editor.upgrade() {
outline::toggle(editor, &editor::actions::ToggleOutline, cx)
@@ -113,30 +113,29 @@ impl ToolbarItemView for Breadcrumbs {
) -> ToolbarItemLocation {
cx.notify();
self.active_item = None;
let Some(item) = active_pane_item else {
return ToolbarItemLocation::Hidden;
};
let this = cx.view().downgrade();
self.subscription = Some(item.subscribe_to_item_events(
cx,
Box::new(move |event, cx| {
if let ItemEvent::UpdateBreadcrumbs = event {
this.update(cx, |this, cx| {
cx.notify();
if let Some(active_item) = this.active_item.as_ref() {
cx.emit(ToolbarItemEvent::ChangeLocation(
active_item.breadcrumb_location(cx),
))
}
})
.ok();
}
}),
));
self.active_item = Some(item.boxed_clone());
item.breadcrumb_location(cx)
if let Some(item) = active_pane_item {
let this = cx.view().downgrade();
self.subscription = Some(item.subscribe_to_item_events(
cx,
Box::new(move |event, cx| {
if let ItemEvent::UpdateBreadcrumbs = event {
this.update(cx, |this, cx| {
cx.notify();
if let Some(active_item) = this.active_item.as_ref() {
cx.emit(ToolbarItemEvent::ChangeLocation(
active_item.breadcrumb_location(cx),
))
}
})
.ok();
}
}),
));
self.active_item = Some(item.boxed_clone());
item.breadcrumb_location(cx)
} else {
ToolbarItemLocation::Hidden
}
}
fn pane_focus_update(&mut self, pane_focused: bool, _: &mut ViewContext<Self>) {

View File

@@ -11,7 +11,6 @@ pub struct IpcHandshake {
pub enum CliRequest {
Open {
paths: Vec<String>,
urls: Vec<String>,
wait: bool,
open_new_workspace: Option<bool>,
dev_server_token: Option<String>,

View File

@@ -5,7 +5,6 @@ use clap::Parser;
use cli::{ipc::IpcOneShotServer, CliRequest, CliResponse, IpcHandshake};
use parking_lot::Mutex;
use std::{
convert::Infallible,
env, fs, io,
path::{Path, PathBuf},
process::ExitStatus,
@@ -38,7 +37,8 @@ struct Args {
///
/// Use `path:line:row` syntax to open a file at a specific location.
/// Non-existing paths and directories will ignore `:line:row` suffix.
paths_with_position: Vec<String>,
#[arg(value_parser = parse_path_with_position)]
paths_with_position: Vec<PathLikeWithPosition<PathBuf>>,
/// Print Zed's version and the app path.
#[arg(short, long)]
version: bool,
@@ -53,30 +53,12 @@ struct Args {
dev_server_token: Option<String>,
}
fn parse_path_with_position(argument_str: &str) -> Result<String, std::io::Error> {
let path_like = PathLikeWithPosition::parse_str::<Infallible>(argument_str, |_, path_str| {
fn parse_path_with_position(
argument_str: &str,
) -> Result<PathLikeWithPosition<PathBuf>, std::convert::Infallible> {
PathLikeWithPosition::parse_str(argument_str, |_, path_str| {
Ok(Path::new(path_str).to_path_buf())
})
.unwrap();
let curdir = env::current_dir()?;
let canonicalized = path_like.map_path_like(|path| match fs::canonicalize(&path) {
Ok(path) => Ok(path),
Err(e) => {
if let Some(mut parent) = path.parent() {
if parent == Path::new("") {
parent = &curdir
}
match fs::canonicalize(parent) {
Ok(parent) => Ok(parent.join(path.file_name().unwrap())),
Err(_) => Err(e),
}
} else {
Err(e)
}
}
})?;
Ok(canonicalized.to_string(|path| path.display().to_string()))
}
fn main() -> Result<()> {
@@ -109,6 +91,28 @@ fn main() -> Result<()> {
return Ok(());
}
let curdir = env::current_dir()?;
let mut paths = vec![];
for path in args.paths_with_position {
let canonicalized = path.map_path_like(|path| match fs::canonicalize(&path) {
Ok(path) => Ok(path),
Err(e) => {
if let Some(mut parent) = path.parent() {
if parent == Path::new("") {
parent = &curdir;
}
match fs::canonicalize(parent) {
Ok(parent) => Ok(parent.join(path.file_name().unwrap())),
Err(_) => Err(e),
}
} else {
Err(e)
}
}
})?;
paths.push(canonicalized.to_string(|path| path.display().to_string()))
}
let (server, server_name) =
IpcOneShotServer::<IpcHandshake>::new().context("Handshake before Zed spawn")?;
let url = format!("zed-cli://{server_name}");
@@ -122,20 +126,6 @@ fn main() -> Result<()> {
};
let exit_status = Arc::new(Mutex::new(None));
let mut paths = vec![];
let mut urls = vec![];
for path in args.paths_with_position.iter() {
if path.starts_with("zed://")
|| path.starts_with("http://")
|| path.starts_with("https://")
|| path.starts_with("file://")
|| path.starts_with("ssh://")
{
urls.push(path.to_string());
} else {
paths.push(parse_path_with_position(path)?)
}
}
let sender: JoinHandle<anyhow::Result<()>> = thread::spawn({
let exit_status = exit_status.clone();
@@ -144,7 +134,6 @@ fn main() -> Result<()> {
let (tx, rx) = (handshake.requests, handshake.responses);
tx.send(CliRequest::Open {
paths,
urls,
wait: args.wait,
open_new_workspace,
dev_server_token: args.dev_server_token,
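
Not part of this diff: a stand-alone sketch of the argument triage visible above, where anything carrying a URL scheme Zed understands is forwarded as a URL and everything else is treated as a path that may carry a `:line:column` suffix. Names are illustrative.

```rust
// Split CLI arguments into path-like and URL-like groups, mirroring the
// `starts_with` checks in the hunk above.
fn classify_args(args: &[String]) -> (Vec<String>, Vec<String>) {
    let mut paths = Vec::new();
    let mut urls = Vec::new();
    for arg in args {
        let is_url = ["zed://", "http://", "https://", "file://", "ssh://"]
            .iter()
            .any(|scheme| arg.starts_with(scheme));
        if is_url {
            urls.push(arg.clone());
        } else {
            paths.push(arg.clone());
        }
    }
    (paths, urls)
}

fn main() {
    let args = vec![
        "src/main.rs:10:5".to_string(),
        "https://zed.dev".to_string(),
    ];
    let (paths, urls) = classify_args(&args);
    println!("paths: {paths:?}, urls: {urls:?}");
}
```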

View File

@@ -13,9 +13,8 @@ use async_tungstenite::tungstenite::{
use clock::SystemClock;
use collections::HashMap;
use futures::{
channel::oneshot,
future::{BoxFuture, LocalBoxFuture},
AsyncReadExt, FutureExt, SinkExt, Stream, StreamExt, TryFutureExt as _, TryStreamExt,
channel::oneshot, future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, Stream, StreamExt,
TryFutureExt as _, TryStreamExt,
};
use gpui::{
actions, AnyModel, AnyWeakModel, AppContext, AsyncAppContext, Global, Model, Task, WeakModel,
@@ -24,7 +23,6 @@ use http::{HttpClient, HttpClientWithUrl};
use lazy_static::lazy_static;
use parking_lot::RwLock;
use postage::watch;
use proto::ProtoClient;
use rand::prelude::*;
use release_channel::{AppVersion, ReleaseChannel};
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
@@ -219,9 +217,6 @@ pub struct Client {
>,
>,
>,
#[cfg(any(test, feature = "test-support"))]
rpc_url: RwLock<Option<Url>>,
}
#[derive(Error, Debug)]
@@ -532,8 +527,6 @@ impl Client {
authenticate: Default::default(),
#[cfg(any(test, feature = "test-support"))]
establish_connection: Default::default(),
#[cfg(any(test, feature = "test-support"))]
rpc_url: RwLock::default(),
})
}
@@ -591,12 +584,6 @@ impl Client {
self
}
#[cfg(any(test, feature = "test-support"))]
pub fn override_rpc_url(&self, url: Url) -> &Self {
*self.rpc_url.write() = Some(url);
self
}
pub fn global(cx: &AppContext) -> Arc<Self> {
cx.global::<GlobalClient>().0.clone()
}
@@ -1099,50 +1086,38 @@ impl Client {
self.establish_websocket_connection(credentials, cx)
}
fn rpc_url(
&self,
async fn get_rpc_url(
http: Arc<HttpClientWithUrl>,
release_channel: Option<ReleaseChannel>,
) -> impl Future<Output = Result<Url>> {
#[cfg(any(test, feature = "test-support"))]
let url_override = self.rpc_url.read().clone();
async move {
#[cfg(any(test, feature = "test-support"))]
if let Some(url) = url_override {
return Ok(url);
}
if let Some(url) = &*ZED_RPC_URL {
return Url::parse(url).context("invalid rpc url");
}
let mut url = http.build_url("/rpc");
if let Some(preview_param) =
release_channel.and_then(|channel| channel.release_query_param())
{
url += "?";
url += preview_param;
}
let response = http.get(&url, Default::default(), false).await?;
let collab_url = if response.status().is_redirection() {
response
.headers()
.get("Location")
.ok_or_else(|| anyhow!("missing location header in /rpc response"))?
.to_str()
.map_err(EstablishConnectionError::other)?
.to_string()
} else {
Err(anyhow!(
"unexpected /rpc response status {}",
response.status()
))?
};
Url::parse(&collab_url).context("invalid rpc url")
) -> Result<Url> {
if let Some(url) = &*ZED_RPC_URL {
return Url::parse(url).context("invalid rpc url");
}
let mut url = http.build_url("/rpc");
if let Some(preview_param) =
release_channel.and_then(|channel| channel.release_query_param())
{
url += "?";
url += preview_param;
}
let response = http.get(&url, Default::default(), false).await?;
let collab_url = if response.status().is_redirection() {
response
.headers()
.get("Location")
.ok_or_else(|| anyhow!("missing location header in /rpc response"))?
.to_str()
.map_err(EstablishConnectionError::other)?
.to_string()
} else {
Err(anyhow!(
"unexpected /rpc response status {}",
response.status()
))?
};
Url::parse(&collab_url).context("invalid rpc url")
}
fn establish_websocket_connection(
@@ -1169,9 +1144,8 @@ impl Client {
);
let http = self.http.clone();
let rpc_url = self.rpc_url(http, release_channel);
cx.background_executor().spawn(async move {
let mut rpc_url = rpc_url.await?;
let mut rpc_url = Self::get_rpc_url(http, release_channel).await?;
let rpc_host = rpc_url
.host_str()
.zip(rpc_url.port_or_known_default())
@@ -1212,7 +1186,6 @@ impl Client {
cx: &AsyncAppContext,
) -> Task<Result<Credentials>> {
let http = self.http.clone();
let this = self.clone();
cx.spawn(|cx| async move {
let background = cx.background_executor().clone();
@@ -1242,8 +1215,7 @@ impl Client {
{
eprintln!("authenticate as admin {login}, {token}");
return this
.authenticate_as_admin(http, login.clone(), token.clone())
return Self::authenticate_as_admin(http, login.clone(), token.clone())
.await;
}
@@ -1331,7 +1303,6 @@ impl Client {
}
async fn authenticate_as_admin(
self: &Arc<Self>,
http: Arc<HttpClientWithUrl>,
login: String,
mut api_token: String,
@@ -1348,7 +1319,7 @@ impl Client {
// Use the collab server's admin API to retrieve the id
// of the impersonated user.
let mut url = self.rpc_url(http.clone(), None).await?;
let mut url = Self::get_rpc_url(http.clone(), None).await?;
url.set_path("/user");
url.set_query(Some(&format!("github_login={login}")));
let request = Request::get(url.as_str())
@@ -1410,11 +1381,6 @@ impl Client {
self.peer.send(self.connection_id()?, message)
}
fn send_dynamic(&self, envelope: proto::Envelope) -> Result<()> {
let connection_id = self.connection_id()?;
self.peer.send_dynamic(connection_id, envelope)
}
pub fn request<T: RequestMessage>(
&self,
request: T,
@@ -1613,20 +1579,6 @@ impl Client {
}
}
impl ProtoClient for Client {
fn request(
&self,
envelope: proto::Envelope,
request_type: &'static str,
) -> BoxFuture<'static, Result<proto::Envelope>> {
self.request_dynamic(envelope, request_type).boxed()
}
fn send(&self, envelope: proto::Envelope) -> Result<()> {
self.send_dynamic(envelope)
}
}
#[derive(Serialize, Deserialize)]
struct DevelopmentCredentials {
user_id: u64,
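
Not part of this diff: a rough sketch of the resolution order `get_rpc_url` implements above. An explicit `ZED_RPC_URL` override wins; otherwise the URL is the HTTP base plus `/rpc` and an optional release-channel query parameter. The redirect-following step is omitted and the function signature is illustrative.

```rust
use std::env;

// Illustrative only: decide which RPC URL to contact, preferring an explicit
// environment override and otherwise appending the release-channel query
// parameter to the `/rpc` endpoint.
fn resolve_rpc_url(base_url: &str, release_query_param: Option<&str>) -> String {
    if let Ok(url) = env::var("ZED_RPC_URL") {
        return url;
    }
    let mut url = format!("{base_url}/rpc");
    if let Some(param) = release_query_param {
        url.push('?');
        url.push_str(param);
    }
    url
}

fn main() {
    println!("{}", resolve_rpc_url("https://zed.dev", Some("preview=1")));
}
```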

View File

@@ -0,0 +1 @@

View File

@@ -18,5 +18,4 @@ test-support = ["dep:parking_lot"]
[dependencies]
chrono.workspace = true
parking_lot = { workspace = true, optional = true }
serde.workspace = true
smallvec.workspace = true

View File

@@ -1,6 +1,5 @@
mod system_clock;
use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use std::{
cmp::{self, Ordering},
@@ -17,7 +16,7 @@ pub type Seq = u32;
/// A [Lamport timestamp](https://en.wikipedia.org/wiki/Lamport_timestamp),
/// used to determine the ordering of events in the editor.
#[derive(Clone, Copy, Default, Eq, Hash, PartialEq, Serialize, Deserialize)]
#[derive(Clone, Copy, Default, Eq, Hash, PartialEq)]
pub struct Lamport {
pub replica_id: ReplicaId,
pub value: Seq,
@@ -162,10 +161,6 @@ impl Lamport {
}
}
pub fn as_u64(self) -> u64 {
((self.value as u64) << 32) | (self.replica_id as u64)
}
pub fn tick(&mut self) -> Self {
let timestamp = *self;
self.value += 1;
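
Not part of this diff: a minimal model of the Lamport timestamp packing visible in the hunk above. Only `as_u64` mirrors the code shown; the concrete field widths and the `from_u64` inverse are assumptions added for illustration.

```rust
// Assumed widths for illustration; the real ReplicaId/Seq types live in the crate.
type ReplicaId = u16;
type Seq = u32;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Lamport {
    replica_id: ReplicaId,
    value: Seq,
}

impl Lamport {
    // Value goes in the high 32 bits, replica id in the low bits, so comparing
    // the packed u64s orders timestamps by value first.
    fn as_u64(self) -> u64 {
        ((self.value as u64) << 32) | (self.replica_id as u64)
    }

    // Invented inverse, not present in the diff, included only for the demo.
    fn from_u64(raw: u64) -> Self {
        Lamport {
            replica_id: (raw & u16::MAX as u64) as ReplicaId,
            value: (raw >> 32) as Seq,
        }
    }
}

fn main() {
    let ts = Lamport { replica_id: 3, value: 42 };
    assert_eq!(Lamport::from_u64(ts.as_u64()), ts);
    println!("{:?} -> {:#x}", ts, ts.as_u64());
}
```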

View File

@@ -30,7 +30,6 @@ chrono.workspace = true
clock.workspace = true
clickhouse.workspace = true
collections.workspace = true
completion.workspace = true
dashmap = "5.4"
envy = "0.4.2"
futures.workspace = true
@@ -72,7 +71,6 @@ util.workspace = true
uuid.workspace = true
[dev-dependencies]
assistant = { workspace = true, features = ["test-support"] }
async-trait.workspace = true
audio.workspace = true
call = { workspace = true, features = ["test-support"] }
@@ -80,7 +78,6 @@ channel.workspace = true
client = { workspace = true, features = ["test-support"] }
collab_ui = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
completion = { workspace = true, features = ["test-support"] }
ctor.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
@@ -101,8 +98,6 @@ pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
recent_projects = { workspace = true }
release_channel.workspace = true
remote = { workspace = true, features = ["test-support"] }
remote_server.workspace = true
dev_server_projects.workspace = true
rpc = { workspace = true, features = ["test-support"] }
sea-orm = { version = "0.12.x", features = ["sqlx-sqlite"] }

View File

@@ -414,5 +414,5 @@ CREATE TABLE dev_servers (
CREATE TABLE dev_server_projects (
id INTEGER PRIMARY KEY AUTOINCREMENT,
dev_server_id INTEGER NOT NULL REFERENCES dev_servers(id),
paths TEXT NOT NULL
path TEXT NOT NULL
);

View File

@@ -1,4 +0,0 @@
ALTER TABLE dev_server_projects ADD COLUMN paths JSONB NULL;
UPDATE dev_server_projects SET paths = to_json(ARRAY[path]);
ALTER TABLE dev_server_projects ALTER COLUMN paths SET NOT NULL;
ALTER TABLE dev_server_projects ALTER COLUMN path DROP NOT NULL;

View File

@@ -43,36 +43,11 @@ async fn get_extensions(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<GetExtensionsParams>,
) -> Result<Json<GetExtensionsResponse>> {
let mut extensions = app
let extensions = app
.db
.get_extensions(params.filter.as_deref(), params.max_schema_version, 500)
.await?;
if let Some(filter) = params.filter.as_deref() {
let extension_id = filter.to_lowercase();
let mut exact_match = None;
extensions.retain(|extension| {
if extension.id.as_ref() == &extension_id {
exact_match = Some(extension.clone());
false
} else {
true
}
});
if exact_match.is_none() {
exact_match = app
.db
.get_extensions_by_ids(&[&extension_id], None)
.await?
.first()
.cloned();
}
if let Some(exact_match) = exact_match {
extensions.insert(0, exact_match);
}
};
if let Some(query) = params.filter.as_deref() {
let count = extensions.len();
tracing::info!(query, count, "extension_search")
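
Not part of this diff: a simplified stand-alone version of the exact-match promotion in the handler above, pulling an extension whose id equals the filter out of the results and reinserting it at the front. The database fallback lookup is omitted and the `Extension` type is illustrative.

```rust
#[derive(Clone, Debug)]
struct Extension {
    id: String,
    name: String,
}

// Move an exact id match (case-insensitive filter) to the front of the
// results, mirroring the retain/insert sequence in the handler above.
fn promote_exact_match(extensions: &mut Vec<Extension>, filter: &str) {
    let wanted = filter.to_lowercase();
    let mut exact = None;
    extensions.retain(|ext| {
        if ext.id == wanted {
            exact = Some(ext.clone());
            false
        } else {
            true
        }
    });
    if let Some(exact) = exact {
        extensions.insert(0, exact);
    }
}

fn main() {
    let mut results = vec![
        Extension { id: "ruby-lsp".into(), name: "Ruby LSP".into() },
        Extension { id: "ruby".into(), name: "Ruby".into() },
    ];
    promote_exact_match(&mut results, "Ruby");
    assert_eq!(results[0].id, "ruby");
    println!("{results:?}");
}
```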

View File

@@ -5,7 +5,7 @@ use rpc::{
};
use sea_orm::{
ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait,
IntoActiveModel, ModelTrait, QueryFilter,
ModelTrait, QueryFilter,
};
use crate::db::ProjectId;
@@ -56,7 +56,12 @@ impl Database {
.await?;
Ok(servers
.into_iter()
.map(|(dev_server_project, project)| dev_server_project.to_proto(project))
.map(|(dev_server_project, project)| proto::DevServerProject {
id: dev_server_project.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
dev_server_id: dev_server_project.dev_server_id.to_proto(),
path: dev_server_project.path,
})
.collect())
}
@@ -129,7 +134,7 @@ impl Database {
let project = dev_server_project::Entity::insert(dev_server_project::ActiveModel {
id: ActiveValue::NotSet,
dev_server_id: ActiveValue::Set(dev_server_id),
paths: ActiveValue::Set(dev_server_project::JSONPaths(vec![path.to_string()])),
path: ActiveValue::Set(path.to_string()),
})
.exec_with_returning(&*tx)
.await?;
@@ -143,38 +148,6 @@ impl Database {
.await
}
pub async fn update_dev_server_project(
&self,
id: DevServerProjectId,
paths: &Vec<String>,
user_id: UserId,
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
self.transaction(move |tx| async move {
let paths = paths.clone();
let Some((project, Some(dev_server))) = dev_server_project::Entity::find_by_id(id)
.find_also_related(dev_server::Entity)
.one(&*tx)
.await?
else {
return Err(anyhow!("no such dev server project"))?;
};
if dev_server.user_id != user_id {
return Err(anyhow!("not your dev server"))?;
}
let mut project = project.into_active_model();
project.paths = ActiveValue::Set(dev_server_project::JSONPaths(paths));
let project = project.update(&*tx).await?;
let status = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok((project, status))
})
.await
}
pub async fn delete_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
@@ -285,6 +258,7 @@ impl Database {
dev_server_id: DevServerId,
connection: ConnectionId,
) -> crate::Result<Vec<ResharedProject>> {
// todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?)
self.transaction(|tx| async move {
let mut ret = Vec::new();
for reshared_project in reshared_projects {
@@ -348,6 +322,7 @@ impl Database {
user_id: UserId,
connection_id: ConnectionId,
) -> crate::Result<Vec<RejoinedProject>> {
// todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?)
self.transaction(|tx| async move {
let mut ret = Vec::new();
for rejoined_project in rejoined_projects {

View File

@@ -19,28 +19,6 @@ impl Database {
.await
}
pub async fn get_dev_server_for_user(
&self,
dev_server_id: DevServerId,
user_id: UserId,
) -> crate::Result<dev_server::Model> {
self.transaction(|tx| async move {
let server = dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?;
if server.user_id != user_id {
return Err(anyhow::anyhow!(
"dev server {} is not owned by user {}",
dev_server_id,
user_id
))?;
}
Ok(server)
})
.await
}
pub async fn get_dev_servers(&self, user_id: UserId) -> crate::Result<Vec<dev_server::Model>> {
self.transaction(|tx| async move {
Ok(dev_server::Entity::find()

View File

@@ -0,0 +1 @@

View File

@@ -1,8 +1,7 @@
use super::project;
use crate::db::{DevServerId, DevServerProjectId};
use rpc::proto;
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "dev_server_projects")]
@@ -10,12 +9,9 @@ pub struct Model {
#[sea_orm(primary_key)]
pub id: DevServerProjectId,
pub dev_server_id: DevServerId,
pub paths: JSONPaths,
pub path: String,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct JSONPaths(pub Vec<String>);
impl ActiveModelBehavior for ActiveModel {}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -48,12 +44,7 @@ impl Model {
id: self.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
dev_server_id: self.dev_server_id.to_proto(),
path: self.paths().get(0).cloned().unwrap_or_default(),
paths: self.paths().clone(),
path: self.path.clone(),
}
}
pub fn paths(&self) -> &Vec<String> {
&self.paths.0
}
}

View File

@@ -562,7 +562,7 @@ fn test_fuzzy_like_string() {
assert_eq!(Database::fuzzy_like_string(" z "), "%z%");
}
#[cfg(target_os = "macos")]
#[cfg(target = "macos")]
#[gpui::test]
async fn test_fuzzy_search_users(cx: &mut gpui::TestAppContext) {
let test_db = tests::TestDb::postgres(cx.executor());

View File

@@ -431,13 +431,11 @@ impl Server {
.add_request_handler(user_handler(join_hosted_project))
.add_request_handler(user_handler(rejoin_dev_server_projects))
.add_request_handler(user_handler(create_dev_server_project))
.add_request_handler(user_handler(update_dev_server_project))
.add_request_handler(user_handler(delete_dev_server_project))
.add_request_handler(user_handler(create_dev_server))
.add_request_handler(user_handler(regenerate_dev_server_token))
.add_request_handler(user_handler(rename_dev_server))
.add_request_handler(user_handler(delete_dev_server))
.add_request_handler(user_handler(list_remote_directory))
.add_request_handler(dev_server_handler(share_dev_server_project))
.add_request_handler(dev_server_handler(shutdown_dev_server))
.add_request_handler(dev_server_handler(reconnect_dev_server))
@@ -597,14 +595,6 @@ impl Server {
.add_message_handler(user_message_handler(acknowledge_channel_message))
.add_message_handler(user_message_handler(acknowledge_buffer_version))
.add_request_handler(user_handler(get_supermaven_api_key))
.add_request_handler(user_handler(
forward_mutating_project_request::<proto::OpenContext>,
))
.add_request_handler(user_handler(
forward_mutating_project_request::<proto::SynchronizeContexts>,
))
.add_message_handler(broadcast_project_message_from_host::<proto::AdvertiseContexts>)
.add_message_handler(update_context)
.add_streaming_request_handler({
let app_state = app_state.clone();
move |request, response, session| {
@@ -2315,69 +2305,6 @@ async fn join_hosted_project(
join_project_internal(response, session, &mut project, &replica_id)
}
async fn list_remote_directory(
request: proto::ListRemoteDirectory,
response: Response<proto::ListRemoteDirectory>,
session: UserSession,
) -> Result<()> {
let dev_server_id = DevServerId(request.dev_server_id as i32);
let dev_server_connection_id = session
.connection_pool()
.await
.dev_server_connection_id_supporting(dev_server_id, ZedVersion::with_list_directory())?;
session
.db()
.await
.get_dev_server_for_user(dev_server_id, session.user_id())
.await?;
response.send(
session
.peer
.forward_request(session.connection_id, dev_server_connection_id, request)
.await?,
)?;
Ok(())
}
async fn update_dev_server_project(
request: proto::UpdateDevServerProject,
response: Response<proto::UpdateDevServerProject>,
session: UserSession,
) -> Result<()> {
let dev_server_project_id = DevServerProjectId(request.dev_server_project_id as i32);
let (dev_server_project, update) = session
.db()
.await
.update_dev_server_project(dev_server_project_id, &request.paths, session.user_id())
.await?;
let projects = session
.db()
.await
.get_projects_for_dev_server(dev_server_project.dev_server_id)
.await?;
let dev_server_connection_id = session
.connection_pool()
.await
.dev_server_connection_id_supporting(
dev_server_project.dev_server_id,
ZedVersion::with_list_directory(),
)?;
session.peer.send(
dev_server_connection_id,
proto::DevServerInstructions { projects },
)?;
send_dev_server_projects_update(session.user_id(), update, &session).await;
response.send(proto::Ack {})
}
async fn create_dev_server_project(
request: proto::CreateDevServerProject,
response: Response<proto::CreateDevServerProject>,
@@ -3129,53 +3056,6 @@ async fn update_buffer(
Ok(())
}
async fn update_context(message: proto::UpdateContext, session: Session) -> Result<()> {
let project_id = ProjectId::from_proto(message.project_id);
let operation = message.operation.as_ref().context("invalid operation")?;
let capability = match operation.variant.as_ref() {
Some(proto::context_operation::Variant::BufferOperation(buffer_op)) => {
if let Some(buffer_op) = buffer_op.operation.as_ref() {
match buffer_op.variant {
None | Some(proto::operation::Variant::UpdateSelections(_)) => {
Capability::ReadOnly
}
_ => Capability::ReadWrite,
}
} else {
Capability::ReadWrite
}
}
Some(_) => Capability::ReadWrite,
None => Capability::ReadOnly,
};
let guard = session
.db()
.await
.connections_for_buffer_update(
project_id,
session.principal_id(),
session.connection_id,
capability,
)
.await?;
let (host, guests) = &*guard;
broadcast(
Some(session.connection_id),
guests.iter().chain([host]).copied(),
|connection_id| {
session
.peer
.forward_send(session.connection_id, connection_id, message.clone())
},
);
Ok(())
}
/// Notify other participants that a project has been updated.
async fn broadcast_project_message_from_host<T: EntityMessage<Entity = ShareProject>>(
request: T,
@@ -4514,7 +4394,7 @@ impl RateLimit for CompleteWithLanguageModelRateLimit {
}
async fn complete_with_language_model(
mut request: proto::CompleteWithLanguageModel,
request: proto::CompleteWithLanguageModel,
response: StreamingResponse<proto::CompleteWithLanguageModel>,
session: Session,
open_ai_api_key: Option<Arc<str>>,
@@ -4530,43 +4410,18 @@ async fn complete_with_language_model(
.check::<CompleteWithLanguageModelRateLimit>(session.user_id())
.await?;
let mut provider_and_model = request.model.split('/');
let (provider, model) = match (
provider_and_model.next().unwrap(),
provider_and_model.next(),
) {
(provider, Some(model)) => (provider, model),
(model, None) => {
if model.starts_with("gpt") {
("openai", model)
} else if model.starts_with("gemini") {
("google", model)
} else if model.starts_with("claude") {
("anthropic", model)
} else {
("unknown", model)
}
}
};
let provider = provider.to_string();
request.model = model.to_string();
match provider.as_str() {
"openai" => {
let api_key = open_ai_api_key.context("no OpenAI API key configured on the server")?;
complete_with_open_ai(request, response, session, api_key).await?;
}
"anthropic" => {
let api_key =
anthropic_api_key.context("no Anthropic AI API key configured on the server")?;
complete_with_anthropic(request, response, session, api_key).await?;
}
"google" => {
let api_key =
google_ai_api_key.context("no Google AI API key configured on the server")?;
complete_with_google_ai(request, response, session, api_key).await?;
}
provider => return Err(anyhow!("unknown provider {:?}", provider))?,
if request.model.starts_with("gpt") {
let api_key =
open_ai_api_key.ok_or_else(|| anyhow!("no OpenAI API key configured on the server"))?;
complete_with_open_ai(request, response, session, api_key).await?;
} else if request.model.starts_with("gemini") {
let api_key = google_ai_api_key
.ok_or_else(|| anyhow!("no Google AI API key configured on the server"))?;
complete_with_google_ai(request, response, session, api_key).await?;
} else if request.model.starts_with("claude") {
let api_key = anthropic_api_key
.ok_or_else(|| anyhow!("no Anthropic AI API key configured on the server"))?;
complete_with_anthropic(request, response, session, api_key).await?;
}
Ok(())
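
Not part of this diff: a compact sketch of the model routing above, where a model identifier is either `provider/model` or a bare model name whose prefix implies the provider. Names are illustrative and error handling is omitted.

```rust
// Split a model identifier into (provider, model), falling back to
// prefix-based guessing when no provider is given, as in the handler above.
fn provider_and_model(input: &str) -> (&str, &str) {
    if let Some((provider, model)) = input.split_once('/') {
        return (provider, model);
    }
    let provider = if input.starts_with("gpt") {
        "openai"
    } else if input.starts_with("gemini") {
        "google"
    } else if input.starts_with("claude") {
        "anthropic"
    } else {
        "unknown"
    };
    (provider, input)
}

fn main() {
    assert_eq!(
        provider_and_model("anthropic/claude-3-opus"),
        ("anthropic", "claude-3-opus")
    );
    assert_eq!(provider_and_model("gpt-4o"), ("openai", "gpt-4o"));
    assert_eq!(provider_and_model("mystery-model"), ("unknown", "mystery-model"));
}
```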

View File

@@ -38,10 +38,6 @@ impl ZedVersion {
pub fn with_save_as() -> ZedVersion {
ZedVersion(SemanticVersion::new(0, 134, 0))
}
pub fn with_list_directory() -> ZedVersion {
ZedVersion(SemanticVersion::new(0, 145, 0))
}
}
pub trait VersionedMessage {
@@ -191,18 +187,6 @@ impl ConnectionPool {
self.connected_dev_servers.get(&dev_server_id).copied()
}
pub fn dev_server_connection_id_supporting(
&self,
dev_server_id: DevServerId,
required: ZedVersion,
) -> Result<ConnectionId> {
match self.connected_dev_servers.get(&dev_server_id) {
Some(cid) if self.connections[cid].zed_version >= required => Ok(*cid),
Some(_) => Err(anyhow!(proto::ErrorCode::RemoteUpgradeRequired)),
None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
}
}
pub fn channel_user_ids(
&self,
channel_id: ChannelId,

View File

@@ -16,7 +16,6 @@ mod notification_tests;
mod random_channel_buffer_tests;
mod random_project_collaboration_tests;
mod randomized_test_helpers;
mod remote_editing_collaboration_tests;
mod test_server;
use language::{tree_sitter_rust, Language, LanguageConfig, LanguageMatcher};

View File

@@ -66,7 +66,7 @@ async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppC
.update(cx, |store, cx| {
let projects = store.dev_server_projects();
assert_eq!(projects.len(), 1);
assert_eq!(projects[0].paths, vec!["/remote"]);
assert_eq!(projects[0].path, "/remote");
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),
@@ -206,7 +206,7 @@ async fn create_dev_server_project(
.update(cx, |store, cx| {
let projects = store.dev_server_projects();
assert_eq!(projects.len(), 1);
assert_eq!(projects[0].paths, vec!["/remote"]);
assert_eq!(projects[0].path, "/remote");
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),

View File

@@ -135,7 +135,7 @@ async fn test_basic_following(
assert_eq!(editor.selections.ranges(cx), vec![2..1]);
});
// When client B starts following client A, only the active view state is replicated to client B.
// When client B starts following client A, all visible view states are replicated to client B.
workspace_b.update(cx_b, |workspace, cx| workspace.follow(peer_id_a, cx));
cx_c.executor().run_until_parked();
@@ -156,7 +156,7 @@ async fn test_basic_following(
);
assert_eq!(
editor_b1.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
vec![3..3]
vec![3..2]
);
executor.run_until_parked();
@@ -194,7 +194,7 @@ async fn test_basic_following(
// Client C unfollows client A.
workspace_c.update(cx_c, |workspace, cx| {
workspace.unfollow(peer_id_a, cx).unwrap();
workspace.unfollow(&workspace.active_pane().clone(), cx);
});
// All clients see that clients B is following client A.
@@ -266,7 +266,7 @@ async fn test_basic_following(
// When client A activates a different editor, client B does so as well.
workspace_a.update(cx_a, |workspace, cx| {
workspace.activate_item(&editor_a1, true, true, cx)
workspace.activate_item(&editor_a1, cx)
});
executor.run_until_parked();
workspace_b.update(cx_b, |workspace, cx| {
@@ -311,7 +311,7 @@ async fn test_basic_following(
let editor = cx.new_view(|cx| {
Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), true, cx)
});
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, cx);
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, cx);
editor
});
executor.run_until_parked();
@@ -398,10 +398,10 @@ async fn test_basic_following(
// After unfollowing, client B stops receiving updates from client A.
workspace_b.update(cx_b, |workspace, cx| {
workspace.unfollow(peer_id_a, cx).unwrap()
workspace.unfollow(&workspace.active_pane().clone(), cx)
});
workspace_a.update(cx_a, |workspace, cx| {
workspace.activate_item(&editor_a2, true, true, cx)
workspace.activate_item(&editor_a2, cx)
});
executor.run_until_parked();
assert_eq!(
@@ -466,7 +466,7 @@ async fn test_basic_following(
// Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
workspace_b.update(cx_b, |workspace, cx| {
workspace.activate_item(&multibuffer_editor_b, true, true, cx)
workspace.activate_item(&multibuffer_editor_b, cx)
});
executor.run_until_parked();
workspace_a.update(cx_a, |workspace, cx| {

View File

@@ -6,7 +6,6 @@ use crate::{
},
};
use anyhow::{anyhow, Result};
use assistant::ContextStore;
use call::{room, ActiveCall, ParticipantLocation, Room};
use client::{User, RECEIVE_TIMEOUT};
use collections::{HashMap, HashSet};
@@ -1528,7 +1527,7 @@ async fn test_project_reconnect(
});
let (worktree_a2, _) = project_a1
.update(cx_a, |p, cx| {
p.find_or_create_worktree("/root-1/dir2", true, cx)
p.find_or_create_local_worktree("/root-1/dir2", true, cx)
})
.await
.unwrap();
@@ -1601,7 +1600,7 @@ async fn test_project_reconnect(
});
let (worktree_a3, _) = project_a1
.update(cx_a, |p, cx| {
p.find_or_create_worktree("/root-1/dir3", true, cx)
p.find_or_create_local_worktree("/root-1/dir3", true, cx)
})
.await
.unwrap();
@@ -1725,7 +1724,7 @@ async fn test_project_reconnect(
// While client B is disconnected, add and remove worktrees from client A's project.
let (worktree_a4, _) = project_a1
.update(cx_a, |p, cx| {
p.find_or_create_worktree("/root-1/dir4", true, cx)
p.find_or_create_local_worktree("/root-1/dir4", true, cx)
})
.await
.unwrap();
@@ -3327,7 +3326,7 @@ async fn test_local_settings(
let store = cx.global::<SettingsStore>();
assert_eq!(
store
.local_settings(worktree_b.entity_id().as_u64() as _)
.local_settings(worktree_b.read(cx).id().to_usize())
.collect::<Vec<_>>(),
&[
(Path::new("").into(), r#"{"tab_size":2}"#.to_string()),
@@ -3346,7 +3345,7 @@ async fn test_local_settings(
let store = cx.global::<SettingsStore>();
assert_eq!(
store
.local_settings(worktree_b.entity_id().as_u64() as _)
.local_settings(worktree_b.read(cx).id().to_usize())
.collect::<Vec<_>>(),
&[
(Path::new("").into(), r#"{}"#.to_string()),
@@ -3375,7 +3374,7 @@ async fn test_local_settings(
let store = cx.global::<SettingsStore>();
assert_eq!(
store
.local_settings(worktree_b.entity_id().as_u64() as _)
.local_settings(worktree_b.read(cx).id().to_usize())
.collect::<Vec<_>>(),
&[
(Path::new("a").into(), r#"{"tab_size":8}"#.to_string()),
@@ -3407,7 +3406,7 @@ async fn test_local_settings(
let store = cx.global::<SettingsStore>();
assert_eq!(
store
.local_settings(worktree_b.entity_id().as_u64() as _)
.local_settings(worktree_b.read(cx).id().to_usize())
.collect::<Vec<_>>(),
&[(Path::new("a").into(), r#"{"hard_tabs":true}"#.to_string()),]
)
@@ -4887,7 +4886,7 @@ async fn test_project_search(
let (project_a, _) = client_a.build_local_project("/root/dir-1", cx_a).await;
let (worktree_2, _) = project_a
.update(cx_a, |p, cx| {
p.find_or_create_worktree("/root/dir-2", true, cx)
p.find_or_create_local_worktree("/root/dir-2", true, cx)
})
.await
.unwrap();
@@ -6450,123 +6449,3 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
assert!(!pane.can_navigate_forward());
});
}
#[gpui::test(iterations = 10)]
async fn test_context_collaboration_with_reconnect(
executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
client_a.fs().insert_tree("/a", Default::default()).await;
let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
// Client A sees that a guest has joined.
executor.run_until_parked();
project_a.read_with(cx_a, |project, _| {
assert_eq!(project.collaborators().len(), 1);
});
project_b.read_with(cx_b, |project, _| {
assert_eq!(project.collaborators().len(), 1);
});
let context_store_a = cx_a
.update(|cx| ContextStore::new(project_a.clone(), cx))
.await
.unwrap();
let context_store_b = cx_b
.update(|cx| ContextStore::new(project_b.clone(), cx))
.await
.unwrap();
// Client A creates a new context.
let context_a = context_store_a.update(cx_a, |store, cx| store.create(cx));
executor.run_until_parked();
// Client B retrieves host's contexts and joins one.
let context_b = context_store_b
.update(cx_b, |store, cx| {
let host_contexts = store.host_contexts().to_vec();
assert_eq!(host_contexts.len(), 1);
store.open_remote_context(host_contexts[0].id.clone(), cx)
})
.await
.unwrap();
// Host and guest make changes
context_a.update(cx_a, |context, cx| {
context.buffer().update(cx, |buffer, cx| {
buffer.edit([(0..0, "Host change\n")], None, cx)
})
});
context_b.update(cx_b, |context, cx| {
context.buffer().update(cx, |buffer, cx| {
buffer.edit([(0..0, "Guest change\n")], None, cx)
})
});
executor.run_until_parked();
assert_eq!(
context_a.read_with(cx_a, |context, cx| context.buffer().read(cx).text()),
"Guest change\nHost change\n"
);
assert_eq!(
context_b.read_with(cx_b, |context, cx| context.buffer().read(cx).text()),
"Guest change\nHost change\n"
);
// Disconnect client A and make some changes while disconnected.
server.disconnect_client(client_a.peer_id().unwrap());
server.forbid_connections();
context_a.update(cx_a, |context, cx| {
context.buffer().update(cx, |buffer, cx| {
buffer.edit([(0..0, "Host offline change\n")], None, cx)
})
});
context_b.update(cx_b, |context, cx| {
context.buffer().update(cx, |buffer, cx| {
buffer.edit([(0..0, "Guest offline change\n")], None, cx)
})
});
executor.run_until_parked();
assert_eq!(
context_a.read_with(cx_a, |context, cx| context.buffer().read(cx).text()),
"Host offline change\nGuest change\nHost change\n"
);
assert_eq!(
context_b.read_with(cx_b, |context, cx| context.buffer().read(cx).text()),
"Guest offline change\nGuest change\nHost change\n"
);
// Allow client A to reconnect and verify that contexts converge.
server.allow_connections();
executor.advance_clock(RECEIVE_TIMEOUT);
assert_eq!(
context_a.read_with(cx_a, |context, cx| context.buffer().read(cx).text()),
"Guest offline change\nHost offline change\nGuest change\nHost change\n"
);
assert_eq!(
context_b.read_with(cx_b, |context, cx| context.buffer().read(cx).text()),
"Guest offline change\nHost offline change\nGuest change\nHost change\n"
);
// Client A disconnects without being able to reconnect. Context B becomes readonly.
server.forbid_connections();
server.disconnect_client(client_a.peer_id().unwrap());
executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
context_b.read_with(cx_b, |context, cx| {
assert!(context.buffer().read(cx).read_only());
});
}

View File

@@ -581,7 +581,7 @@ impl RandomizedTest for ProjectCollaborationTest {
}
project
.update(cx, |project, cx| {
project.find_or_create_worktree(&new_root_path, true, cx)
project.find_or_create_local_worktree(&new_root_path, true, cx)
})
.await
.unwrap();
@@ -1237,7 +1237,7 @@ impl RandomizedTest for ProjectCollaborationTest {
}
}
for buffer in guest_project.opened_buffers(cx) {
for buffer in guest_project.opened_buffers() {
let buffer = buffer.read(cx);
assert_eq!(
buffer.deferred_ops_len(),
@@ -1287,8 +1287,8 @@ impl RandomizedTest for ProjectCollaborationTest {
for guest_buffer in guest_buffers {
let buffer_id =
guest_buffer.read_with(client_cx, |buffer, _| buffer.remote_id());
let host_buffer = host_project.read_with(host_cx, |project, cx| {
project.buffer_for_id(buffer_id, cx).unwrap_or_else(|| {
let host_buffer = host_project.read_with(host_cx, |project, _| {
project.buffer_for_id(buffer_id).unwrap_or_else(|| {
panic!(
"host does not have buffer for guest:{}, peer:{:?}, id:{}",
client.username,

View File

@@ -1,102 +0,0 @@
use crate::tests::TestServer;
use call::ActiveCall;
use fs::{FakeFs, Fs as _};
use gpui::{Context as _, TestAppContext};
use remote::SshSession;
use remote_server::HeadlessProject;
use serde_json::json;
use std::{path::Path, sync::Arc};
#[gpui::test]
async fn test_sharing_an_ssh_remote_project(
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
server_cx: &mut TestAppContext,
) {
let executor = cx_a.executor();
let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
// Set up project on remote FS
let (client_ssh, server_ssh) = SshSession::fake(cx_a, server_cx);
let remote_fs = FakeFs::new(server_cx.executor());
remote_fs
.insert_tree(
"/code",
json!({
"project1": {
"README.md": "# project 1",
"src": {
"lib.rs": "fn one() -> usize { 1 }"
}
},
"project2": {
"README.md": "# project 2",
},
}),
)
.await;
// User A connects to the remote project via SSH.
server_cx.update(HeadlessProject::init);
let _headless_project =
server_cx.new_model(|cx| HeadlessProject::new(server_ssh, remote_fs.clone(), cx));
let (project_a, worktree_id) = client_a
.build_ssh_project("/code/project1", client_ssh, cx_a)
.await;
// User A shares the remote project.
let active_call_a = cx_a.read(ActiveCall::global);
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
// User B joins the project.
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let worktree_b = project_b
.update(cx_b, |project, cx| project.worktree_for_id(worktree_id, cx))
.unwrap();
executor.run_until_parked();
worktree_b.update(cx_b, |worktree, _cx| {
assert_eq!(
worktree.paths().map(Arc::as_ref).collect::<Vec<_>>(),
vec![
Path::new("README.md"),
Path::new("src"),
Path::new("src/lib.rs"),
]
);
});
// User B can open buffers in the remote project.
let buffer_b = project_b
.update(cx_b, |project, cx| {
project.open_buffer((worktree_id, "src/lib.rs"), cx)
})
.await
.unwrap();
buffer_b.update(cx_b, |buffer, cx| {
assert_eq!(buffer.text(), "fn one() -> usize { 1 }");
let ix = buffer.text().find('1').unwrap();
buffer.edit([(ix..ix + 1, "100")], None, cx);
});
project_b
.update(cx_b, |project, cx| project.save_buffer(buffer_b, cx))
.await
.unwrap();
assert_eq!(
remote_fs
.load("/code/project1/src/lib.rs".as_ref())
.await
.unwrap(),
"fn one() -> usize { 100 }"
);
}

View File

@@ -25,7 +25,6 @@ use node_runtime::FakeNodeRuntime;
use notifications::NotificationStore;
use parking_lot::Mutex;
use project::{Project, WorktreeId};
use remote::SshSession;
use rpc::{
proto::{self, ChannelRole},
RECEIVE_TIMEOUT,
@@ -295,8 +294,6 @@ impl TestServer {
menu::init();
dev_server_projects::init(client.clone(), cx);
settings::KeymapFile::load_asset(os_keymap, cx).unwrap();
completion::FakeCompletionProvider::setup_test(cx);
assistant::context_store::init(&client);
});
client
@@ -806,7 +803,9 @@ impl TestClient {
) -> (Model<Project>, WorktreeId) {
let project = self.build_empty_local_project(cx);
let (worktree, _) = project
.update(cx, |p, cx| p.find_or_create_worktree(root_path, true, cx))
.update(cx, |p, cx| {
p.find_or_create_local_worktree(root_path, true, cx)
})
.await
.unwrap();
worktree
@@ -815,30 +814,6 @@ impl TestClient {
(project, worktree.read_with(cx, |tree, _| tree.id()))
}
pub async fn build_ssh_project(
&self,
root_path: impl AsRef<Path>,
ssh: Arc<SshSession>,
cx: &mut TestAppContext,
) -> (Model<Project>, WorktreeId) {
let project = cx.update(|cx| {
Project::ssh(
ssh,
self.client().clone(),
self.app_state.node_runtime.clone(),
self.app_state.user_store.clone(),
self.app_state.languages.clone(),
self.app_state.fs.clone(),
cx,
)
});
let (worktree, _) = project
.update(cx, |p, cx| p.find_or_create_worktree(root_path, true, cx))
.await
.unwrap();
(project, worktree.read_with(cx, |tree, _| tree.id()))
}
pub async fn build_test_project(&self, cx: &mut TestAppContext) -> Model<Project> {
self.fs()
.insert_tree(

View File

@@ -32,7 +32,6 @@ test-support = [
anyhow.workspace = true
call.workspace = true
channel.workspace = true
chrono.workspace = true
client.workspace = true
collections.workspace = true
db.workspace = true

View File

@@ -11,22 +11,21 @@ use editor::{
EditorEvent,
};
use gpui::{
actions, AnyView, AppContext, ClipboardItem, Entity as _, EventEmitter, FocusableView, Model,
Pixels, Point, Render, Subscription, Task, View, ViewContext, VisualContext as _, WeakView,
WindowContext,
actions, AnyElement, AnyView, AppContext, ClipboardItem, Entity as _, EventEmitter,
FocusableView, IntoElement as _, Model, Pixels, Point, Render, Subscription, Task, View,
ViewContext, VisualContext as _, WeakView, WindowContext,
};
use project::Project;
use rpc::proto::ChannelVisibility;
use std::{
any::{Any, TypeId},
sync::Arc,
};
use ui::prelude::*;
use ui::{prelude::*, Label};
use util::ResultExt;
use workspace::item::TabContentParams;
use workspace::{item::Dedup, notifications::NotificationId};
use workspace::notifications::NotificationId;
use workspace::{
item::{FollowableItem, Item, ItemEvent, ItemHandle},
item::{FollowableItem, Item, ItemEvent, ItemHandle, TabContentParams},
register_followable_item,
searchable::SearchableItemHandle,
ItemNavHistory, Pane, SaveIntent, Toast, ViewId, Workspace, WorkspaceId,
};
@@ -34,7 +33,7 @@ use workspace::{
actions!(collab, [CopyLink]);
pub fn init(cx: &mut AppContext) {
workspace::FollowableViewRegistry::register::<ChannelView>(cx)
register_followable_item::<ChannelView>(cx)
}
pub struct ChannelView {
@@ -84,56 +83,6 @@ impl ChannelView {
pane: View<Pane>,
workspace: View<Workspace>,
cx: &mut WindowContext,
) -> Task<Result<View<Self>>> {
let channel_view = Self::load(channel_id, workspace, cx);
cx.spawn(|mut cx| async move {
let channel_view = channel_view.await?;
pane.update(&mut cx, |pane, cx| {
let buffer_id = channel_view.read(cx).channel_buffer.read(cx).remote_id(cx);
let existing_view = pane
.items_of_type::<Self>()
.find(|view| view.read(cx).channel_buffer.read(cx).remote_id(cx) == buffer_id);
// If this channel buffer is already open in this pane, just return it.
if let Some(existing_view) = existing_view.clone() {
if existing_view.read(cx).channel_buffer == channel_view.read(cx).channel_buffer
{
if let Some(link_position) = link_position {
existing_view.update(cx, |channel_view, cx| {
channel_view.focus_position_from_link(link_position, true, cx)
});
}
return existing_view;
}
}
// If the pane contained a disconnected view for this channel buffer,
// replace that.
if let Some(existing_item) = existing_view {
if let Some(ix) = pane.index_for_item(&existing_item) {
pane.close_item_by_id(existing_item.entity_id(), SaveIntent::Skip, cx)
.detach();
pane.add_item(Box::new(channel_view.clone()), true, true, Some(ix), cx);
}
}
if let Some(link_position) = link_position {
channel_view.update(cx, |channel_view, cx| {
channel_view.focus_position_from_link(link_position, true, cx)
});
}
channel_view
})
})
}
pub fn load(
channel_id: ChannelId,
workspace: View<Workspace>,
cx: &mut WindowContext,
) -> Task<Result<View<Self>>> {
let weak_workspace = workspace.downgrade();
let workspace = workspace.read(cx);
@@ -158,11 +107,49 @@ impl ChannelView {
})
})?;
cx.new_view(|cx| {
let mut this =
Self::new(project, weak_workspace, channel_store, channel_buffer, cx);
this.acknowledge_buffer_version(cx);
this
pane.update(&mut cx, |pane, cx| {
let buffer_id = channel_buffer.read(cx).remote_id(cx);
let existing_view = pane
.items_of_type::<Self>()
.find(|view| view.read(cx).channel_buffer.read(cx).remote_id(cx) == buffer_id);
// If this channel buffer is already open in this pane, just return it.
if let Some(existing_view) = existing_view.clone() {
if existing_view.read(cx).channel_buffer == channel_buffer {
if let Some(link_position) = link_position {
existing_view.update(cx, |channel_view, cx| {
channel_view.focus_position_from_link(link_position, true, cx)
});
}
return existing_view;
}
}
let view = cx.new_view(|cx| {
let mut this =
Self::new(project, weak_workspace, channel_store, channel_buffer, cx);
this.acknowledge_buffer_version(cx);
this
});
// If the pane contained a disconnected view for this channel buffer,
// replace that.
if let Some(existing_item) = existing_view {
if let Some(ix) = pane.index_for_item(&existing_item) {
pane.close_item_by_id(existing_item.entity_id(), SaveIntent::Skip, cx)
.detach();
pane.add_item(Box::new(view.clone()), true, true, Some(ix), cx);
}
}
if let Some(link_position) = link_position {
view.update(cx, |channel_view, cx| {
channel_view.focus_position_from_link(link_position, true, cx)
});
}
view
})
})
}
@@ -387,45 +374,24 @@ impl Item for ChannelView {
}
}
fn tab_icon(&self, cx: &WindowContext) -> Option<Icon> {
let channel = self.channel(cx)?;
let icon = match channel.visibility {
ChannelVisibility::Public => IconName::Public,
ChannelVisibility::Members => IconName::Hash,
};
Some(Icon::new(icon))
}
fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> gpui::AnyElement {
let (channel_name, status) = if let Some(channel) = self.channel(cx) {
let status = match (
fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement {
let label = if let Some(channel) = self.channel(cx) {
match (
self.channel_buffer.read(cx).buffer().read(cx).read_only(),
self.channel_buffer.read(cx).is_connected(),
) {
(false, true) => None,
(true, true) => Some("read-only"),
(_, false) => Some("disconnected"),
};
(channel.name.clone(), status)
(false, true) => format!("#{}", channel.name),
(true, true) => format!("#{} (read-only)", channel.name),
(_, false) => format!("#{} (disconnected)", channel.name),
}
} else {
("<unknown>".into(), Some("disconnected"))
"channel notes (disconnected)".to_string()
};
h_flex()
.gap_2()
.child(
Label::new(channel_name)
.color(params.text_color())
.italic(params.preview),
)
.when_some(status, |element, status| {
element.child(
Label::new(status)
.size(LabelSize::XSmall)
.color(Color::Muted),
)
Label::new(label)
.color(if params.selected {
Color::Default
} else {
Color::Muted
})
.into_any_element()
}
@@ -512,6 +478,7 @@ impl FollowableItem for ChannelView {
}
fn from_state_proto(
pane: View<workspace::Pane>,
workspace: View<workspace::Workspace>,
remote_id: workspace::ViewId,
state: &mut Option<proto::view::Variant>,
@@ -524,7 +491,8 @@ impl FollowableItem for ChannelView {
unreachable!()
};
let open = ChannelView::load(ChannelId(state.channel_id), workspace, cx);
let open =
ChannelView::open_in_pane(ChannelId(state.channel_id), None, pane, workspace, cx);
Some(cx.spawn(|mut cx| async move {
let this = open.await?;
@@ -595,19 +563,6 @@ impl FollowableItem for ChannelView {
fn to_follow_event(event: &Self::Event) -> Option<workspace::item::FollowEvent> {
Editor::to_follow_event(event)
}
fn dedup(&self, existing: &Self, cx: &WindowContext) -> Option<Dedup> {
let existing = existing.channel_buffer.read(cx);
if self.channel_buffer.read(cx).channel_id == existing.channel_id {
if existing.is_connected() {
Some(Dedup::KeepExisting)
} else {
Some(Dedup::ReplaceExisting)
}
} else {
None
}
}
}
struct ChannelBufferCollaborationHub(Model<ChannelBuffer>);

View File

@@ -111,7 +111,6 @@ impl ChatPanel {
this.is_scrolled_to_bottom = !event.is_scrolled;
}));
let local_offset = chrono::Local::now().offset().local_minus_utc();
let mut this = Self {
fs,
client,
@@ -121,7 +120,7 @@ impl ChatPanel {
active_chat: Default::default(),
pending_serialization: Task::ready(None),
message_editor: input_editor,
local_timezone: UtcOffset::from_whole_seconds(local_offset).unwrap(),
local_timezone: cx.local_timezone(),
subscriptions: Vec::new(),
is_scrolled_to_bottom: true,
active: false,

View File

@@ -6,7 +6,7 @@ use editor::{AnchorRangeExt, CompletionProvider, Editor, EditorElement, EditorSt
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
AsyncWindowContext, FocusableView, FontStyle, FontWeight, HighlightStyle, IntoElement, Model,
Render, Task, TextStyle, View, ViewContext, WeakView,
Render, Task, TextStyle, View, ViewContext, WeakView, WhiteSpace,
};
use language::{
language_settings::SoftWrap, Anchor, Buffer, BufferSnapshot, CodeLabel, LanguageRegistry,
@@ -537,7 +537,10 @@ impl Render for MessageEditor {
font_weight: settings.ui_font.weight,
font_style: FontStyle::Normal,
line_height: relative(1.3),
..Default::default()
background_color: None,
underline: None,
strikethrough: None,
white_space: WhiteSpace::Normal,
};
div()

View File

@@ -16,7 +16,7 @@ use gpui::{
EventEmitter, FocusHandle, FocusableView, FontStyle, InteractiveElement, IntoElement,
ListOffset, ListState, Model, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel,
Render, SharedString, Styled, Subscription, Task, TextStyle, View, ViewContext, VisualContext,
WeakView,
WeakView, WhiteSpace,
};
use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrev};
use project::{Fs, Project};
@@ -2194,7 +2194,10 @@ impl CollabPanel {
font_weight: settings.ui_font.weight,
font_style: FontStyle::Normal,
line_height: relative(1.3),
..Default::default()
background_color: None,
underline: None,
strikethrough: None,
white_space: WhiteSpace::Normal,
};
EditorElement::new(

View File

@@ -5,6 +5,7 @@ use gpui::{
};
use picker::{Picker, PickerDelegate};
use std::sync::Arc;
use theme::ActiveTheme as _;
use ui::{prelude::*, Avatar, ListItem, ListItemSpacing};
use util::{ResultExt as _, TryFutureExt};
use workspace::ModalView;

View File

@@ -127,12 +127,11 @@ impl NotificationPanel {
},
));
let local_offset = chrono::Local::now().offset().local_minus_utc();
let mut this = Self {
fs,
client,
user_store,
local_timezone: UtcOffset::from_whole_seconds(local_offset).unwrap(),
local_timezone: cx.local_timezone(),
channel_store: ChannelStore::global(cx),
notification_store: NotificationStore::global(cx),
notification_list,

View File

@@ -17,10 +17,9 @@ use gpui::{
use picker::{Picker, PickerDelegate};
use postage::{sink::Sink, stream::Stream};
use settings::Settings;
use ui::{h_flex, prelude::*, v_flex, HighlightedLabel, KeyBinding, ListItem, ListItemSpacing};
use util::ResultExt;
use workspace::{ModalView, Workspace, WorkspaceSettings};
use workspace::{ModalView, Workspace};
use zed_actions::OpenZedUrl;
actions!(command_palette, [Toggle]);
@@ -249,13 +248,9 @@ impl PickerDelegate for CommandPaletteDelegate {
fn update_matches(
&mut self,
mut query: String,
query: String,
cx: &mut ViewContext<Picker<Self>>,
) -> gpui::Task<()> {
let settings = WorkspaceSettings::get_global(cx);
if let Some(alias) = settings.command_aliases.get(&query) {
query = alias.to_string();
}
let (mut tx, mut rx) = postage::dispatch::channel(1);
let task = cx.background_executor().spawn({
let mut commands = self.all_commands.clone();
@@ -482,7 +477,7 @@ mod tests {
});
workspace.update(cx, |workspace, cx| {
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, cx);
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, cx);
editor.update(cx, |editor, cx| editor.focus(cx))
});

View File

@@ -1,56 +0,0 @@
[package]
name = "completion"
version = "0.1.0"
edition = "2021"
publish = false
license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/completion.rs"
doctest = false
[features]
test-support = [
"editor/test-support",
"language/test-support",
"project/test-support",
"text/test-support",
]
[dependencies]
anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
client.workspace = true
collections.workspace = true
editor.workspace = true
futures.workspace = true
gpui.workspace = true
http.workspace = true
language_model.workspace = true
log.workspace = true
menu.workspace = true
ollama = { workspace = true, features = ["schemars"] }
open_ai = { workspace = true, features = ["schemars"] }
parking_lot.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smol.workspace = true
strum.workspace = true
theme.workspace = true
tiktoken-rs.workspace = true
ui.workspace = true
util.workspace = true
[dev-dependencies]
ctor.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
language = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
text = { workspace = true, features = ["test-support"] }
unindent.workspace = true

View File

@@ -1 +0,0 @@
../../LICENSE-GPL

View File

@@ -1,291 +0,0 @@
mod anthropic;
mod cloud;
#[cfg(any(test, feature = "test-support"))]
mod fake;
mod ollama;
mod open_ai;
pub use anthropic::*;
use anyhow::Result;
use client::Client;
pub use cloud::*;
#[cfg(any(test, feature = "test-support"))]
pub use fake::*;
use futures::{future::BoxFuture, stream::BoxStream, StreamExt};
use gpui::{AnyView, AppContext, Task, WindowContext};
use language_model::{LanguageModel, LanguageModelRequest};
pub use ollama::*;
pub use open_ai::*;
use parking_lot::RwLock;
use smol::lock::{Semaphore, SemaphoreGuardArc};
use std::{any::Any, pin::Pin, sync::Arc, task::Poll};
pub struct CompletionResponse {
inner: BoxStream<'static, Result<String>>,
_lock: SemaphoreGuardArc,
}
impl futures::Stream for CompletionResponse {
type Item = Result<String>;
fn poll_next(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<Option<Self::Item>> {
Pin::new(&mut self.inner).poll_next(cx)
}
}
pub trait LanguageModelCompletionProvider: Send + Sync {
fn available_models(&self) -> Vec<LanguageModel>;
fn settings_version(&self) -> usize;
fn is_authenticated(&self) -> bool;
fn authenticate(&self, cx: &AppContext) -> Task<Result<()>>;
fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView;
fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>>;
fn model(&self) -> LanguageModel;
fn count_tokens(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>>;
fn stream_completion(
&self,
request: LanguageModelRequest,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;
fn as_any_mut(&mut self) -> &mut dyn Any;
}
const MAX_CONCURRENT_COMPLETION_REQUESTS: usize = 4;
pub struct CompletionProvider {
provider: Arc<RwLock<dyn LanguageModelCompletionProvider>>,
client: Option<Arc<Client>>,
request_limiter: Arc<Semaphore>,
}
impl CompletionProvider {
pub fn new(
provider: Arc<RwLock<dyn LanguageModelCompletionProvider>>,
client: Option<Arc<Client>>,
) -> Self {
Self {
provider,
client,
request_limiter: Arc::new(Semaphore::new(MAX_CONCURRENT_COMPLETION_REQUESTS)),
}
}
pub fn available_models(&self) -> Vec<LanguageModel> {
self.provider.read().available_models()
}
pub fn settings_version(&self) -> usize {
self.provider.read().settings_version()
}
pub fn is_authenticated(&self) -> bool {
self.provider.read().is_authenticated()
}
pub fn authenticate(&self, cx: &AppContext) -> Task<Result<()>> {
self.provider.read().authenticate(cx)
}
pub fn authentication_prompt(&self, cx: &mut WindowContext) -> AnyView {
self.provider.read().authentication_prompt(cx)
}
pub fn reset_credentials(&self, cx: &AppContext) -> Task<Result<()>> {
self.provider.read().reset_credentials(cx)
}
pub fn model(&self) -> LanguageModel {
self.provider.read().model()
}
pub fn count_tokens(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
self.provider.read().count_tokens(request, cx)
}
pub fn stream_completion(
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> Task<Result<CompletionResponse>> {
let rate_limiter = self.request_limiter.clone();
let provider = self.provider.clone();
cx.foreground_executor().spawn(async move {
let lock = rate_limiter.acquire_arc().await;
let response = provider.read().stream_completion(request);
let response = response.await?;
Ok(CompletionResponse {
inner: response,
_lock: lock,
})
})
}
pub fn stream_completion_bg(
// TODO consider unifying this with stream_completion, since they share so much code
// (design question: do we want to make a Spawn or perhaps Executor trait to abstract over bg and fg executors?)
&self,
request: LanguageModelRequest,
cx: &AppContext,
) -> Task<Result<CompletionResponse>> {
let rate_limiter = self.request_limiter.clone();
let provider = self.provider.clone();
cx.background_executor().spawn(async move {
let lock = rate_limiter.acquire_arc().await;
let response = provider.read().stream_completion(request);
let response = response.await?;
Ok(CompletionResponse {
inner: response,
_lock: lock,
})
})
}
pub fn complete(&self, request: LanguageModelRequest, cx: &AppContext) -> Task<Result<String>> {
let response = self.stream_completion(request, cx);
cx.foreground_executor().spawn(async move {
let mut chunks = response.await?;
let mut completion = String::new();
while let Some(chunk) = chunks.next().await {
let chunk = chunk?;
completion.push_str(&chunk);
}
Ok(completion)
})
}
pub fn update_provider(
&mut self,
get_provider: impl FnOnce(Arc<Client>) -> Arc<RwLock<dyn LanguageModelCompletionProvider>>,
) {
if let Some(client) = &self.client {
self.provider = get_provider(Arc::clone(client));
} else {
log::warn!("completion provider cannot be updated because its client was not set");
}
}
}
impl gpui::Global for CompletionProvider {}
impl CompletionProvider {
pub fn global(cx: &AppContext) -> &Self {
cx.global::<Self>()
}
pub fn update_current_as<R, T: LanguageModelCompletionProvider + 'static>(
&mut self,
update: impl FnOnce(&mut T) -> R,
) -> Option<R> {
let mut provider = self.provider.write();
if let Some(provider) = provider.as_any_mut().downcast_mut::<T>() {
Some(update(provider))
} else {
None
}
}
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use gpui::AppContext;
use parking_lot::RwLock;
use settings::SettingsStore;
use smol::stream::StreamExt;
use crate::{
CompletionProvider, FakeCompletionProvider, LanguageModelRequest,
MAX_CONCURRENT_COMPLETION_REQUESTS,
};
#[gpui::test]
fn test_rate_limiting(cx: &mut AppContext) {
SettingsStore::test(cx);
let fake_provider = FakeCompletionProvider::setup_test(cx);
let provider = CompletionProvider::new(Arc::new(RwLock::new(fake_provider.clone())), None);
// Enqueue some requests
for i in 0..MAX_CONCURRENT_COMPLETION_REQUESTS * 2 {
let response = provider.stream_completion(
LanguageModelRequest {
temperature: i as f32 / 10.0,
..Default::default()
},
cx,
);
cx.background_executor()
.spawn(async move {
let mut stream = response.await.unwrap();
while let Some(message) = stream.next().await {
message.unwrap();
}
})
.detach();
}
cx.background_executor().run_until_parked();
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS
);
// Get the first completion request that is in flight and mark it as completed.
let completion = fake_provider
.pending_completions()
.into_iter()
.next()
.unwrap();
fake_provider.finish_completion(&completion);
// Ensure that the number of in-flight completion requests is reduced.
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS - 1
);
cx.background_executor().run_until_parked();
// Ensure that another completion request was allowed to acquire the lock.
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS
);
// Mark all in-flight completion requests as finished.
for request in fake_provider.pending_completions() {
fake_provider.finish_completion(&request);
}
assert_eq!(fake_provider.completion_count(), 0);
// Wait until the background tasks acquire the lock again.
cx.background_executor().run_until_parked();
assert_eq!(
fake_provider.completion_count(),
MAX_CONCURRENT_COMPLETION_REQUESTS - 1
);
// Finish all remaining completion requests.
for request in fake_provider.pending_completions() {
fake_provider.finish_completion(&request);
}
cx.background_executor().run_until_parked();
assert_eq!(fake_provider.completion_count(), 0);
}
}

View File

@@ -1236,7 +1236,7 @@ mod tests {
unimplemented!()
}
fn to_proto(&self, _: &AppContext) -> rpc::proto::File {
fn to_proto(&self) -> rpc::proto::File {
unimplemented!()
}

View File

@@ -20,7 +20,7 @@ pub struct Store {
pub struct DevServerProject {
pub id: DevServerProjectId,
pub project_id: Option<ProjectId>,
pub paths: Vec<SharedString>,
pub path: SharedString,
pub dev_server_id: DevServerId,
}
@@ -29,7 +29,7 @@ impl From<proto::DevServerProject> for DevServerProject {
Self {
id: DevServerProjectId(project.id),
project_id: project.project_id.map(|id| ProjectId(id)),
paths: project.paths.into_iter().map(|path| path.into()).collect(),
path: project.path.into(),
dev_server_id: DevServerId(project.dev_server_id),
}
}
@@ -85,7 +85,7 @@ impl Store {
.filter(|project| project.dev_server_id == id)
.cloned()
.collect();
projects.sort_by_key(|p| (p.paths.clone(), p.id));
projects.sort_by_key(|p| (p.path.clone(), p.id));
projects
}
@@ -108,7 +108,7 @@ impl Store {
pub fn dev_server_projects(&self) -> Vec<DevServerProject> {
let mut projects: Vec<DevServerProject> =
self.dev_server_projects.values().cloned().collect();
projects.sort_by_key(|p| (p.paths.clone(), p.id));
projects.sort_by_key(|p| (p.path.clone(), p.id));
projects
}

View File

@@ -18,13 +18,11 @@ collections.workspace = true
ctor.workspace = true
editor.workspace = true
env_logger.workspace = true
feature_flags.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
log.workspace = true
lsp.workspace = true
multi_buffer.workspace = true
project.workspace = true
rand.workspace = true
schemars.workspace = true

View File

@@ -4,18 +4,16 @@ mod toolbar_controls;
#[cfg(test)]
mod diagnostics_tests;
pub(crate) mod grouped_diagnostics;
use anyhow::Result;
use collections::{BTreeSet, HashSet};
use editor::{
diagnostic_block_renderer,
display_map::{BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock},
display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock},
highlight_diagnostic_message,
scroll::Autoscroll,
Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer, ToOffset,
};
use feature_flags::FeatureFlagAppExt;
use futures::{
channel::mpsc::{self, UnboundedSender},
StreamExt as _,
@@ -45,7 +43,7 @@ use ui::{h_flex, prelude::*, Icon, IconName, Label};
use util::ResultExt;
use workspace::{
item::{BreadcrumbText, Item, ItemEvent, ItemHandle, TabContentParams},
ItemNavHistory, ToolbarItemLocation, Workspace,
ItemNavHistory, Pane, ToolbarItemLocation, Workspace,
};
actions!(diagnostics, [Deploy, ToggleWarnings]);
@@ -54,9 +52,6 @@ pub fn init(cx: &mut AppContext) {
ProjectDiagnosticsSettings::register(cx);
cx.observe_new_views(ProjectDiagnosticsEditor::register)
.detach();
if !cx.has_flag::<feature_flags::GroupedDiagnostics>() {
grouped_diagnostics::init(cx);
}
}
struct ProjectDiagnosticsEditor {
@@ -85,7 +80,7 @@ struct DiagnosticGroupState {
primary_diagnostic: DiagnosticEntry<language::Anchor>,
primary_excerpt_ix: usize,
excerpts: Vec<ExcerptId>,
blocks: HashSet<CustomBlockId>,
blocks: HashSet<BlockId>,
block_count: usize,
}
@@ -237,13 +232,13 @@ impl ProjectDiagnosticsEditor {
fn deploy(workspace: &mut Workspace, _: &Deploy, cx: &mut ViewContext<Workspace>) {
if let Some(existing) = workspace.item_of_type::<ProjectDiagnosticsEditor>(cx) {
workspace.activate_item(&existing, true, true, cx);
workspace.activate_item(&existing, cx);
} else {
let workspace_handle = cx.view().downgrade();
let diagnostics = cx.new_view(|cx| {
ProjectDiagnosticsEditor::new(workspace.project().clone(), workspace_handle, cx)
});
workspace.add_item_to_active_pane(Box::new(diagnostics), None, true, cx);
workspace.add_item_to_active_pane(Box::new(diagnostics), None, cx);
}
}
@@ -471,9 +466,7 @@ impl ProjectDiagnosticsEditor {
position: (excerpt_id, entry.range.start),
height: diagnostic.message.matches('\n').count() as u8 + 1,
style: BlockStyle::Fixed,
render: diagnostic_block_renderer(
diagnostic, None, true, true,
),
render: diagnostic_block_renderer(diagnostic, true),
disposition: BlockDisposition::Below,
});
}
@@ -649,7 +642,11 @@ impl Item for ProjectDiagnosticsEditor {
fn tab_content(&self, params: TabContentParams, _: &WindowContext) -> AnyElement {
if self.summary.error_count == 0 && self.summary.warning_count == 0 {
Label::new("No problems")
.color(params.text_color())
.color(if params.selected {
Color::Default
} else {
Color::Muted
})
.into_any_element()
} else {
h_flex()
@@ -659,10 +656,13 @@ impl Item for ProjectDiagnosticsEditor {
h_flex()
.gap_1()
.child(Icon::new(IconName::XCircle).color(Color::Error))
.child(
Label::new(self.summary.error_count.to_string())
.color(params.text_color()),
),
.child(Label::new(self.summary.error_count.to_string()).color(
if params.selected {
Color::Default
} else {
Color::Muted
},
)),
)
})
.when(self.summary.warning_count > 0, |then| {
@@ -670,10 +670,13 @@ impl Item for ProjectDiagnosticsEditor {
h_flex()
.gap_1()
.child(Icon::new(IconName::ExclamationTriangle).color(Color::Warning))
.child(
Label::new(self.summary.warning_count.to_string())
.color(params.text_color()),
),
.child(Label::new(self.summary.warning_count.to_string()).color(
if params.selected {
Color::Default
} else {
Color::Muted
},
)),
)
})
.into_any_element()
@@ -776,12 +779,26 @@ impl Item for ProjectDiagnosticsEditor {
self.editor
.update(cx, |editor, cx| editor.added_to_workspace(workspace, cx));
}
fn serialized_item_kind() -> Option<&'static str> {
Some("diagnostics")
}
fn deserialize(
project: Model<Project>,
workspace: WeakView<Workspace>,
_workspace_id: workspace::WorkspaceId,
_item_id: workspace::ItemId,
cx: &mut ViewContext<Pane>,
) -> Task<Result<View<Self>>> {
Task::ready(Ok(cx.new_view(|cx| Self::new(project, workspace, cx))))
}
}
const DIAGNOSTIC_HEADER: &'static str = "diagnostic header";
fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
let (message, code_ranges) = highlight_diagnostic_message(&diagnostic, None);
let (message, code_ranges) = highlight_diagnostic_message(&diagnostic);
let message: SharedString = message;
Box::new(move |cx| {
let highlight_style: HighlightStyle = cx.theme().colors().text_accent.into();

Some files were not shown because too many files have changed in this diff.