Compare commits: buffer-fon...assistant-

223 commits (abbreviated SHA1s; author and date were not shown in this view):

4740982a64, 1cd34fdd9c, 530224527d, 0de2636324, 7ec963664e, 019821d62c, bb213b6e37, 6a7761e620, 031580f4dc, 1a27016123,
d1425603f6, 583a662ddc, 64617a0ede, b673494f4d, 53f67a8241, 06d2d9da5f, 9e88155a48, 048fc7ad09, bd77232f65, facd04c902,
d8437136c7, d0a5dbd8cb, 76ff467965, e1791b7dd0, 25e239d986, f7ea1370a4, 6108140a02, 135a5f2114, dfd4d2a437, fbc6e930a7,
af5a9fabc6, 25981550d5, cf67fc9055, 68a1ad89bb, e0c83a1d32, 8ae4c3277f, f6eaa8b00f, 85b26e9788, 2ee257a562, bcbf2f2fd3,
efcd31c254, ae3c641bbe, 029eb67043, 63c529552c, c96a96b3ce, 7f81bfb6b7, 33baa377c7, 07f490f9e9, b29643168c, e9a965fe81,
b964fe2ccf, 1be452744a, d298df823f, a111b959d2, 189cece03e, ee531b6f4d, 67e7c33428, 615de381da, 74241d9f93, dd41c10099,
a0fa8a489b, e1685deb29, 4ab48c689f, 2677ec7568, cd6acff635, 5102e37a5b, fee2065b64, c3bcfb374c, 8a02159b82, 9d9bce08a7,
247b0317b9, f082344747, 70427daed2, 13c17267b9, 9247da77a3, 37e4f83a78, 3273f5e404, 8513a24dd8, 54699e39e7, 8fc8309e45,
222034cacf, 9863b920b0, ea952b2a95, dd7eced2b6, d4922eb10b, 95827d4c49, 2602fc47bb, 6d1ea782a4, 870a61dd4d, 250b71fb44,
15c4c4a308, b31df39ab0, 98db7fa61e, bd5473a582, 1fbc04104c, 2f892e3523, 5c3e5cc45d, 11a3d2b04b, 1127b1a0de, c55055599a,
a202499c9a, c2428f9f5d, d5c5394693, bb97432e9a, 1b75f9d620, 4c3178e7a8, 41c8f2caa6, b9e0269991, 4f2214e1d6, e25f0dfb0a,
3c805d4c6b, 4f1861edb6, d7becce9aa, 62171387f6, 47ad010901, 06987edadb, 1e1a2807db, 9782dd342f, 535bcfad10, c76bacb974,
20554d0296, 2c78cf349b, c81eb419d4, c4e446f8a8, bc7eaa6cd5, e93d554725, 775539b3fa, 545319bced, 0b2de51c37, 9a680dafc3,
4c35cfaa69, be2bf98529, 4eb1e65fbb, 52591905fb, d2e83cc148, f633460a8d, 9470a52b5d, fa0302f156, 5d7148bde1, 58991f332b,
9c569c8d95, 1ba0bf925b, 53105ddd16, 210f8ebfed, c015b5c4cd, c1c8a74c7f, 2f00fcbdf6, 5c5fb972d0, 7928095951, 70c3ca4fdd,
d49271a112, e34c443331, 263023021d, 7e1a184446, c834ea75ef, 4d8cba2add, 08aef198d5, 2cfb1ffa77, f3192b6fa6, 33b9aca090,
57b087e41e, 2a9ce3cec3, f5c2483423, 4d314b2dd0, 7a112b22ac, 575eb792fb, 4f776f9ebe, 0c77e1ce45, 904b740e16, f2fc84ab44,
fda3c91f16, 3eb8464d19, 58f57491b1, 3e44e97177, fda21232ae, 57a736d74a, 015e2ecd19, 5037f466f6, f28fde5e58, d1928f084e,
ad22bddffa, da0d968a2c, 200e36311c, db48c75231, 1911a9f39b, faebce8cd0, 573ba83034, 97c5cffbe3, 556ecd94c2, 3289188e0a,
5e4f707951, 5d7642d77d, e64ecdc9ab, ba9c5929af, ad8dd1771a, cb6d0639db, 065f15e9a6, 104558115f, 4e6f24a841, f3a78f613a,
8bca9cea26, 28586060a1, 49371b44cb, 4b40e83b8b, dffddaec4c, a4d6c5da7c, 3ea17248c8, e0e1103228, 65c9e7d3d1, b5b872656b,
f4d9a97195, 7b01a29f5a, 04e89c4c51, 0ab5a524b0, cd5ddfe34b, 0a4c3488dd, a1cbc23fee, 298e9c9387, 6e1ba7e936, bc0c2e0cae,
29a50573a9, 08786fa7bf, f2d61f3ea5
.github/workflows/bump_patch_version.yml (vendored, new file, 49 lines)

@@ -0,0 +1,49 @@
name: bump_patch_version

on:
  workflow_dispatch:
    inputs:
      branch:
        description: "Branch name to run on"
        required: true

concurrency:
  # Allow only one workflow per any non-`main` branch.
  group: ${{ github.workflow }}-${{ github.event.inputs.branch }}
  cancel-in-progress: true

jobs:
  bump_patch_version:
    runs-on:
      - self-hosted
      - test
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          ref: ${{ github.event.inputs.branch }}
          ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }}

      - name: Bump Patch Version
        run: |
          set -eux

          channel=$(cat crates/zed/RELEASE_CHANNEL)

          tag_suffix=""
          case $channel in
            stable)
              ;;
            preview)
              tag_suffix="-pre"
              ;;
            *)
              echo "this must be run on either of stable|preview release branches" >&2
              exit 1
              ;;
          esac
          which cargo-set-version > /dev/null || cargo install cargo-edit --features vendored-openssl
          output=$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //')
          git commit -am "Bump to $output for @$GITHUB_ACTOR" --author "Zed Bot <hi@zed.dev>"
          git tag v${output}${tag_suffix}
          git push origin HEAD v${output}${tag_suffix}
.github/workflows/ci.yml (vendored, 10 lines changed)

@@ -173,6 +173,11 @@ jobs:
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          # We need to fetch more than one commit so that `script/draft-release-notes`
          # is able to diff between the current and previous tag.
          #
          # 25 was chosen arbitrarily.
          fetch-depth: 25
          clean: false
          submodules: "recursive"

@@ -205,6 +210,9 @@ jobs:
            echo "invalid release tag ${GITHUB_REF_NAME}. expected ${expected_tag_name}"
            exit 1
          fi
          mkdir -p target/
          # Ignore any errors that occur while drafting release notes to not fail the build.
          script/draft-release-notes "$version" "$channel" > target/release-notes.md || true

      - name: Generate license file
        run: script/generate-licenses

@@ -248,7 +256,7 @@ jobs:
            target/aarch64-apple-darwin/release/Zed-aarch64.dmg
            target/x86_64-apple-darwin/release/Zed-x86_64.dmg
            target/release/Zed.dmg
          body: ""
          body_file: target/release-notes.md
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.gitignore (vendored, 2 lines changed)

@@ -6,7 +6,7 @@
/plugins/bin
/script/node_modules
/crates/theme/schemas/theme.json
/crates/collab/.admins.json
/crates/collab/seed.json
/assets/*licenses.md
**/venv
.build

@@ -21,5 +21,7 @@
      "formatter": "prettier"
    }
  },
  "formatter": "auto"
  "formatter": "auto",
  "remove_trailing_whitespace_on_save": true,
  "ensure_final_newline_on_save": true
}
@@ -3,5 +3,10 @@
    "label": "clippy",
    "command": "cargo",
    "args": ["xtask", "clippy"]
  },
  {
    "label": "assistant2",
    "command": "cargo",
    "args": ["run", "-p", "assistant2", "--example", "assistant_example"]
  }
]
@@ -11,7 +11,7 @@ If you're looking for ideas about what to work on, check out:
- Our [public roadmap](https://zed.dev/roadmap) contains a rough outline of our near-term priorities for Zed.
- Our [top-ranking issues](https://github.com/zed-industries/zed/issues/5393) based on votes by the community.

Outside of a handful of extremely popular languages and themes, we are generally not looking to extend Zed's language or theme support by directly building them into Zed. We really want to build a plugin system to handle making the editor extensible going forward. If you are passionate about shipping new languages or themes we suggest contributing to the extension system to help us get there faster.
For adding themes or support for a new language to Zed, check out our [extension docs](https://github.com/zed-industries/extensions/blob/main/AUTHORING_EXTENSIONS.md).

## Proposing changes

Cargo.lock (generated, 590 lines changed; diff not shown)
Cargo.toml (35 lines changed)

@@ -4,6 +4,8 @@ members = [
    "crates/anthropic",
    "crates/assets",
    "crates/assistant",
    "crates/assistant_tooling",
    "crates/assistant2",
    "crates/audio",
    "crates/auto_update",
    "crates/breadcrumbs",

@@ -67,12 +69,14 @@ members = [
    "crates/refineable",
    "crates/refineable/derive_refineable",
    "crates/release_channel",
    "crates/remote_projects",
    "crates/rich_text",
    "crates/rope",
    "crates/rpc",
    "crates/task",
    "crates/tasks_ui",
    "crates/search",
    "crates/semantic_index",
    "crates/semantic_version",
    "crates/settings",
    "crates/snippet",

@@ -105,10 +109,12 @@ members = [
    "extensions/clojure",
    "extensions/csharp",
    "extensions/dart",
    "extensions/deno",
    "extensions/elm",
    "extensions/emmet",
    "extensions/erlang",
    "extensions/gleam",
    "extensions/glsl",
    "extensions/haskell",
    "extensions/html",
    "extensions/lua",

@@ -117,8 +123,10 @@ members = [
    "extensions/prisma",
    "extensions/purescript",
    "extensions/svelte",
    "extensions/terraform",
    "extensions/toml",
    "extensions/uiua",
    "extensions/vue",
    "extensions/zig",

    "tooling/xtask",

@@ -132,6 +140,8 @@ ai = { path = "crates/ai" }
anthropic = { path = "crates/anthropic" }
assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
assistant2 = { path = "crates/assistant2" }
assistant_tooling = { path = "crates/assistant_tooling" }
audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" }
base64 = "0.13"

@@ -196,12 +206,14 @@ project_symbols = { path = "crates/project_symbols" }
quick_action_bar = { path = "crates/quick_action_bar" }
recent_projects = { path = "crates/recent_projects" }
release_channel = { path = "crates/release_channel" }
remote_projects = { path = "crates/remote_projects" }
rich_text = { path = "crates/rich_text" }
rope = { path = "crates/rope" }
rpc = { path = "crates/rpc" }
task = { path = "crates/task" }
tasks_ui = { path = "crates/tasks_ui" }
search = { path = "crates/search" }
semantic_index = { path = "crates/semantic_index" }
semantic_version = { path = "crates/semantic_version" }
settings = { path = "crates/settings" }
snippet = { path = "crates/snippet" }

@@ -237,9 +249,8 @@ async-recursion = "1.0.0"
async-tar = "0.4.2"
async-trait = "0.1"
bitflags = "2.4.2"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "810ec594358aafea29a4a3d8ab601d25292b2ce4" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "810ec594358aafea29a4a3d8ab601d25292b2ce4" }
blade-rwh = { package = "raw-window-handle", version = "0.5" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "e82eec97691c3acdb43494484be60d661edfebf3" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "e82eec97691c3acdb43494484be60d661edfebf3" }
cap-std = "3.0"
chrono = { version = "0.4", features = ["serde"] }
clap = { version = "4.4", features = ["derive"] }

@@ -252,9 +263,13 @@ derive_more = "0.99.17"
emojis = "0.6.1"
env_logger = "0.9"
futures = "0.3"
futures-batch = "0.6.1"
futures-lite = "1.13"
git2 = { version = "0.15", default-features = false }
git2 = { version = "0.18", default-features = false }
globset = "0.4"
heed = { git = "https://github.com/meilisearch/heed", rev = "036ac23f73a021894974b9adc815bc95b3e0482a", features = [
    "read-txn-no-tls",
] }
hex = "0.4.3"
ignore = "0.4.22"
indoc = "1"

@@ -322,28 +337,24 @@ tree-sitter-embedded-template = "0.20.0"
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-gomod = { git = "https://github.com/camdencheek/tree-sitter-go-mod" }
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work" }
tree-sitter-hcl = { git = "https://github.com/MichaHoffmann/tree-sitter-hcl", rev = "v1.1.0" }
rustc-demangle = "0.1.23"
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
tree-sitter-html = "0.19.0"
tree-sitter-jsdoc = { git = "https://github.com/tree-sitter/tree-sitter-jsdoc", ref = "6a6cf9e7341af32d8e2b2e24a37fbfebefc3dc55" }
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "7dd29f9616822e5fc259f5b4ae6c4ded9a71a132" }
tree-sitter-proto = { git = "https://github.com/rewinfrey/tree-sitter-proto", rev = "36d54f288aee112f13a67b550ad32634d0c2cb52" }
tree-sitter-python = "0.20.2"
tree-sitter-regex = "0.20.0"
tree-sitter-ruby = "0.20.0"
tree-sitter-rust = "0.20.3"
tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af0fd1fa452cb2562dc7b5c8a8c55551c39273b9" }
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
tree-sitter-vue = { git = "https://github.com/zed-industries/tree-sitter-vue", rev = "6608d9d60c386f19d80af7d8132322fa11199c42" }
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930" }
unindent = "0.1.7"
unicase = "2.6"
unicode-segmentation = "1.10"
url = "2.2"
uuid = { version = "1.1.2", features = ["v4"] }
uuid = { version = "1.1.2", features = ["v4", "v5"] }
wasmparser = "0.201"
wasm-encoder = "0.201"
wasmtime = { version = "19.0.0", default-features = false, features = [

@@ -362,10 +373,16 @@ sys-locale = "0.3.1"
version = "0.53.0"
features = [
    "implement",
    "Foundation_Numerics",
    "Wdk_System_SystemServices",
    "Win32_Globalization",
    "Win32_Graphics_Direct2D",
    "Win32_Graphics_Direct2D_Common",
    "Win32_Graphics_DirectWrite",
    "Win32_Graphics_Dxgi_Common",
    "Win32_Graphics_Gdi",
    "Win32_Graphics_Imaging",
    "Win32_Graphics_Imaging_D2D",
    "Win32_Media",
    "Win32_Security",
    "Win32_Security_Credentials",
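The workspace `uuid` dependency in the hunk above now enables the `v5` feature alongside `v4`. As a rough illustration of what that adds, the sketch below contrasts the two (the namespace and name are arbitrary examples chosen for this illustration, not values taken from the diff):

```rust
use uuid::Uuid;

fn main() {
    // v4: a random UUID, different on every call.
    let random_id = Uuid::new_v4();

    // v5: a deterministic UUID derived from a namespace and a name,
    // so the same inputs always yield the same ID.
    let stable_id = Uuid::new_v5(&Uuid::NAMESPACE_URL, b"example-name");

    println!("{random_id} {stable_id}");
}
```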
Procfile (2 lines changed)

@@ -1,3 +1,3 @@
collab: RUST_LOG=${RUST_LOG:-warn,tower_http=info,collab=info} cargo run --package=collab serve
collab: RUST_LOG=${RUST_LOG:-info} cargo run --package=collab serve
livekit: livekit-server --dev
blob_store: ./script/run-local-minio
@@ -1,6 +1,6 @@
# Zed

[](https://github.com/zed-industries/ze34actions/workflows/ci.yml)
[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)

Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).

@@ -38,6 +38,8 @@ brew install zed-preview

See [CONTRIBUTING.md](./CONTRIBUTING.md) for ways you can contribute to Zed.

Also... we're hiring! Check out our [jobs](https://zed.dev/jobs) page for open roles.

## Licensing

License information for third party dependencies must be correctly provided for CI to pass.
assets/icons/LICENSES (new file, 9 lines)

@@ -0,0 +1,9 @@
Lucide License

ISC License

Copyright (c) for portions of Lucide are held by Cole Bemis 2013-2022 as part of Feather (MIT). All other copyright (c) for Lucide are held by Lucide Contributors 2022.

Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

assets/icons/expand_vertical.svg (new file, 1 line)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-unfold-vertical"><path d="M12 22v-6"/><path d="M12 8V2"/><path d="M4 12H2"/><path d="M10 12H8"/><path d="M16 12h-2"/><path d="M22 12h-2"/><path d="m15 19-3 3-3-3"/><path d="m15 5-3-3-3 3"/></svg>

(After: 398 B)

@@ -161,6 +161,8 @@
    "webp": "image",
    "wma": "audio",
    "wmv": "video",
    "woff": "font",
    "woff2": "font",
    "wv": "audio",
    "xls": "document",
    "xlsx": "document",

@@ -327,7 +329,7 @@
  },
  "tcl": {
    "icon": "icons/file_icons/tcl.svg"
  },
  },
  "vcs": {
    "icon": "icons/file_icons/git.svg"
  },

@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-mail-open"><path d="M21.2 8.4c.5.38.8.97.8 1.6v10a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V10a2 2 0 0 1 .8-1.6l8-6a2 2 0 0 1 2.4 0l8 6Z"/><path d="m22 10-8.97 5.7a1.94 1.94 0 0 1-2.06 0L2 10"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-mail-open"><path d="M21.2 8.4c.5.38.8.97.8 1.6v10a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2V10a2 2 0 0 1 .8-1.6l8-6a2 2 0 0 1 2.4 0l8 6Z"/><path d="m22 10-8.97 5.7a1.94 1.94 0 0 1-2.06 0L2 10"/></svg>

(Before: 390 B, After: 391 B)

assets/icons/person.svg (new file, 3 lines)

@@ -0,0 +1,3 @@
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.5 0.875C5.49797 0.875 3.875 2.49797 3.875 4.5C3.875 6.15288 4.98124 7.54738 6.49373 7.98351C5.2997 8.12901 4.27557 8.55134 3.50407 9.31167C2.52216 10.2794 2.02502 11.72 2.02502 13.5999C2.02502 13.8623 2.23769 14.0749 2.50002 14.0749C2.76236 14.0749 2.97502 13.8623 2.97502 13.5999C2.97502 11.8799 3.42786 10.7206 4.17091 9.9883C4.91536 9.25463 6.02674 8.87499 7.49995 8.87499C8.97317 8.87499 10.0846 9.25463 10.8291 9.98831C11.5721 10.7206 12.025 11.8799 12.025 13.5999C12.025 13.8623 12.2376 14.0749 12.5 14.0749C12.7623 14.075 12.975 13.8623 12.975 13.6C12.975 11.72 12.4778 10.2794 11.4959 9.31166C10.7244 8.55135 9.70025 8.12903 8.50625 7.98352C10.0187 7.5474 11.125 6.15289 11.125 4.5C11.125 2.49797 9.50203 0.875 7.5 0.875ZM4.825 4.5C4.825 3.02264 6.02264 1.825 7.5 1.825C8.97736 1.825 10.175 3.02264 10.175 4.5C10.175 5.97736 8.97736 7.175 7.5 7.175C6.02264 7.175 4.825 5.97736 4.825 4.5Z" fill="black"/>
</svg>

(After: 1.0 KiB)

assets/icons/pull_request.svg (new file, 1 line)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-git-pull-request-arrow"><circle cx="5" cy="6" r="3"/><path d="M5 9v12"/><circle cx="19" cy="18" r="3"/><path d="m15 9-3-3 3-3"/><path d="M12 6h5a2 2 0 0 1 2 2v7"/></svg>

(After: 372 B)

@@ -1,5 +1,16 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.99993 6.85713C11.1558 6.85713 13.7142 5.83379 13.7142 4.57142C13.7142 3.30905 11.1558 2.28571 7.99993 2.28571C4.84402 2.28571 2.28564 3.30905 2.28564 4.57142C2.28564 5.83379 4.84402 6.85713 7.99993 6.85713Z" fill="black" stroke="black" stroke-width="1.5"/>
<path d="M13.7142 4.57141V11.4286C13.7142 12.691 11.1558 13.7143 7.99993 13.7143C4.84402 13.7143 2.28564 12.691 2.28564 11.4286V4.57141" stroke="black" stroke-width="1.5"/>
<path d="M13.7142 8C13.7142 9.26237 11.1558 10.2857 7.99993 10.2857C4.84402 10.2857 2.28564 9.26237 2.28564 8" stroke="black" stroke-width="1.5"/>
<svg
  xmlns="http://www.w3.org/2000/svg"
  width="24"
  height="24"
  viewBox="0 0 24 24"
  fill="none"
  stroke="currentColor"
  stroke-width="2"
  stroke-linecap="round"
  stroke-linejoin="round"
>
  <rect width="20" height="8" x="2" y="2" rx="2" ry="2" />
  <rect width="20" height="8" x="2" y="14" rx="2" ry="2" />
  <line x1="6" x2="6.01" y1="6" y2="6" />
  <line x1="6" x2="6.01" y1="18" y2="18" />
</svg>

(Before: 692 B, After: 413 B)

assets/icons/sliders.svg (new file, 3 lines)

@@ -0,0 +1,3 @@
<svg width="17" height="17" viewBox="0 0 17 17" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.36667 3.79167C5.53364 3.79167 4.85833 4.46697 4.85833 5.3C4.85833 6.13303 5.53364 6.80833 6.36667 6.80833C7.1997 6.80833 7.875 6.13303 7.875 5.3C7.875 4.46697 7.1997 3.79167 6.36667 3.79167ZM2.1 5.925H3.67944C3.9626 7.14732 5.05824 8.05833 6.36667 8.05833C7.67509 8.05833 8.77073 7.14732 9.05389 5.925H14.9C15.2452 5.925 15.525 5.64518 15.525 5.3C15.525 4.95482 15.2452 4.675 14.9 4.675H9.05389C8.77073 3.45268 7.67509 2.54167 6.36667 2.54167C5.05824 2.54167 3.9626 3.45268 3.67944 4.675H2.1C1.75482 4.675 1.475 4.95482 1.475 5.3C1.475 5.64518 1.75482 5.925 2.1 5.925ZM13.3206 12.325C13.0374 13.5473 11.9418 14.4583 10.6333 14.4583C9.32491 14.4583 8.22927 13.5473 7.94611 12.325H2.1C1.75482 12.325 1.475 12.0452 1.475 11.7C1.475 11.3548 1.75482 11.075 2.1 11.075H7.94611C8.22927 9.85268 9.32491 8.94167 10.6333 8.94167C11.9418 8.94167 13.0374 9.85268 13.3206 11.075H14.9C15.2452 11.075 15.525 11.3548 15.525 11.7C15.525 12.0452 15.2452 12.325 14.9 12.325H13.3206ZM9.125 11.7C9.125 10.867 9.8003 10.1917 10.6333 10.1917C11.4664 10.1917 12.1417 10.867 12.1417 11.7C12.1417 12.533 11.4664 13.2083 10.6333 13.2083C9.8003 13.2083 9.125 12.533 9.125 11.7Z" fill="black"/>
</svg>

(After: 1.3 KiB)

@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-trash-2"><path d="M3 6h18"/><path d="M19 6v14c0 1-1 2-2 2H7c-1 0-2-1-2-2V6"/><path d="M8 6V4c0-1 1-2 2-2h4c1 0 2 1 2 2v2"/><line x1="10" x2="10" y1="11" y2="17"/><line x1="14" x2="14" y1="11" y2="17"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-trash-2"><path d="M3 6h18"/><path d="M19 6v14c0 1-1 2-2 2H7c-1 0-2-1-2-2V6"/><path d="M8 6V4c0-1 1-2 2-2h4c1 0 2 1 2 2v2"/><line x1="10" x2="10" y1="11" y2="17"/><line x1="14" x2="14" y1="11" y2="17"/></svg>

(Before: 409 B, After: 410 B)

@@ -3,4 +3,3 @@
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.64907 9.32382C8.313 9.13287 8.08213 8.81954 7.94725 8.4078C7.8147 8.00318 7.75317 7.44207 7.75317 6.73677C7.75317 6.03845 7.81141 5.48454 7.9369 5.08716L7.93755 5.08512C8.07231 4.67373 8.3034 4.36258 8.64088 4.17794C8.96806 3.99257 9.41119 3.9104 9.9496 3.9104C10.3406 3.9104 10.6632 3.95585 10.8967 4.06485C11.0079 4.11675 11.1099 4.18844 11.2033 4.27745V2.03027H12.4077V9.4856H11.2033V9.18983C11.0945 9.29074 10.98 9.37096 10.8591 9.42752C10.6327 9.53648 10.3335 9.58252 9.97867 9.58252C9.4339 9.58252 8.98592 9.50355 8.65375 9.3264L8.64907 9.32382ZM11.1139 7.85508C11.1841 7.60311 11.2227 7.23354 11.2227 6.73677C11.2227 6.24602 11.1841 5.88331 11.1141 5.63844C11.0457 5.39902 10.9401 5.25863 10.8149 5.18266L10.8077 5.17826C10.6804 5.09342 10.4713 5.03726 10.1531 5.03726C9.80785 5.03726 9.5719 5.09359 9.42256 5.1832L9.41829 5.18576C9.28002 5.26412 9.16722 5.40602 9.09399 5.64263C9.01876 5.88566 8.97694 6.24668 8.97694 6.73677C8.97694 7.23363 9.01882 7.59774 9.09399 7.8406C9.1673 8.07745 9.28097 8.22477 9.42256 8.30972C9.5719 8.39933 9.80785 8.45566 10.1531 8.45566C10.4721 8.45566 10.683 8.40265 10.8114 8.32216C10.9396 8.23944 11.0456 8.09373 11.1139 7.85508Z" fill="#787D87"/>
<rect x="1.14087" y="10.7188" width="11.7183" height="1.26565" rx="0.632824" fill="#787D87"/>
</svg>

(Before: 1.7 KiB, After: 1.7 KiB)
@@ -297,13 +297,8 @@
    "ctrl-shift-k": "editor::DeleteLine",
    "alt-up": "editor::MoveLineUp",
    "alt-down": "editor::MoveLineDown",
    "ctrl-alt-shift-up": [
      "editor::DuplicateLine",
      {
        "move_upwards": true
      }
    ],
    "ctrl-alt-shift-down": "editor::DuplicateLine",
    "ctrl-alt-shift-up": "editor::DuplicateLineUp",
    "ctrl-alt-shift-down": "editor::DuplicateLineDown",
    "ctrl-shift-left": "editor::SelectToPreviousWordStart",
    "ctrl-shift-right": "editor::SelectToNextWordEnd",
    "ctrl-shift-up": "editor::SelectLargerSyntaxNode", //todo(linux) tmp keybinding

@@ -527,6 +522,7 @@
    "context": "Editor && mode == full",
    "bindings": {
      "alt-enter": "editor::OpenExcerpts",
      "shift-enter": "editor::ExpandExcerpts",
      "ctrl-k enter": "editor::OpenExcerptsSplit",
      "ctrl-f8": "editor::GoToHunk",
      "ctrl-shift-f8": "editor::GoToPrevHunk",

@@ -592,12 +588,6 @@
      "tab": "channel_modal::ToggleMode"
    }
  },
  {
    "context": "ChatPanel > MessageEditor",
    "bindings": {
      "escape": "chat_panel::CloseReplyPreview"
    }
  },
  {
    "context": "FileFinder",
    "bindings": { "ctrl-shift-p": "file_finder::SelectPrev" }
@@ -209,7 +209,15 @@
    }
  },
  {
    "context": "AssistantPanel",
    "context": "AssistantChat > Editor", // Used in the assistant2 crate
    "bindings": {
      "enter": ["assistant2::Submit", "Simple"],
      "cmd-enter": ["assistant2::Submit", "Codebase"],
      "escape": "assistant2::Cancel"
    }
  },
  {
    "context": "AssistantPanel", // Used in the assistant crate, which we're replacing
    "bindings": {
      "cmd-g": "search::SelectNextMatch",
      "cmd-shift-g": "search::SelectPrevMatch"

@@ -541,6 +549,7 @@
    "context": "Editor && mode == full",
    "bindings": {
      "alt-enter": "editor::OpenExcerpts",
      "shift-enter": "editor::ExpandExcerpts",
      "cmd-k enter": "editor::OpenExcerptsSplit",
      "cmd-f8": "editor::GoToHunk",
      "cmd-shift-f8": "editor::GoToPrevHunk",
@@ -234,6 +234,8 @@
        "displayLines": true
      }
    ],
    "g ]": "editor::GoToDiagnostic",
    "g [": "editor::GoToPrevDiagnostic",
    "shift-h": "vim::WindowTop",
    "shift-m": "vim::WindowMiddle",
    "shift-l": "vim::WindowBottom",

@@ -367,6 +369,15 @@
    "< <": "vim::Outdent",
    "ctrl-pagedown": "pane::ActivateNextItem",
    "ctrl-pageup": "pane::ActivatePrevItem",
    // tree-sitter related commands
    "[ x": "editor::SelectLargerSyntaxNode",
    "] x": "editor::SelectSmallerSyntaxNode"
  }
},
{
  "context": "Editor && vim_mode == visual && vim_operator == none && !VimWaiting",
  "bindings": {
    // tree-sitter related commands
    "[ x": "editor::SelectLargerSyntaxNode",
    "] x": "editor::SelectSmallerSyntaxNode"
  }

@@ -532,6 +543,18 @@
    ]
  }
},
{
  "context": "Editor && vim_mode == normal",
  "bindings": {
    "g c c": "editor::ToggleComments"
  }
},
{
  "context": "Editor && vim_mode == visual",
  "bindings": {
    "g c": "editor::ToggleComments"
  }
},
{
  "context": "Editor && vim_mode == insert",
  "bindings": {

@@ -590,17 +613,18 @@
    "%": "project_panel::NewFile",
    "/": "project_panel::NewSearchInDirectory",
    "d": "project_panel::NewDirectory",
    "enter": "project_panel::Open",
    "enter": "project_panel::OpenPermanent",
    "escape": "project_panel::ToggleFocus",
    "h": "project_panel::CollapseSelectedEntry",
    "j": "menu::SelectNext",
    "k": "menu::SelectPrev",
    "l": "project_panel::ExpandSelectedEntry",
    "o": "project_panel::Open",
    "o": "project_panel::OpenPermanent",
    "shift-d": "project_panel::Delete",
    "shift-r": "project_panel::Rename",
    "t": "project_panel::Open",
    "v": "project_panel::Open",
    "t": "project_panel::OpenPermanent",
    "v": "project_panel::OpenPermanent",
    "p": "project_panel::Open",
    "x": "project_panel::RevealInFinder"
  }
}
@@ -47,11 +47,20 @@
  // The factor to grow the active pane by. Defaults to 1.0
  // which gives the same size as all other panes.
  "active_pane_magnification": 1.0,
  // Centered layout related settings.
  "centered_layout": {
    // The relative width of the left padding of the central pane from the
    // workspace when the centered layout is used.
    "left_padding": 0.2,
    // The relative width of the right padding of the central pane from the
    // workspace when the centered layout is used.
    "right_padding": 0.2
  },
  // The key to use for adding multiple cursors
  // Currently "alt" or "cmd_or_ctrl" (also aliased as
  // "cmd" and "ctrl") are supported.
  "multi_cursor_modifier": "alt",
  // Whether to enable vim modes and key bindings
  // Whether to enable vim modes and key bindings.
  "vim_mode": false,
  // Whether to show the informational hover box when moving the mouse
  // over symbols in the editor.

@@ -60,6 +69,8 @@
  "confirm_quit": false,
  // Whether to restore last closed project when fresh Zed instance is opened.
  "restore_on_startup": "last_workspace",
  // Size of the drop target in the editor.
  "drop_target_size": 0.2,
  // Whether the cursor blinks in the editor.
  "cursor_blink": true,
  // Whether to pop the completions menu while typing in an editor without

@@ -92,8 +103,9 @@
  // Whether to use additional LSP queries to format (and amend) the code after
  // every "trigger" symbol input, defined by LSP server capabilities.
  "use_on_type_format": true,
  // Whether to automatically type closing characters for you. For example,
  // when you type (, Zed will automatically add a closing ) at the correct position.
  // Whether to automatically add matching closing characters when typing
  // opening parenthesis, bracket, brace, single or double quote characters.
  // For example, when you type (, Zed will add a closing ) at the correct position.
  "use_autoclose": true,
  // Controls how the editor handles the autoclosed characters.
  // When set to `false`(default), skipping over and auto-removing of the closing characters

@@ -145,10 +157,10 @@
    "show": "auto",
    // Whether to show git diff indicators in the scrollbar.
    "git_diff": true,
    // Whether to show selections in the scrollbar.
    "selections": true,
    // Whether to show symbols selections in the scrollbar.
    "symbols_selections": true,
    // Whether to show buffer search results in the scrollbar.
    "search_results": true,
    // Whether to show selected symbol occurrences in the scrollbar.
    "selected_symbol": true,
    // Whether to show diagnostic indicators in the scrollbar.
    "diagnostics": true
  },

@@ -171,6 +183,9 @@
  },
  // The number of lines to keep above/below the cursor when scrolling.
  "vertical_scroll_margin": 3,
  // Scroll sensitivity multiplier. This multiplier is applied
  // to both the horizontal and vertical delta values while scrolling.
  "scroll_sensitivity": 1.0,
  "relative_line_numbers": false,
  // When to populate a new search's query based on the text under the cursor.
  // This setting can take the following three values:

@@ -199,6 +214,8 @@
    "scroll_debounce_ms": 50
  },
  "project_panel": {
    // Whether to show the project panel button in the status bar
    "button": true,
    // Default width of the project panel.
    "default_width": 240,
    // Where to dock the project panel. Can be 'left' or 'right'.

@@ -214,7 +231,10 @@
    // Whether to reveal it in the project panel automatically,
    // when a corresponding project entry becomes active.
    // Gitignored entries are never auto revealed.
    "auto_reveal_entries": true
    "auto_reveal_entries": true,
    /// Whether to fold directories automatically
    /// when a directory has only one directory inside.
    "auto_fold_dirs": false
  },
  "collaboration_panel": {
    // Whether to show the collaboration panel button in the status bar.

@@ -274,6 +294,10 @@
  "show_call_status_icon": true,
  // Whether to use language servers to provide code intelligence.
  "enable_language_server": true,
  // The list of language servers to use (or disable) for all languages.
  //
  // This is typically customized on a per-language basis.
  "language_servers": ["..."],
  // When to automatically save edited buffers. This setting can
  // take four values.
  //

@@ -387,7 +411,15 @@
    // "git_gutter": "tracked_files"
    // 2. Hide the gutter
    // "git_gutter": "hide"
    "git_gutter": "tracked_files"
    "git_gutter": "tracked_files",
    // Control whether the git blame information is shown inline,
    // in the currently focused line.
    "inline_blame": {
      "enabled": true
      // Sets a delay after which the inline blame information is shown.
      // Delay is restarted with every cursor movement.
      // "delay_ms": 600
    }
  },
  "copilot": {
    // The set of glob patterns for which copilot should be disabled

@@ -476,6 +508,8 @@
    // Whether or not selecting text in the terminal will automatically
    // copy to the system clipboard.
    "copy_on_select": false,
    // Whether to show the terminal button in the status bar
    "button": true,
    // Any key-value pairs added to this list will be added to the terminal's
    // environment. Use `:` to separate multiple values.
    "env": {

@@ -542,10 +576,6 @@
    //
    "lsp": "elixir_ls"
  },
  // Settings specific to our deno integration
  "deno": {
    "enable": false
  },
  "code_actions_on_format": {},
  // An object whose keys are language names, and whose values
  // are arrays of filenames or extensions of files that should

@@ -560,6 +590,13 @@
  // }
  //
  "file_types": {},
  // The extensions that Zed should automatically install on startup.
  //
  // If you don't want any of these extensions, add this field to your settings
  // and change the value to `false`.
  "auto_install_extensions": {
    "html": true
  },
  // Different settings for specific languages.
  "languages": {
    "C++": {
@@ -5,6 +5,9 @@ edition = "2021"
publish = false
license = "AGPL-3.0-or-later"

[lints]
workspace = true

[lib]
path = "src/anthropic.rs"

@@ -17,6 +20,3 @@ util.workspace = true

[dev-dependencies]
tokio.workspace = true

[lints]
workspace = true
@@ -5,6 +5,9 @@ edition = "2021"
publish = false
license = "GPL-3.0-or-later"

[lib]
path = "src/assets.rs"

[lints]
workspace = true
@@ -1,7 +1,7 @@
// This crate was essentially pulled out verbatim from main `zed` crate to avoid having to run RustEmbed macro whenever zed has to be rebuilt. It saves a second or two on an incremental build.
use anyhow::anyhow;

use gpui::{AssetSource, Result, SharedString};
use gpui::{AppContext, AssetSource, Result, SharedString};
use rust_embed::RustEmbed;

#[derive(RustEmbed)]

@@ -34,3 +34,19 @@ impl AssetSource for Assets {
            .collect())
    }
}

impl Assets {
    /// Populate the [`TextSystem`] of the given [`AppContext`] with all `.ttf` fonts in the `fonts` directory.
    pub fn load_fonts(&self, cx: &AppContext) -> gpui::Result<()> {
        let font_paths = self.list("fonts")?;
        let mut embedded_fonts = Vec::new();
        for font_path in font_paths {
            if font_path.ends_with(".ttf") {
                let font_bytes = cx.asset_source().load(&font_path)?;
                embedded_fonts.push(font_bytes);
            }
        }

        cx.text_system().add_fonts(embedded_fonts)
    }
}
@@ -128,6 +128,8 @@ impl LanguageModelRequestMessage {
                Role::System => proto::LanguageModelRole::LanguageModelSystem,
            } as i32,
            content: self.content.clone(),
            tool_calls: Vec::new(),
            tool_call_id: None,
        }
    }
}

@@ -147,6 +149,8 @@ impl LanguageModelRequest {
            messages: self.messages.iter().map(|m| m.to_proto()).collect(),
            stop: self.stop.clone(),
            temperature: self.temperature,
            tool_choice: None,
            tools: Vec::new(),
        }
    }
}
@@ -1108,7 +1108,7 @@ impl AssistantPanel {
                )
                .track_scroll(scroll_handle)
                .into_any_element();
            saved_conversations.layout(
            saved_conversations.prepaint_as_root(
                bounds.origin,
                bounds.size.map(AvailableSpace::Definite),
                cx,

@@ -1119,8 +1119,8 @@
                )
                .size_full()
                .into_any_element()
        } else {
            let editor = self.active_conversation_editor().unwrap();
        } else if let Some(editor) = self.active_conversation_editor() {
            let editor = editor.clone();
            let conversation = editor.read(cx).conversation.clone();
            div()
                .size_full()

@@ -1135,6 +1135,8 @@
                        .children(self.render_remaining_tokens(&conversation, cx)),
                )
                .into_any_element()
        } else {
            div().into_any_element()
        },
    ))
}

@@ -2065,7 +2067,7 @@ impl ConversationEditor {
            workspace: workspace.downgrade(),
            _subscriptions,
        };
        this.update_active_buffer(workspace, cx);
        cx.defer(|this, cx| this.update_active_buffer(workspace, cx));
        this.update_message_headers(cx);
        this
    }
@@ -140,14 +140,24 @@ impl OpenAiCompletionProvider {
            messages: request
                .messages
                .into_iter()
                .map(|msg| RequestMessage {
                    role: msg.role.into(),
                    content: msg.content,
                .map(|msg| match msg.role {
                    Role::User => RequestMessage::User {
                        content: msg.content,
                    },
                    Role::Assistant => RequestMessage::Assistant {
                        content: Some(msg.content),
                        tool_calls: Vec::new(),
                    },
                    Role::System => RequestMessage::System {
                        content: msg.content,
                    },
                })
                .collect(),
            stream: true,
            stop: request.stop,
            temperature: request.temperature,
            tools: Vec::new(),
            tool_choice: None,
        }
    }
}
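For orientation, the hunk above switches from building `RequestMessage` as a struct with a `role` field to constructing role-specific enum variants. A rough sketch of the shape those constructors imply is below; the placeholder `ToolCall` type and the exact field types are assumptions made for illustration, not the `open_ai` crate's actual definitions.

```rust
// Illustrative only: the variant shapes are inferred from the constructors
// used in the hunk above. `ToolCall` stands in for whatever tool-call type
// the crate actually defines.
struct ToolCall;

enum RequestMessage {
    User {
        content: String,
    },
    Assistant {
        // An assistant turn may carry no text when it only invokes tools.
        content: Option<String>,
        tool_calls: Vec<ToolCall>,
    },
    System {
        content: String,
    },
}
```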
@@ -123,6 +123,8 @@ impl ZedDotDevCompletionProvider {
                .collect(),
            stop: request.stop,
            temperature: request.temperature,
            tools: Vec::new(),
            tool_choice: None,
        };

        self.client
crates/assistant2/Cargo.toml (new file, 57 lines)

@@ -0,0 +1,57 @@
[package]
name = "assistant2"
version = "0.1.0"
edition = "2021"
publish = false
license = "GPL-3.0-or-later"

[lib]
path = "src/assistant2.rs"

[[example]]
name = "assistant_example"
path = "examples/assistant_example.rs"
crate-type = ["bin"]

[dependencies]
anyhow.workspace = true
assistant_tooling.workspace = true
client.workspace = true
editor.workspace = true
feature_flags.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
log.workspace = true
open_ai.workspace = true
project.workspace = true
rich_text.workspace = true
semantic_index.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
theme.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
nanoid = "0.4"

[dev-dependencies]
assets.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
languages.workspace = true
node_runtime.workspace = true
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
release_channel.workspace = true
settings = { workspace = true, features = ["test-support"] }
theme = { workspace = true, features = ["test-support"] }
util = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }

[lints]
workspace = true

crates/assistant2/LICENSE-GPL (new symbolic link)

@@ -0,0 +1 @@
../../LICENSE-GPL
crates/assistant2/examples/assistant_example.rs (new file, 129 lines)

@@ -0,0 +1,129 @@
use anyhow::Context as _;
use assets::Assets;
use assistant2::{tools::ProjectIndexTool, AssistantPanel};
use assistant_tooling::ToolRegistry;
use client::Client;
use gpui::{actions, App, AppContext, KeyBinding, Task, View, WindowOptions};
use language::LanguageRegistry;
use project::Project;
use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticIndex};
use settings::{KeymapFile, DEFAULT_KEYMAP_PATH};
use std::{
    path::{Path, PathBuf},
    sync::Arc,
};
use theme::LoadThemes;
use ui::{div, prelude::*, Render};
use util::{http::HttpClientWithUrl, ResultExt as _};

actions!(example, [Quit]);

fn main() {
    let args: Vec<String> = std::env::args().collect();

    env_logger::init();
    App::new().with_assets(Assets).run(|cx| {
        cx.bind_keys(Some(KeyBinding::new("cmd-q", Quit, None)));
        cx.on_action(|_: &Quit, cx: &mut AppContext| {
            cx.quit();
        });

        if args.len() < 2 {
            eprintln!(
                "Usage: cargo run --example assistant_example -p assistant2 -- <project_path>"
            );
            cx.quit();
            return;
        }

        settings::init(cx);
        language::init(cx);
        Project::init_settings(cx);
        editor::init(cx);
        theme::init(LoadThemes::JustBase, cx);
        Assets.load_fonts(cx).unwrap();
        KeymapFile::load_asset(DEFAULT_KEYMAP_PATH, cx).unwrap();
        client::init_settings(cx);
        release_channel::init("0.130.0", cx);

        let client = Client::production(cx);
        {
            let client = client.clone();
            cx.spawn(|cx| async move { client.authenticate_and_connect(false, &cx).await })
                .detach_and_log_err(cx);
        }
        assistant2::init(client.clone(), cx);

        let language_registry = Arc::new(LanguageRegistry::new(
            Task::ready(()),
            cx.background_executor().clone(),
        ));
        let node_runtime = node_runtime::RealNodeRuntime::new(client.http_client());
        languages::init(language_registry.clone(), node_runtime, cx);

        let http = Arc::new(HttpClientWithUrl::new("http://localhost:11434"));

        let api_key = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY not set");
        let embedding_provider = OpenAiEmbeddingProvider::new(
            http.clone(),
            OpenAiEmbeddingModel::TextEmbedding3Small,
            open_ai::OPEN_AI_API_URL.to_string(),
            api_key,
        );

        cx.spawn(|mut cx| async move {
            let mut semantic_index = SemanticIndex::new(
                PathBuf::from("/tmp/semantic-index-db.mdb"),
                Arc::new(embedding_provider),
                &mut cx,
            )
            .await?;

            let project_path = Path::new(&args[1]);
            let project = Project::example([project_path], &mut cx).await;

            cx.update(|cx| {
                let fs = project.read(cx).fs().clone();

                let project_index = semantic_index.project_index(project.clone(), cx);

                let mut tool_registry = ToolRegistry::new();
                tool_registry
                    .register(ProjectIndexTool::new(project_index.clone(), fs.clone()))
                    .context("failed to register ProjectIndexTool")
                    .log_err();

                let tool_registry = Arc::new(tool_registry);

                cx.open_window(WindowOptions::default(), |cx| {
                    cx.new_view(|cx| Example::new(language_registry, tool_registry, cx))
                });
                cx.activate(true);
            })
        })
        .detach_and_log_err(cx);
    })
}

struct Example {
    assistant_panel: View<AssistantPanel>,
}

impl Example {
    fn new(
        language_registry: Arc<LanguageRegistry>,
        tool_registry: Arc<ToolRegistry>,
        cx: &mut ViewContext<Self>,
    ) -> Self {
        Self {
            assistant_panel: cx
                .new_view(|cx| AssistantPanel::new(language_registry, tool_registry, cx)),
        }
    }
}

impl Render for Example {
    fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl ui::prelude::IntoElement {
        div().size_full().child(self.assistant_panel.clone())
    }
}
crates/assistant2/examples/chat-with-functions.rs (new file, 218 lines)

@@ -0,0 +1,218 @@
use anyhow::Context as _;
use assets::Assets;
use assistant2::AssistantPanel;
use assistant_tooling::{LanguageModelTool, ToolRegistry};
use client::Client;
use gpui::{actions, AnyElement, App, AppContext, KeyBinding, Task, View, WindowOptions};
use language::LanguageRegistry;
use project::Project;
use rand::Rng;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{KeymapFile, DEFAULT_KEYMAP_PATH};
use std::sync::Arc;
use theme::LoadThemes;
use ui::{div, prelude::*, Render};
use util::ResultExt as _;

actions!(example, [Quit]);

struct RollDiceTool {}

impl RollDiceTool {
    fn new() -> Self {
        Self {}
    }
}

#[derive(Serialize, Deserialize, JsonSchema, Clone)]
#[serde(rename_all = "snake_case")]
enum Die {
    D6 = 6,
    D20 = 20,
}

impl Die {
    fn into_str(&self) -> &'static str {
        match self {
            Die::D6 => "d6",
            Die::D20 => "d20",
        }
    }
}

#[derive(Serialize, Deserialize, JsonSchema, Clone)]
struct DiceParams {
    /// The number of dice to roll.
    num_dice: u8,
    /// Which die to roll. Defaults to a d6 if not provided.
    die_type: Option<Die>,
}

#[derive(Serialize, Deserialize)]
struct DieRoll {
    die: Die,
    roll: u8,
}

impl DieRoll {
    fn render(&self) -> AnyElement {
        match self.die {
            Die::D6 => {
                let face = match self.roll {
                    6 => div().child("⚅"),
                    5 => div().child("⚄"),
                    4 => div().child("⚃"),
                    3 => div().child("⚂"),
                    2 => div().child("⚁"),
                    1 => div().child("⚀"),
                    _ => div().child("😅"),
                };
                face.text_3xl().into_any_element()
            }
            _ => div()
                .child(format!("{}", self.roll))
                .text_3xl()
                .into_any_element(),
        }
    }
}

#[derive(Serialize, Deserialize)]
struct DiceRoll {
    rolls: Vec<DieRoll>,
}

impl LanguageModelTool for RollDiceTool {
    type Input = DiceParams;
    type Output = DiceRoll;

    fn name(&self) -> String {
        "roll_dice".to_string()
    }

    fn description(&self) -> String {
        "Rolls N many dice and returns the results.".to_string()
    }

    fn execute(&self, input: &Self::Input, _cx: &AppContext) -> Task<gpui::Result<Self::Output>> {
        let rolls = (0..input.num_dice)
            .map(|_| {
                let die_type = input.die_type.as_ref().unwrap_or(&Die::D6).clone();

                DieRoll {
                    die: die_type.clone(),
                    roll: rand::thread_rng().gen_range(1..=die_type as u8),
                }
            })
            .collect();

        return Task::ready(Ok(DiceRoll { rolls }));
    }

    fn render(
        _tool_call_id: &str,
        _input: &Self::Input,
        output: &Self::Output,
        _cx: &mut WindowContext,
    ) -> gpui::AnyElement {
        h_flex()
            .children(
                output
                    .rolls
                    .iter()
                    .map(|roll| div().p_2().child(roll.render())),
            )
            .into_any_element()
    }

    fn format(_input: &Self::Input, output: &Self::Output) -> String {
        let mut result = String::new();
        for roll in &output.rolls {
            let die = &roll.die;
            result.push_str(&format!("{}: {}\n", die.into_str(), roll.roll));
        }
        result
    }
}

fn main() {
    env_logger::init();
    App::new().with_assets(Assets).run(|cx| {
        cx.bind_keys(Some(KeyBinding::new("cmd-q", Quit, None)));
        cx.on_action(|_: &Quit, cx: &mut AppContext| {
            cx.quit();
        });

        settings::init(cx);
        language::init(cx);
        Project::init_settings(cx);
        editor::init(cx);
        theme::init(LoadThemes::JustBase, cx);
        Assets.load_fonts(cx).unwrap();
        KeymapFile::load_asset(DEFAULT_KEYMAP_PATH, cx).unwrap();
        client::init_settings(cx);
        release_channel::init("0.130.0", cx);

        let client = Client::production(cx);
        {
            let client = client.clone();
            cx.spawn(|cx| async move { client.authenticate_and_connect(false, &cx).await })
                .detach_and_log_err(cx);
        }
        assistant2::init(client.clone(), cx);

        let language_registry = Arc::new(LanguageRegistry::new(
            Task::ready(()),
            cx.background_executor().clone(),
        ));
        let node_runtime = node_runtime::RealNodeRuntime::new(client.http_client());
        languages::init(language_registry.clone(), node_runtime, cx);

        cx.spawn(|cx| async move {
            cx.update(|cx| {
                let mut tool_registry = ToolRegistry::new();
                tool_registry
                    .register(RollDiceTool::new())
                    .context("failed to register DummyTool")
                    .log_err();

                let tool_registry = Arc::new(tool_registry);

                println!("Tools registered");
                for definition in tool_registry.definitions() {
                    println!("{}", definition);
                }

                cx.open_window(WindowOptions::default(), |cx| {
                    cx.new_view(|cx| Example::new(language_registry, tool_registry, cx))
                });
                cx.activate(true);
            })
        })
        .detach_and_log_err(cx);
    })
}

struct Example {
    assistant_panel: View<AssistantPanel>,
}

impl Example {
    fn new(
        language_registry: Arc<LanguageRegistry>,
        tool_registry: Arc<ToolRegistry>,
        cx: &mut ViewContext<Self>,
    ) -> Self {
        Self {
            assistant_panel: cx
                .new_view(|cx| AssistantPanel::new(language_registry, tool_registry, cx)),
        }
    }
}

impl Render for Example {
    fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl ui::prelude::IntoElement {
        div().size_full().child(self.assistant_panel.clone())
    }
}
crates/assistant2/src/assistant2.rs (new file, 959 lines; the listing below is truncated)

@@ -0,0 +1,959 @@
mod assistant_settings;
mod completion_provider;
mod markdown_message;
pub mod tools;

use markdown_message::*;

use anyhow::{Context, Result};
use assistant_tooling::{ToolFunctionCall, ToolRegistry};
use client::{proto, Client};
use completion_provider::*;
use editor::Editor;
use feature_flags::FeatureFlagAppExt as _;
use futures::{channel::oneshot, future::join_all, Future, FutureExt, StreamExt};
use gpui::{
    list, prelude::*, AnyElement, AppContext, AsyncWindowContext, EventEmitter, FocusHandle,
    FocusableView, Global, ListAlignment, ListState, Model, Render, Task, View, WeakView,
};
use language::{language_settings::SoftWrap, LanguageRegistry};
use open_ai::{FunctionContent, ToolCall, ToolCallContent};
use project::Fs;
use rich_text::RichText;
use semantic_index::{CloudEmbeddingProvider, ProjectIndex, SemanticIndex};
use serde::Deserialize;
use settings::Settings;
use std::{cmp, sync::Arc};
use theme::ThemeSettings;
use tools::ProjectIndexTool;
use ui::{popover_menu, prelude::*, ButtonLike, CollapsibleContainer, Color, ContextMenu, Tooltip};
use util::{paths::EMBEDDINGS_DIR, ResultExt};
use workspace::{
    dock::{DockPosition, Panel, PanelEvent},
    Workspace,
};

pub use assistant_settings::AssistantSettings;

const MAX_COMPLETION_CALLS_PER_SUBMISSION: usize = 5;

#[derive(Eq, PartialEq, Copy, Clone, Deserialize)]
pub struct Submit(SubmitMode);

/// There are multiple different ways to submit a model request, represented by this enum.
#[derive(Eq, PartialEq, Copy, Clone, Deserialize)]
pub enum SubmitMode {
    /// Only include the conversation.
    Simple,
    /// Send the current file as context.
    CurrentFile,
    /// Search the codebase and send relevant excerpts.
    Codebase,
}

gpui::actions!(assistant2, [Cancel, ToggleFocus]);
gpui::impl_actions!(assistant2, [Submit]);

pub fn init(client: Arc<Client>, cx: &mut AppContext) {
    AssistantSettings::register(cx);

    cx.spawn(|mut cx| {
        let client = client.clone();
        async move {
            let embedding_provider = CloudEmbeddingProvider::new(client.clone());
            let semantic_index = SemanticIndex::new(
                EMBEDDINGS_DIR.join("semantic-index-db.0.mdb"),
                Arc::new(embedding_provider),
                &mut cx,
            )
            .await?;
            cx.update(|cx| cx.set_global(semantic_index))
        }
    })
    .detach();

    cx.set_global(CompletionProvider::new(CloudCompletionProvider::new(
        client,
    )));

    cx.observe_new_views(
        |workspace: &mut Workspace, _cx: &mut ViewContext<Workspace>| {
            workspace.register_action(|workspace, _: &ToggleFocus, cx| {
                workspace.toggle_panel_focus::<AssistantPanel>(cx);
            });
        },
    )
    .detach();
}

pub fn enabled(cx: &AppContext) -> bool {
    cx.is_staff()
}

pub struct AssistantPanel {
    chat: View<AssistantChat>,
    width: Option<Pixels>,
}

impl AssistantPanel {
    pub fn load(
        workspace: WeakView<Workspace>,
        cx: AsyncWindowContext,
    ) -> Task<Result<View<Self>>> {
        cx.spawn(|mut cx| async move {
            let (app_state, project) = workspace.update(&mut cx, |workspace, _| {
                (workspace.app_state().clone(), workspace.project().clone())
            })?;

            cx.new_view(|cx| {
                // todo!("this will panic if the semantic index failed to load or has not loaded yet")
                let project_index = cx.update_global(|semantic_index: &mut SemanticIndex, cx| {
                    semantic_index.project_index(project.clone(), cx)
                });

                let mut tool_registry = ToolRegistry::new();
                tool_registry
                    .register(ProjectIndexTool::new(
                        project_index.clone(),
                        app_state.fs.clone(),
                    ))
                    .context("failed to register ProjectIndexTool")
                    .log_err();

                let tool_registry = Arc::new(tool_registry);

                Self::new(app_state.languages.clone(), tool_registry, cx)
            })
        })
    }

    pub fn new(
        language_registry: Arc<LanguageRegistry>,
        tool_registry: Arc<ToolRegistry>,
        cx: &mut ViewContext<Self>,
    ) -> Self {
        let chat = cx.new_view(|cx| {
            AssistantChat::new(language_registry.clone(), tool_registry.clone(), cx)
        });

        Self { width: None, chat }
    }
}

impl Render for AssistantPanel {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        div()
            .size_full()
            .v_flex()
            .p_2()
            .bg(cx.theme().colors().background)
            .child(self.chat.clone())
    }
}

impl Panel for AssistantPanel {
    fn persistent_name() -> &'static str {
        "AssistantPanelv2"
    }

    fn position(&self, _cx: &WindowContext) -> workspace::dock::DockPosition {
        // todo!("Add a setting / use assistant settings")
        DockPosition::Right
    }

    fn position_is_valid(&self, position: workspace::dock::DockPosition) -> bool {
        matches!(position, DockPosition::Right)
    }

    fn set_position(&mut self, _: workspace::dock::DockPosition, _: &mut ViewContext<Self>) {
        // Do nothing until we have a setting for this
    }

    fn size(&self, _cx: &WindowContext) -> Pixels {
|
||||
self.width.unwrap_or(px(400.))
|
||||
}
|
||||
|
||||
fn set_size(&mut self, size: Option<Pixels>, cx: &mut ViewContext<Self>) {
|
||||
self.width = size;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn icon(&self, _cx: &WindowContext) -> Option<ui::IconName> {
|
||||
Some(IconName::Ai)
|
||||
}
|
||||
|
||||
fn icon_tooltip(&self, _: &WindowContext) -> Option<&'static str> {
|
||||
Some("Assistant Panel ✨")
|
||||
}
|
||||
|
||||
fn toggle_action(&self) -> Box<dyn gpui::Action> {
|
||||
Box::new(ToggleFocus)
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<PanelEvent> for AssistantPanel {}
|
||||
|
||||
impl FocusableView for AssistantPanel {
|
||||
fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
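// `AssistantChat::new` always pushes an initial user message, so there is
// always at least one focusable message editor to delegate focus to.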
|
||||
self.chat
|
||||
.read(cx)
|
||||
.messages
|
||||
.iter()
|
||||
.rev()
|
||||
.find_map(|msg| msg.focus_handle(cx))
|
||||
.expect("no user message in chat")
|
||||
}
|
||||
}
|
||||
|
||||
struct AssistantChat {
|
||||
model: String,
|
||||
messages: Vec<ChatMessage>,
|
||||
list_state: ListState,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
next_message_id: MessageId,
|
||||
pending_completion: Option<Task<()>>,
|
||||
tool_registry: Arc<ToolRegistry>,
|
||||
}
|
||||
|
||||
impl AssistantChat {
|
||||
fn new(
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
tool_registry: Arc<ToolRegistry>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let model = CompletionProvider::get(cx).default_model();
|
||||
let view = cx.view().downgrade();
|
||||
let list_state = ListState::new(
|
||||
0,
|
||||
ListAlignment::Bottom,
|
||||
px(1024.),
|
||||
move |ix, cx: &mut WindowContext| {
|
||||
view.update(cx, |this, cx| this.render_message(ix, cx))
|
||||
.unwrap()
|
||||
},
|
||||
);
|
||||
|
||||
let mut this = Self {
|
||||
model,
|
||||
messages: Vec::new(),
|
||||
list_state,
|
||||
language_registry,
|
||||
next_message_id: MessageId(0),
|
||||
pending_completion: None,
|
||||
tool_registry,
|
||||
};
|
||||
this.push_new_user_message(true, cx);
|
||||
this
|
||||
}
|
||||
|
||||
fn focused_message_id(&self, cx: &WindowContext) -> Option<MessageId> {
|
||||
self.messages.iter().find_map(|message| match message {
|
||||
ChatMessage::User(message) => message
|
||||
.body
|
||||
.focus_handle(cx)
|
||||
.contains_focused(cx)
|
||||
.then_some(message.id),
|
||||
ChatMessage::Assistant(_) => None,
|
||||
})
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
|
||||
if self.pending_completion.take().is_none() {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(ChatMessage::Assistant(message)) = self.messages.last() {
|
||||
if message.body.text.is_empty() {
|
||||
self.pop_message(cx);
|
||||
} else {
|
||||
self.push_new_user_message(false, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn submit(&mut self, Submit(mode): &Submit, cx: &mut ViewContext<Self>) {
|
||||
let Some(focused_message_id) = self.focused_message_id(cx) else {
|
||||
log::error!("unexpected state: no user message editor is focused.");
|
||||
return;
|
||||
};
|
||||
|
||||
self.truncate_messages(focused_message_id, cx);
|
||||
|
||||
let mode = *mode;
|
||||
self.pending_completion = Some(cx.spawn(move |this, mut cx| async move {
|
||||
Self::request_completion(
|
||||
this.clone(),
|
||||
mode,
|
||||
MAX_COMPLETION_CALLS_PER_SUBMISSION,
|
||||
&mut cx,
|
||||
)
|
||||
.await
|
||||
.log_err();
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let focus = this
|
||||
.user_message(focused_message_id)
|
||||
.body
|
||||
.focus_handle(cx)
|
||||
.contains_focused(cx);
|
||||
this.push_new_user_message(focus, cx);
|
||||
this.pending_completion = None;
|
||||
})
|
||||
.context("Failed to push new user message")
|
||||
.log_err();
|
||||
}));
|
||||
}
|
||||
|
||||
async fn request_completion(
|
||||
this: WeakView<Self>,
|
||||
mode: SubmitMode,
|
||||
limit: usize,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) -> Result<()> {
|
||||
let mut call_count = 0;
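// Each iteration requests one completion, streams it into the freshly pushed
// assistant message, then runs any tool calls it produced; results are written
// back so the next round (up to `limit`) can see them.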
|
||||
loop {
|
||||
let complete = async {
|
||||
let completion = this.update(cx, |this, cx| {
|
||||
this.push_new_assistant_message(cx);
|
||||
|
||||
let definitions = if call_count < limit && matches!(mode, SubmitMode::Codebase)
|
||||
{
|
||||
this.tool_registry.definitions()
|
||||
} else {
|
||||
&[]
|
||||
};
|
||||
call_count += 1;
|
||||
|
||||
CompletionProvider::get(cx).complete(
|
||||
this.model.clone(),
|
||||
this.completion_messages(cx),
|
||||
Vec::new(),
|
||||
1.0,
|
||||
definitions,
|
||||
)
|
||||
});
|
||||
|
||||
let mut stream = completion?.await?;
|
||||
let mut body = String::new();
|
||||
while let Some(delta) = stream.next().await {
|
||||
let delta = delta?;
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(ChatMessage::Assistant(AssistantMessage {
|
||||
body: message_body,
|
||||
tool_calls: message_tool_calls,
|
||||
..
|
||||
})) = this.messages.last_mut()
|
||||
{
|
||||
if let Some(content) = &delta.content {
|
||||
body.push_str(content);
|
||||
}
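// Tool calls stream in as partial deltas keyed by index: grow the vec as
// needed and append each id/name/arguments fragment onto the call it
// belongs to until the stream ends.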
|
||||
|
||||
for tool_call in delta.tool_calls {
|
||||
let index = tool_call.index as usize;
|
||||
if index >= message_tool_calls.len() {
|
||||
message_tool_calls.resize_with(index + 1, Default::default);
|
||||
}
|
||||
let call = &mut message_tool_calls[index];
|
||||
|
||||
if let Some(id) = &tool_call.id {
|
||||
call.id.push_str(id);
|
||||
}
|
||||
|
||||
match tool_call.variant {
|
||||
Some(proto::tool_call_delta::Variant::Function(tool_call)) => {
|
||||
if let Some(name) = &tool_call.name {
|
||||
call.name.push_str(name);
|
||||
}
|
||||
if let Some(arguments) = &tool_call.arguments {
|
||||
call.arguments.push_str(arguments);
|
||||
}
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
|
||||
*message_body =
|
||||
RichText::new(body.clone(), &[], &this.language_registry);
|
||||
cx.notify();
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
})?;
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.await;
|
||||
|
||||
let mut tool_tasks = Vec::new();
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(ChatMessage::Assistant(AssistantMessage {
|
||||
error: message_error,
|
||||
tool_calls,
|
||||
..
|
||||
})) = this.messages.last_mut()
|
||||
{
|
||||
if let Err(error) = complete {
|
||||
message_error.replace(SharedString::from(error.to_string()));
|
||||
cx.notify();
|
||||
} else {
|
||||
for tool_call in tool_calls.iter() {
|
||||
tool_tasks.push(this.tool_registry.call(tool_call, cx));
|
||||
}
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
if tool_tasks.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let tools = join_all(tool_tasks.into_iter()).await;
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(ChatMessage::Assistant(AssistantMessage { tool_calls, .. })) =
|
||||
this.messages.last_mut()
|
||||
{
|
||||
*tool_calls = tools;
|
||||
cx.notify();
|
||||
}
|
||||
})?;
|
||||
}
|
||||
}
|
||||
|
||||
fn user_message(&mut self, message_id: MessageId) -> &mut UserMessage {
|
||||
self.messages
|
||||
.iter_mut()
|
||||
.find_map(|message| match message {
|
||||
ChatMessage::User(user_message) if user_message.id == message_id => {
|
||||
Some(user_message)
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
.expect("User message not found")
|
||||
}
|
||||
|
||||
fn push_new_user_message(&mut self, focus: bool, cx: &mut ViewContext<Self>) {
|
||||
let id = self.next_message_id.post_inc();
|
||||
let body = cx.new_view(|cx| {
|
||||
let mut editor = Editor::auto_height(80, cx);
|
||||
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
|
||||
if focus {
|
||||
cx.focus_self();
|
||||
}
|
||||
editor
|
||||
});
|
||||
let message = ChatMessage::User(UserMessage {
|
||||
id,
|
||||
body,
|
||||
contexts: Vec::new(),
|
||||
});
|
||||
self.push_message(message, cx);
|
||||
}
|
||||
|
||||
fn push_new_assistant_message(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let message = ChatMessage::Assistant(AssistantMessage {
|
||||
id: self.next_message_id.post_inc(),
|
||||
body: RichText::default(),
|
||||
tool_calls: Vec::new(),
|
||||
error: None,
|
||||
});
|
||||
self.push_message(message, cx);
|
||||
}
|
||||
|
||||
fn push_message(&mut self, message: ChatMessage, cx: &mut ViewContext<Self>) {
|
||||
let old_len = self.messages.len();
|
||||
let focus_handle = Some(message.focus_handle(cx));
|
||||
self.messages.push(message);
|
||||
self.list_state
|
||||
.splice_focusable(old_len..old_len, focus_handle);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn pop_message(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if self.messages.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
self.messages.pop();
|
||||
self.list_state
|
||||
.splice(self.messages.len()..self.messages.len() + 1, 0);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn truncate_messages(&mut self, last_message_id: MessageId, cx: &mut ViewContext<Self>) {
|
||||
if let Some(index) = self.messages.iter().position(|message| match message {
|
||||
ChatMessage::User(message) => message.id == last_message_id,
|
||||
ChatMessage::Assistant(message) => message.id == last_message_id,
|
||||
}) {
|
||||
self.list_state.splice(index + 1..self.messages.len(), 0);
|
||||
self.messages.truncate(index + 1);
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
fn render_error(
|
||||
&self,
|
||||
error: Option<SharedString>,
|
||||
_ix: usize,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> AnyElement {
|
||||
let theme = cx.theme();
|
||||
|
||||
if let Some(error) = error {
|
||||
div()
|
||||
.py_1()
|
||||
.px_2()
|
||||
.neg_mx_1()
|
||||
.rounded_md()
|
||||
.border()
|
||||
.border_color(theme.status().error_border)
|
||||
// .bg(theme.status().error_background)
|
||||
.text_color(theme.status().error)
|
||||
.child(error.clone())
|
||||
.into_any_element()
|
||||
} else {
|
||||
div().into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
fn render_message(&self, ix: usize, cx: &mut ViewContext<Self>) -> AnyElement {
|
||||
let is_last = ix == self.messages.len() - 1;
|
||||
|
||||
match &self.messages[ix] {
|
||||
ChatMessage::User(UserMessage {
|
||||
body,
|
||||
contexts: _contexts,
|
||||
..
|
||||
}) => div()
|
||||
.when(!is_last, |element| element.mb_2())
|
||||
.child(div().p_2().child(Label::new("You").color(Color::Default)))
|
||||
.child(
|
||||
div()
|
||||
.on_action(cx.listener(Self::submit))
|
||||
.p_2()
|
||||
.text_color(cx.theme().colors().editor_foreground)
|
||||
.font(ThemeSettings::get_global(cx).buffer_font.clone())
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(body.clone()), // .children(contexts.iter().map(|context| context.render(cx))),
|
||||
)
|
||||
.into_any(),
|
||||
ChatMessage::Assistant(AssistantMessage {
|
||||
id,
|
||||
body,
|
||||
error,
|
||||
tool_calls,
|
||||
..
|
||||
}) => {
|
||||
let assistant_body = if body.text.is_empty() && !tool_calls.is_empty() {
|
||||
div()
|
||||
} else {
|
||||
div().p_2().child(body.element(ElementId::from(id.0), cx))
|
||||
};
|
||||
|
||||
div()
|
||||
.when(!is_last, |element| element.mb_2())
|
||||
.child(
|
||||
div()
|
||||
.p_2()
|
||||
.child(Label::new("Assistant").color(Color::Modified)),
|
||||
)
|
||||
.child(assistant_body)
|
||||
.child(self.render_error(error.clone(), ix, cx))
|
||||
.children(tool_calls.iter().map(|tool_call| {
|
||||
let result = &tool_call.result;
|
||||
let name = tool_call.name.clone();
|
||||
match result {
|
||||
Some(result) => div()
|
||||
.p_2()
|
||||
.child(result.render(&name, &tool_call.id, cx))
|
||||
.into_any(),
|
||||
None => div()
|
||||
.p_2()
|
||||
.child(Label::new(name).color(Color::Modified))
|
||||
.child("Running...")
|
||||
.into_any(),
|
||||
}
|
||||
}))
|
||||
.into_any()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn completion_messages(&self, cx: &WindowContext) -> Vec<CompletionMessage> {
|
||||
let mut completion_messages = Vec::new();
|
||||
|
||||
for message in &self.messages {
|
||||
match message {
|
||||
ChatMessage::User(UserMessage { body, contexts, .. }) => {
|
||||
// setup context for model
|
||||
contexts.iter().for_each(|context| {
|
||||
completion_messages.extend(context.completion_messages(cx))
|
||||
});
|
||||
|
||||
// Show user's message last so that the assistant is grounded in the user's request
|
||||
completion_messages.push(CompletionMessage::User {
|
||||
content: body.read(cx).text(cx),
|
||||
});
|
||||
}
|
||||
ChatMessage::Assistant(AssistantMessage {
|
||||
body, tool_calls, ..
|
||||
}) => {
|
||||
// In no case do we want to send an empty message. This shouldn't happen, but we might as well
|
||||
// not break the Chat API if it does.
|
||||
if body.text.is_empty() && tool_calls.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let tool_calls_from_assistant = tool_calls
|
||||
.iter()
|
||||
.map(|tool_call| ToolCall {
|
||||
content: ToolCallContent::Function {
|
||||
function: FunctionContent {
|
||||
name: tool_call.name.clone(),
|
||||
arguments: tool_call.arguments.clone(),
|
||||
},
|
||||
},
|
||||
id: tool_call.id.clone(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
completion_messages.push(CompletionMessage::Assistant {
|
||||
content: Some(body.text.to_string()),
|
||||
tool_calls: tool_calls_from_assistant,
|
||||
});
|
||||
|
||||
for tool_call in tool_calls {
|
||||
// todo!(): we should not be sending when the tool is still running / has no result
|
||||
// For now I'm going to have to assume we send an empty string because otherwise
|
||||
// the Chat API will break -- there is a required message for every tool call by ID
|
||||
let content = match &tool_call.result {
|
||||
Some(result) => result.format(&tool_call.name),
|
||||
None => "".to_string(),
|
||||
};
|
||||
|
||||
completion_messages.push(CompletionMessage::Tool {
|
||||
content,
|
||||
tool_call_id: tool_call.id.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
completion_messages
|
||||
}
|
||||
|
||||
fn render_model_dropdown(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let this = cx.view().downgrade();
|
||||
div().h_flex().justify_end().child(
|
||||
div().w_32().child(
|
||||
popover_menu("user-menu")
|
||||
.menu(move |cx| {
|
||||
ContextMenu::build(cx, |mut menu, cx| {
|
||||
for model in CompletionProvider::get(cx).available_models() {
|
||||
menu = menu.custom_entry(
|
||||
{
|
||||
let model = model.clone();
|
||||
move |_| Label::new(model.clone()).into_any_element()
|
||||
},
|
||||
{
|
||||
let this = this.clone();
|
||||
move |cx| {
|
||||
_ = this.update(cx, |this, cx| {
|
||||
this.model = model.clone();
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
menu
|
||||
})
|
||||
.into()
|
||||
})
|
||||
.trigger(
|
||||
ButtonLike::new("active-model")
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
div()
|
||||
.overflow_x_hidden()
|
||||
.flex_grow()
|
||||
.whitespace_nowrap()
|
||||
.child(Label::new(self.model.clone())),
|
||||
)
|
||||
.child(div().child(
|
||||
Icon::new(IconName::ChevronDown).color(Color::Muted),
|
||||
)),
|
||||
)
|
||||
.style(ButtonStyle::Subtle)
|
||||
.tooltip(move |cx| Tooltip::text("Change Model", cx)),
|
||||
)
|
||||
.anchor(gpui::AnchorCorner::TopRight),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for AssistantChat {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
div()
|
||||
.relative()
|
||||
.flex_1()
|
||||
.v_flex()
|
||||
.key_context("AssistantChat")
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.text_color(Color::Default.color(cx))
|
||||
.child(self.render_model_dropdown(cx))
|
||||
.child(list(self.list_state.clone()).flex_1())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq)]
|
||||
struct MessageId(usize);
|
||||
|
||||
impl MessageId {
|
||||
fn post_inc(&mut self) -> Self {
|
||||
let id = *self;
|
||||
self.0 += 1;
|
||||
id
|
||||
}
|
||||
}
|
||||
|
||||
enum ChatMessage {
|
||||
User(UserMessage),
|
||||
Assistant(AssistantMessage),
|
||||
}
|
||||
|
||||
impl ChatMessage {
|
||||
fn focus_handle(&self, cx: &AppContext) -> Option<FocusHandle> {
|
||||
match self {
|
||||
ChatMessage::User(UserMessage { body, .. }) => Some(body.focus_handle(cx)),
|
||||
ChatMessage::Assistant(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct UserMessage {
|
||||
id: MessageId,
|
||||
body: View<Editor>,
|
||||
contexts: Vec<AssistantContext>,
|
||||
}
|
||||
|
||||
struct AssistantMessage {
|
||||
id: MessageId,
|
||||
body: RichText,
|
||||
tool_calls: Vec<ToolFunctionCall>,
|
||||
error: Option<SharedString>,
|
||||
}
|
||||
|
||||
// Since we're swapping out for direct query usage, we might not need to use this injected context
|
||||
// It will be useful though for when the user _definitely_ wants the model to see a specific file,
|
||||
// query, error, etc.
|
||||
#[allow(dead_code)]
|
||||
enum AssistantContext {
|
||||
Codebase(View<CodebaseContext>),
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
struct CodebaseExcerpt {
|
||||
element_id: ElementId,
|
||||
path: SharedString,
|
||||
text: SharedString,
|
||||
score: f32,
|
||||
expanded: bool,
|
||||
}
|
||||
|
||||
impl AssistantContext {
|
||||
#[allow(dead_code)]
|
||||
fn render(&self, _cx: &mut ViewContext<AssistantChat>) -> AnyElement {
|
||||
match self {
|
||||
AssistantContext::Codebase(context) => context.clone().into_any_element(),
|
||||
}
|
||||
}
|
||||
|
||||
fn completion_messages(&self, cx: &WindowContext) -> Vec<CompletionMessage> {
|
||||
match self {
|
||||
AssistantContext::Codebase(context) => context.read(cx).completion_messages(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum CodebaseContext {
|
||||
Pending { _task: Task<()> },
|
||||
Done(Result<Vec<CodebaseExcerpt>>),
|
||||
}
|
||||
|
||||
impl CodebaseContext {
|
||||
fn toggle_expanded(&mut self, element_id: ElementId, cx: &mut ViewContext<Self>) {
|
||||
if let CodebaseContext::Done(Ok(excerpts)) = self {
|
||||
if let Some(excerpt) = excerpts
|
||||
.iter_mut()
|
||||
.find(|excerpt| excerpt.element_id == element_id)
|
||||
{
|
||||
excerpt.expanded = !excerpt.expanded;
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for CodebaseContext {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
match self {
|
||||
CodebaseContext::Pending { .. } => div()
|
||||
.h_flex()
|
||||
.items_center()
|
||||
.gap_1()
|
||||
.child(Icon::new(IconName::Ai).color(Color::Muted).into_element())
|
||||
.child("Searching codebase..."),
|
||||
CodebaseContext::Done(Ok(excerpts)) => {
|
||||
div()
|
||||
.v_flex()
|
||||
.gap_2()
|
||||
.children(excerpts.iter().map(|excerpt| {
|
||||
let expanded = excerpt.expanded;
|
||||
let element_id = excerpt.element_id.clone();
|
||||
|
||||
CollapsibleContainer::new(element_id.clone(), expanded)
|
||||
.start_slot(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(Icon::new(IconName::File).color(Color::Muted))
|
||||
.child(Label::new(excerpt.path.clone()).color(Color::Muted)),
|
||||
)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.toggle_expanded(element_id.clone(), cx);
|
||||
}))
|
||||
.child(
|
||||
div()
|
||||
.p_2()
|
||||
.rounded_md()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(
|
||||
excerpt.text.clone(), // todo!(): Show as an editor block
|
||||
),
|
||||
)
|
||||
}))
|
||||
}
|
||||
CodebaseContext::Done(Err(error)) => div().child(error.to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CodebaseContext {
|
||||
#[allow(dead_code)]
|
||||
fn new(
|
||||
query: impl 'static + Future<Output = Result<String>>,
|
||||
populated: oneshot::Sender<bool>,
|
||||
project_index: Model<ProjectIndex>,
|
||||
fs: Arc<dyn Fs>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let query = query.boxed_local();
|
||||
let _task = cx.spawn(|this, mut cx| async move {
|
||||
let result = async {
|
||||
let query = query.await?;
|
||||
let results = this
|
||||
.update(&mut cx, |_this, cx| {
|
||||
project_index.read(cx).search(&query, 16, cx)
|
||||
})?
|
||||
.await;
|
||||
|
||||
let excerpts = results.into_iter().map(|result| {
|
||||
let abs_path = result
|
||||
.worktree
|
||||
.read_with(&cx, |worktree, _| worktree.abs_path().join(&result.path));
|
||||
let fs = fs.clone();
|
||||
|
||||
async move {
|
||||
let path = result.path.clone();
|
||||
let text = fs.load(&abs_path?).await?;
|
||||
// todo!("what should we do with stale ranges?");
|
||||
let range = cmp::min(result.range.start, text.len())
|
||||
..cmp::min(result.range.end, text.len());
|
||||
|
||||
let text = SharedString::from(text[range].to_string());
|
||||
|
||||
anyhow::Ok(CodebaseExcerpt {
|
||||
element_id: ElementId::Name(nanoid::nanoid!().into()),
|
||||
path: path.to_string_lossy().to_string().into(),
|
||||
text,
|
||||
score: result.score,
|
||||
expanded: false,
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
anyhow::Ok(
|
||||
futures::future::join_all(excerpts)
|
||||
.await
|
||||
.into_iter()
|
||||
.filter_map(|result| result.log_err())
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.populate(result, populated, cx);
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
|
||||
Self::Pending { _task }
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn populate(
|
||||
&mut self,
|
||||
result: Result<Vec<CodebaseExcerpt>>,
|
||||
populated: oneshot::Sender<bool>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let success = result.is_ok();
|
||||
*self = Self::Done(result);
|
||||
populated.send(success).ok();
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn completion_messages(&self) -> Vec<CompletionMessage> {
|
||||
// One system message for the whole batch of excerpts:
|
||||
|
||||
// Semantic search results for user query:
|
||||
//
|
||||
// Excerpt from $path:
|
||||
// ~~~
|
||||
// `text`
|
||||
// ~~~
|
||||
//
|
||||
// Excerpt from $path:
|
||||
|
||||
match self {
|
||||
CodebaseContext::Done(Ok(excerpts)) => {
|
||||
if excerpts.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let mut body = "Semantic search results for user query:\n".to_string();
|
||||
|
||||
for excerpt in excerpts {
|
||||
body.push_str("Excerpt from ");
|
||||
body.push_str(excerpt.path.as_ref());
|
||||
body.push_str(", score ");
|
||||
body.push_str(&excerpt.score.to_string());
|
||||
body.push_str(":\n");
|
||||
body.push_str("~~~\n");
|
||||
body.push_str(excerpt.text.as_ref());
|
||||
body.push_str("~~~\n");
|
||||
}
|
||||
|
||||
vec![CompletionMessage::System { content: body }]
|
||||
}
|
||||
_ => vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
26
crates/assistant2/src/assistant_settings.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

#[derive(Default, Debug, Deserialize, Serialize, Clone)]
pub struct AssistantSettings {
    pub enabled: bool,
}

#[derive(Default, Debug, Deserialize, Serialize, Clone, JsonSchema)]
pub struct AssistantSettingsContent {
    pub enabled: Option<bool>,
}

impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant_v2");

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        Ok(sources.json_merge().unwrap_or_else(|_| Default::default()))
    }
}
|
||||
179
crates/assistant2/src/completion_provider.rs
Normal file
@@ -0,0 +1,179 @@
|
||||
use anyhow::Result;
|
||||
use assistant_tooling::ToolFunctionDefinition;
|
||||
use client::{proto, Client};
|
||||
use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
|
||||
use gpui::Global;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use open_ai::RequestMessage as CompletionMessage;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct CompletionProvider(Arc<dyn CompletionProviderBackend>);
|
||||
|
||||
impl CompletionProvider {
|
||||
pub fn new(backend: impl CompletionProviderBackend) -> Self {
|
||||
Self(Arc::new(backend))
|
||||
}
|
||||
|
||||
pub fn default_model(&self) -> String {
|
||||
self.0.default_model()
|
||||
}
|
||||
|
||||
pub fn available_models(&self) -> Vec<String> {
|
||||
self.0.available_models()
|
||||
}
|
||||
|
||||
pub fn complete(
|
||||
&self,
|
||||
model: String,
|
||||
messages: Vec<CompletionMessage>,
|
||||
stop: Vec<String>,
|
||||
temperature: f32,
|
||||
tools: &[ToolFunctionDefinition],
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<proto::LanguageModelResponseMessage>>>>
|
||||
{
|
||||
self.0.complete(model, messages, stop, temperature, tools)
|
||||
}
|
||||
}
|
||||
|
||||
impl Global for CompletionProvider {}
|
||||
|
||||
pub trait CompletionProviderBackend: 'static {
|
||||
fn default_model(&self) -> String;
|
||||
fn available_models(&self) -> Vec<String>;
|
||||
fn complete(
|
||||
&self,
|
||||
model: String,
|
||||
messages: Vec<CompletionMessage>,
|
||||
stop: Vec<String>,
|
||||
temperature: f32,
|
||||
tools: &[ToolFunctionDefinition],
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<proto::LanguageModelResponseMessage>>>>;
|
||||
}
|
||||
|
||||
pub struct CloudCompletionProvider {
|
||||
client: Arc<Client>,
|
||||
}
|
||||
|
||||
impl CloudCompletionProvider {
|
||||
pub fn new(client: Arc<Client>) -> Self {
|
||||
Self { client }
|
||||
}
|
||||
}
|
||||
|
||||
impl CompletionProviderBackend for CloudCompletionProvider {
|
||||
fn default_model(&self) -> String {
|
||||
"gpt-4-turbo".into()
|
||||
}
|
||||
|
||||
fn available_models(&self) -> Vec<String> {
|
||||
vec!["gpt-4-turbo".into(), "gpt-4".into(), "gpt-3.5-turbo".into()]
|
||||
}
|
||||
|
||||
fn complete(
|
||||
&self,
|
||||
model: String,
|
||||
messages: Vec<CompletionMessage>,
|
||||
stop: Vec<String>,
|
||||
temperature: f32,
|
||||
tools: &[ToolFunctionDefinition],
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<proto::LanguageModelResponseMessage>>>>
|
||||
{
|
||||
let client = self.client.clone();
|
||||
let tools: Vec<proto::ChatCompletionTool> = tools
|
||||
.iter()
|
||||
.filter_map(|tool| {
|
||||
Some(proto::ChatCompletionTool {
|
||||
variant: Some(proto::chat_completion_tool::Variant::Function(
|
||||
proto::chat_completion_tool::FunctionObject {
|
||||
name: tool.name.clone(),
|
||||
description: Some(tool.description.clone()),
|
||||
parameters: Some(serde_json::to_string(&tool.parameters).ok()?),
|
||||
},
|
||||
)),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
let tool_choice = match tools.is_empty() {
|
||||
true => None,
|
||||
false => Some("auto".into()),
|
||||
};
|
||||
|
||||
async move {
|
||||
let stream = client
|
||||
.request_stream(proto::CompleteWithLanguageModel {
|
||||
model,
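// Map the OpenAI-style `RequestMessage`s onto the proto request,
// carrying over roles, tool calls, and tool call ids unchanged.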
|
||||
messages: messages
|
||||
.into_iter()
|
||||
.map(|message| match message {
|
||||
CompletionMessage::Assistant {
|
||||
content,
|
||||
tool_calls,
|
||||
} => proto::LanguageModelRequestMessage {
|
||||
role: proto::LanguageModelRole::LanguageModelAssistant as i32,
|
||||
content: content.unwrap_or_default(),
|
||||
tool_call_id: None,
|
||||
tool_calls: tool_calls
|
||||
.into_iter()
|
||||
.map(|tool_call| match tool_call.content {
|
||||
open_ai::ToolCallContent::Function { function } => {
|
||||
proto::ToolCall {
|
||||
id: tool_call.id,
|
||||
variant: Some(proto::tool_call::Variant::Function(
|
||||
proto::tool_call::FunctionCall {
|
||||
name: function.name,
|
||||
arguments: function.arguments,
|
||||
},
|
||||
)),
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
CompletionMessage::User { content } => {
|
||||
proto::LanguageModelRequestMessage {
|
||||
role: proto::LanguageModelRole::LanguageModelUser as i32,
|
||||
content,
|
||||
tool_call_id: None,
|
||||
tool_calls: Vec::new(),
|
||||
}
|
||||
}
|
||||
CompletionMessage::System { content } => {
|
||||
proto::LanguageModelRequestMessage {
|
||||
role: proto::LanguageModelRole::LanguageModelSystem as i32,
|
||||
content,
|
||||
tool_calls: Vec::new(),
|
||||
tool_call_id: None,
|
||||
}
|
||||
}
|
||||
CompletionMessage::Tool {
|
||||
content,
|
||||
tool_call_id,
|
||||
} => proto::LanguageModelRequestMessage {
|
||||
role: proto::LanguageModelRole::LanguageModelTool as i32,
|
||||
content,
|
||||
tool_call_id: Some(tool_call_id),
|
||||
tool_calls: Vec::new(),
|
||||
},
|
||||
})
|
||||
.collect(),
|
||||
stop,
|
||||
temperature,
|
||||
tool_choice,
|
||||
tools,
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(stream
|
||||
.filter_map(|response| async move {
|
||||
match response {
|
||||
Ok(mut response) => Some(Ok(response.choices.pop()?.delta?)),
|
||||
Err(error) => Some(Err(error)),
|
||||
}
|
||||
})
|
||||
.boxed())
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
}
|
||||
129
crates/assistant2/src/markdown_message.rs
Normal file
@@ -0,0 +1,129 @@
|
||||
use gpui::{prelude::*, InteractiveText, Task, View, ViewContext};
|
||||
use language::{LanguageRegistry, Rope};
|
||||
use rich_text::Highlight;
|
||||
use std::{ops::Range, sync::Arc};
|
||||
use util::ResultExt;
|
||||
|
||||
pub struct MarkdownMessage {
|
||||
message: Rope,
|
||||
parsed: ParsedMarkdown,
|
||||
should_reparse: bool,
|
||||
pending_parse: Option<Task<()>>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct ParsedMarkdown {
|
||||
message: Rope,
|
||||
highlights: Vec<(Range<usize>, Highlight)>,
|
||||
link_ranges: Vec<Range<usize>>,
|
||||
link_urls: Vec<String>,
|
||||
}
|
||||
|
||||
impl MarkdownMessage {
|
||||
pub fn new(text: &str, language_registry: Arc<LanguageRegistry>) -> Self {
|
||||
Self {
|
||||
message: Rope::new(),
|
||||
should_reparse: false,
|
||||
pending_parse: None,
|
||||
parsed: ParsedMarkdown::default(),
|
||||
language_registry,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push(&mut self, text: &str, cx: &mut ViewContext<Self>) {
|
||||
self.message.push(text);
|
||||
self.parsed.message = self.message.clone();
|
||||
self.reparse(cx);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn reparse(&mut self, cx: &mut ViewContext<Self>) {
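// Coalesce reparse requests: if a parse is already in flight, just record
// that another pass is needed and let the in-flight task re-trigger it.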
|
||||
self.should_reparse = true;
|
||||
if self.pending_parse.is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
let message = self.message.clone();
|
||||
let language_registry = self.language_registry.clone();
|
||||
self.should_reparse = false;
|
||||
self.pending_parse = Some(cx.spawn(|this, mut cx| async move {
|
||||
let parsed = cx
|
||||
.background_executor()
|
||||
.spawn(async move {
|
||||
let input = message.to_string();
|
||||
let mut output = String::new();
|
||||
let mut highlights = Vec::new();
|
||||
let mut link_ranges = Vec::new();
|
||||
let mut link_urls = Vec::new();
|
||||
rich_text::render_markdown_mut(
|
||||
&input,
|
||||
&[],
|
||||
&language_registry,
|
||||
None,
|
||||
&mut output,
|
||||
&mut highlights,
|
||||
&mut link_ranges,
|
||||
&mut link_urls,
|
||||
);
|
||||
|
||||
ParsedMarkdown {
|
||||
message,
|
||||
highlights,
|
||||
link_ranges,
|
||||
link_urls,
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.parsed = parsed;
|
||||
this.parsed.message = this.message.clone();
|
||||
this.pending_parse = None;
|
||||
if this.should_reparse {
|
||||
this.reparse(cx);
|
||||
}
|
||||
cx.notify();
|
||||
})
|
||||
.log_err();
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for MarkdownMessage {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
MarkdownMessageElement(cx.view().clone())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MarkdownMessageElement(View<MarkdownMessage>);
|
||||
|
||||
impl Element for MarkdownMessageElement {
|
||||
type RequestLayoutState = InteractiveText;
|
||||
type PrepaintState = ();
|
||||
|
||||
fn request_layout(
|
||||
&mut self,
|
||||
cx: &mut ui::prelude::WindowContext,
|
||||
) -> (gpui::LayoutId, Self::RequestLayoutState) {
    todo!()
}
|
||||
|
||||
fn prepaint(
|
||||
&mut self,
|
||||
bounds: gpui::Bounds<ui::prelude::Pixels>,
|
||||
request_layout: &mut Self::RequestLayoutState,
|
||||
cx: &mut ui::prelude::WindowContext,
|
||||
) -> Self::PrepaintState {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn paint(
|
||||
&mut self,
|
||||
bounds: gpui::Bounds<ui::prelude::Pixels>,
|
||||
request_layout: &mut Self::RequestLayoutState,
|
||||
prepaint: &mut Self::PrepaintState,
|
||||
cx: &mut ui::prelude::WindowContext,
|
||||
) {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
176
crates/assistant2/src/tools.rs
Normal file
@@ -0,0 +1,176 @@
|
||||
use anyhow::Result;
|
||||
use assistant_tooling::LanguageModelTool;
|
||||
use gpui::{prelude::*, AnyElement, AppContext, Model, Task};
|
||||
use project::Fs;
|
||||
use schemars::JsonSchema;
|
||||
use semantic_index::ProjectIndex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
use ui::{
|
||||
div, prelude::*, CollapsibleContainer, Color, Icon, IconName, Label, SharedString,
|
||||
WindowContext,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
|
||||
const DEFAULT_SEARCH_LIMIT: usize = 20;
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct CodebaseExcerpt {
|
||||
path: SharedString,
|
||||
text: SharedString,
|
||||
score: f32,
|
||||
}
|
||||
|
||||
// Note: Comments on a `LanguageModelTool::Input` become descriptions on the generated JSON schema as shown to the language model.
|
||||
// Any changes or deletions to the `CodebaseQuery` comments will change model behavior.
|
||||
|
||||
#[derive(Deserialize, JsonSchema)]
|
||||
pub struct CodebaseQuery {
|
||||
/// Semantic search query
|
||||
query: String,
|
||||
/// Maximum number of results to return, defaults to 20
|
||||
limit: Option<usize>,
|
||||
}
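// For reference, the schema derived via `schemars` surfaces the doc comments above
// as `description` fields, roughly like this (a sketch, not verified output):
//
// {
//   "title": "CodebaseQuery",
//   "type": "object",
//   "properties": {
//     "query": { "type": "string", "description": "Semantic search query" },
//     "limit": { "type": ["integer", "null"], "description": "Maximum number of results to return, defaults to 20" }
//   },
//   "required": ["query"]
// }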
|
||||
|
||||
pub struct ProjectIndexTool {
|
||||
project_index: Model<ProjectIndex>,
|
||||
fs: Arc<dyn Fs>,
|
||||
}
|
||||
|
||||
impl ProjectIndexTool {
|
||||
pub fn new(project_index: Model<ProjectIndex>, fs: Arc<dyn Fs>) -> Self {
|
||||
// TODO: setup a better description based on the user's current codebase.
|
||||
Self { project_index, fs }
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageModelTool for ProjectIndexTool {
|
||||
type Input = CodebaseQuery;
|
||||
type Output = Vec<CodebaseExcerpt>;
|
||||
|
||||
fn name(&self) -> String {
|
||||
"query_codebase".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Semantic search against the user's current codebase, returning excerpts related to the query by computing a dot product against embeddings of chunks and an embedding of the query".to_string()
|
||||
}
|
||||
|
||||
fn execute(&self, query: &Self::Input, cx: &AppContext) -> Task<Result<Self::Output>> {
|
||||
let project_index = self.project_index.read(cx);
|
||||
|
||||
let results = project_index.search(
|
||||
query.query.as_str(),
|
||||
query.limit.unwrap_or(DEFAULT_SEARCH_LIMIT),
|
||||
cx,
|
||||
);
|
||||
|
||||
let fs = self.fs.clone();
|
||||
|
||||
cx.spawn(|cx| async move {
|
||||
let results = results.await;
|
||||
|
||||
let excerpts = results.into_iter().map(|result| {
|
||||
let abs_path = result
|
||||
.worktree
|
||||
.read_with(&cx, |worktree, _| worktree.abs_path().join(&result.path));
|
||||
let fs = fs.clone();
|
||||
|
||||
async move {
|
||||
let path = result.path.clone();
|
||||
let text = fs.load(&abs_path?).await?;
|
||||
|
||||
let mut start = result.range.start.min(text.len());
|
||||
let mut end = result.range.end.min(text.len());
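// The stored ranges are byte offsets and may not land on UTF-8 character
// boundaries, so nudge start forward and end backward until both do.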
|
||||
while !text.is_char_boundary(start) {
|
||||
start += 1;
|
||||
}
|
||||
while !text.is_char_boundary(end) {
|
||||
end -= 1;
|
||||
}
|
||||
|
||||
anyhow::Ok(CodebaseExcerpt {
|
||||
path: path.to_string_lossy().to_string().into(),
|
||||
text: SharedString::from(text[start..end].to_string()),
|
||||
score: result.score,
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
let excerpts = futures::future::join_all(excerpts)
|
||||
.await
|
||||
.into_iter()
|
||||
.filter_map(|result| result.log_err())
|
||||
.collect();
|
||||
anyhow::Ok(excerpts)
|
||||
})
|
||||
}
|
||||
|
||||
fn render(
|
||||
_tool_call_id: &str,
|
||||
input: &Self::Input,
|
||||
excerpts: &Self::Output,
|
||||
cx: &mut WindowContext,
|
||||
) -> AnyElement {
|
||||
let query = input.query.clone();
|
||||
|
||||
div()
|
||||
.v_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
div()
|
||||
.p_2()
|
||||
.rounded_md()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(
|
||||
h_flex()
|
||||
.child(Label::new("Query: ").color(Color::Modified))
|
||||
.child(Label::new(query).color(Color::Muted)),
|
||||
),
|
||||
)
|
||||
.children(excerpts.iter().map(|excerpt| {
|
||||
// This render doesn't have state/model, so we can't use the listener
|
||||
// let expanded = excerpt.expanded;
|
||||
// let element_id = excerpt.element_id.clone();
|
||||
let element_id = ElementId::Name(nanoid::nanoid!().into());
|
||||
let expanded = false;
|
||||
|
||||
CollapsibleContainer::new(element_id.clone(), expanded)
|
||||
.start_slot(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(Icon::new(IconName::File).color(Color::Muted))
|
||||
.child(Label::new(excerpt.path.clone()).color(Color::Muted)),
|
||||
)
|
||||
// .on_click(cx.listener(move |this, _, cx| {
|
||||
// this.toggle_expanded(element_id.clone(), cx);
|
||||
// }))
|
||||
.child(
|
||||
div()
|
||||
.p_2()
|
||||
.rounded_md()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(
|
||||
excerpt.text.clone(), // todo!(): Show as an editor block
|
||||
),
|
||||
)
|
||||
}))
|
||||
.into_any_element()
|
||||
}
|
||||
|
||||
fn format(_input: &Self::Input, excerpts: &Self::Output) -> String {
|
||||
let mut body = "Semantic search results:\n".to_string();
|
||||
|
||||
for excerpt in excerpts {
|
||||
body.push_str("Excerpt from ");
|
||||
body.push_str(excerpt.path.as_ref());
|
||||
body.push_str(", score ");
|
||||
body.push_str(&excerpt.score.to_string());
|
||||
body.push_str(":\n");
|
||||
body.push_str("~~~\n");
|
||||
body.push_str(excerpt.text.as_ref());
|
||||
body.push_str("~~~\n");
|
||||
}
|
||||
body
|
||||
}
|
||||
}
|
||||
22
crates/assistant_tooling/Cargo.toml
Normal file
@@ -0,0 +1,22 @@
|
||||
[package]
name = "assistant_tooling"
version = "0.1.0"
edition = "2021"
publish = false
license = "GPL-3.0-or-later"

[lints]
workspace = true

[lib]
path = "src/assistant_tooling.rs"

[dependencies]
anyhow.workspace = true
gpui.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true

[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
|
||||
1
crates/assistant_tooling/LICENSE-GPL
Symbolic link
@@ -0,0 +1 @@
|
||||
../../LICENSE-GPL
|
||||
208
crates/assistant_tooling/README.md
Normal file
@@ -0,0 +1,208 @@
|
||||
# Assistant Tooling
|
||||
|
||||
Bringing OpenAI-compatible tool calling to GPUI.
|
||||
|
||||
This unlocks:
|
||||
|
||||
- **Structured Extraction** of model responses
|
||||
- **Validation** of model inputs
|
||||
- **Execution** of chosen tools
|
||||
|
||||
## Overview
|
||||
|
||||
Language models can produce structured outputs that are perfect for calling functions. The most famous of these is OpenAI's tool calling. When making a chat completion, you can pass a list of tools available to the model. The model will choose `0..n` tools to help it complete the user's task. It's up to _you_ to create the tools that the model can call.
|
||||
|
||||
> **User**: "Hey I need help with implementing a collapsible panel in GPUI"
|
||||
>
|
||||
> **Assistant**: "Sure, I can help with that. Let me see what I can find."
|
||||
>
|
||||
> `tool_calls: ["name": "query_codebase", arguments: "{ 'query': 'GPUI collapsible panel' }"]`
|
||||
>
|
||||
> `result: "['crates/gpui/src/panel.rs:12: impl Panel { ... }', 'crates/gpui/src/panel.rs:20: impl Panel { ... }']"`
|
||||
>
|
||||
> **Assistant**: "Here are some excerpts from the GPUI codebase that might help you."
|
||||
|
||||
This library is designed to facilitate this interaction mode by allowing you to go from `struct` to `tool` with a simple trait, `LanguageModelTool`.
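
For a concrete picture of what ends up on the wire, a registered tool is ultimately surfaced to the model as an OpenAI-style function definition. Roughly, for the `query_codebase` example defined below (a sketch of the request shape, not literal output from this crate):

```rust
use serde_json::json;

// Approximately what a registered tool becomes inside the chat completion
// request's `tools` array.
let tool = json!({
    "type": "function",
    "function": {
        "name": "query_codebase",
        "description": "Executes a query against the codebase, returning excerpts related to the query",
        "parameters": {
            "type": "object",
            "properties": { "query": { "type": "string" } },
            "required": ["query"]
        }
    }
});
```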
|
||||
|
||||
## Example
|
||||
|
||||
Let's expose a semantic index so the model can query the codebase directly. First, we'll set up some _necessary_ imports:
|
||||
|
||||
```rust
|
||||
use anyhow::Result;
|
||||
use assistant_tooling::{LanguageModelTool, ToolRegistry};
|
||||
use gpui::{App, AppContext, Task};
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use serde_json::json;
|
||||
```
|
||||
|
||||
Then we'll define the query structure the model must fill in. This _must_ derive `Deserialize` from `serde` and `JsonSchema` from the `schemars` crate.
|
||||
|
||||
```rust
|
||||
#[derive(Deserialize, JsonSchema)]
|
||||
struct CodebaseQuery {
|
||||
query: String,
|
||||
}
|
||||
```
|
||||
|
||||
After that we can define our tool, with the expectation that it will need a `ProjectIndex` to search against. For this example, the index uses the same interface as `semantic_index::ProjectIndex`.
|
||||
|
||||
```rust
|
||||
struct ProjectIndex {}
|
||||
|
||||
impl ProjectIndex {
|
||||
fn new() -> Self {
|
||||
ProjectIndex {}
|
||||
}
|
||||
|
||||
fn search(&self, _query: &str, _limit: usize, _cx: &AppContext) -> Task<Result<Vec<String>>> {
|
||||
// Instead of hooking up a real index, we're going to fake it
|
||||
if _query.contains("gpui") {
|
||||
return Task::ready(Ok(vec![r#"// crates/gpui/src/gpui.rs
|
||||
//! # Welcome to GPUI!
|
||||
//!
|
||||
//! GPUI is a hybrid immediate and retained mode, GPU accelerated, UI framework
|
||||
//! for Rust, designed to support a wide variety of applications
|
||||
"#
|
||||
.to_string()]));
|
||||
}
|
||||
return Task::ready(Ok(vec![]));
|
||||
}
|
||||
}
|
||||
|
||||
struct ProjectIndexTool {
|
||||
project_index: ProjectIndex,
|
||||
}
|
||||
```
|
||||
|
||||
Now we can implement the `LanguageModelTool` trait for our tool by:
|
||||
|
||||
- Defining the `Input` from the model, which is `CodebaseQuery`
|
||||
- Defining the `Output`
|
||||
- Implementing the `name` and `description` functions to provide the model information when it's choosing a tool
|
||||
- Implementing the `execute` function to run the tool
|
||||
|
||||
```rust
|
||||
impl LanguageModelTool for ProjectIndexTool {
|
||||
type Input = CodebaseQuery;
|
||||
type Output = String;
|
||||
|
||||
fn name(&self) -> String {
|
||||
"query_codebase".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Executes a query against the codebase, returning excerpts related to the query".to_string()
|
||||
}
|
||||
|
||||
fn execute(&self, query: &Self::Input, cx: &AppContext) -> Task<Result<Self::Output>> {
|
||||
let results = self.project_index.search(query.query.as_str(), 10, cx);
|
||||
|
||||
cx.spawn(|_cx| async move {
|
||||
let results = results.await?;
|
||||
|
||||
if !results.is_empty() {
|
||||
Ok(results.join("\n"))
|
||||
} else {
|
||||
Ok("No results".to_string())
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For the sake of this example, let's look at the types that OpenAI will be passing to us
|
||||
|
||||
```rust
|
||||
// OpenAI definitions, shown here for demonstration
|
||||
#[derive(Deserialize)]
|
||||
struct FunctionCall {
|
||||
name: String,
|
||||
args: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Eq, PartialEq)]
|
||||
enum ToolCallType {
|
||||
#[serde(rename = "function")]
|
||||
Function,
|
||||
Other,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
|
||||
struct ToolCallId(String);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
enum ToolCall {
|
||||
Function {
|
||||
#[allow(dead_code)]
|
||||
id: ToolCallId,
|
||||
function: FunctionCall,
|
||||
},
|
||||
Other {
|
||||
#[allow(dead_code)]
|
||||
id: ToolCallId,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct AssistantMessage {
|
||||
role: String,
|
||||
content: Option<String>,
|
||||
tool_calls: Option<Vec<ToolCall>>,
|
||||
}
|
||||
```
|
||||
|
||||
When the model wants to call tools, it will pass a list of `ToolCall`s. When those are `function`s that we can handle, we'll pass them to our `ToolRegistry` to get a future that we can await.
|
||||
|
||||
```rust
|
||||
// Inside `fn main()`
|
||||
App::new().run(|cx: &mut AppContext| {
|
||||
let tool = ProjectIndexTool {
|
||||
project_index: ProjectIndex::new(),
|
||||
};
|
||||
|
||||
let mut registry = ToolRegistry::new();
|
||||
let registered = registry.register(tool);
|
||||
assert!(registered.is_ok());
|
||||
```
|
||||
|
||||
Let's pretend the model sent back a message requesting a tool call:
|
||||
|
||||
```rust
|
||||
let model_response = json!({
|
||||
"role": "assistant",
|
||||
"tool_calls": [
|
||||
{
|
||||
"id": "call_1",
|
||||
"function": {
|
||||
"name": "query_codebase",
|
||||
"args": r#"{"query":"GPUI Task background_executor"}"#
|
||||
},
|
||||
"type": "function"
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
let message: AssistantMessage = serde_json::from_value(model_response).unwrap();
|
||||
|
||||
// We know there's a tool call, so let's skip straight to it for this example
|
||||
let tool_calls = message.tool_calls.as_ref().unwrap();
|
||||
let tool_call = tool_calls.get(0).unwrap();
|
||||
```
|
||||
|
||||
We can now use our registry to call the tool.
|
||||
|
||||
```rust
|
||||
// `ToolRegistry::call` takes the crate's own `ToolFunctionCall` type.
let ToolCall::Function { id, function } = tool_call else {
    panic!("expected a function call in this example");
};
let task = registry.call(
    &assistant_tooling::ToolFunctionCall {
        id: id.0.clone(),
        name: function.name.clone(),
        arguments: function.args.clone(),
        result: None,
    },
    cx,
);

cx.spawn(|_cx| async move {
    let call = task.await;
    if let Some(result) = &call.result {
        println!("{}", result.format(&call.name));
    }
})
.detach();
|
||||
```
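
Once the task resolves, the finished `ToolFunctionCall` can be folded back into the conversation as a `tool`-role message so the model can ground its next reply in the result. A minimal sketch, assuming the OpenAI-style request shape shown earlier:

```rust
use assistant_tooling::ToolFunctionCall;
use serde_json::{json, Value};

// Build the follow-up message the Chat API expects for a completed tool call.
fn tool_message(call: &ToolFunctionCall) -> Value {
    let content = call
        .result
        .as_ref()
        .map(|result| result.format(&call.name))
        .unwrap_or_default();

    json!({
        "role": "tool",
        "tool_call_id": call.id,
        "content": content,
    })
}
```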
|
||||
5
crates/assistant_tooling/src/assistant_tooling.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
pub mod registry;
pub mod tool;

pub use crate::registry::ToolRegistry;
pub use crate::tool::{LanguageModelTool, ToolFunctionCall, ToolFunctionDefinition};
|
||||
299
crates/assistant_tooling/src/registry.rs
Normal file
@@ -0,0 +1,299 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use gpui::{AnyElement, AppContext, Task, WindowContext};
|
||||
use std::{any::Any, collections::HashMap};
|
||||
|
||||
use crate::tool::{
|
||||
LanguageModelTool, ToolFunctionCall, ToolFunctionCallResult, ToolFunctionDefinition,
|
||||
};
|
||||
|
||||
pub struct ToolRegistry {
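// Registered tools are stored type-erased: `register` wraps each tool in a closure
// that parses the call's JSON arguments into the tool's `Input`, executes it, and
// boxes the `Output` (plus render/format fns) back into a `ToolFunctionCall`.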
|
||||
tools: HashMap<String, Box<dyn Fn(&ToolFunctionCall, &AppContext) -> Task<ToolFunctionCall>>>,
|
||||
definitions: Vec<ToolFunctionDefinition>,
|
||||
}
|
||||
|
||||
impl ToolRegistry {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
tools: HashMap::new(),
|
||||
definitions: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn definitions(&self) -> &[ToolFunctionDefinition] {
|
||||
&self.definitions
|
||||
}
|
||||
|
||||
pub fn register<T: 'static + LanguageModelTool>(&mut self, tool: T) -> Result<()> {
|
||||
fn render<T: 'static + LanguageModelTool>(
|
||||
tool_call_id: &str,
|
||||
input: &Box<dyn Any>,
|
||||
output: &Box<dyn Any>,
|
||||
cx: &mut WindowContext,
|
||||
) -> AnyElement {
|
||||
T::render(
|
||||
tool_call_id,
|
||||
input.as_ref().downcast_ref::<T::Input>().unwrap(),
|
||||
output.as_ref().downcast_ref::<T::Output>().unwrap(),
|
||||
cx,
|
||||
)
|
||||
}
|
||||
|
||||
fn format<T: 'static + LanguageModelTool>(
|
||||
input: &Box<dyn Any>,
|
||||
output: &Box<dyn Any>,
|
||||
) -> String {
|
||||
T::format(
|
||||
input.as_ref().downcast_ref::<T::Input>().unwrap(),
|
||||
output.as_ref().downcast_ref::<T::Output>().unwrap(),
|
||||
)
|
||||
}
|
||||
|
||||
self.definitions.push(tool.definition());
|
||||
let name = tool.name();
|
||||
let previous = self.tools.insert(
|
||||
name.clone(),
|
||||
Box::new(move |tool_call: &ToolFunctionCall, cx: &AppContext| {
|
||||
let name = tool_call.name.clone();
|
||||
let arguments = tool_call.arguments.clone();
|
||||
let id = tool_call.id.clone();
|
||||
|
||||
let Ok(input) = serde_json::from_str::<T::Input>(arguments.as_str()) else {
|
||||
return Task::ready(ToolFunctionCall {
|
||||
id,
|
||||
name: name.clone(),
|
||||
arguments,
|
||||
result: Some(ToolFunctionCallResult::ParsingFailed),
|
||||
});
|
||||
};
|
||||
|
||||
let result = tool.execute(&input, cx);
|
||||
|
||||
cx.spawn(move |_cx| async move {
|
||||
match result.await {
|
||||
Ok(result) => {
|
||||
let result: T::Output = result;
|
||||
ToolFunctionCall {
|
||||
id,
|
||||
name: name.clone(),
|
||||
arguments,
|
||||
result: Some(ToolFunctionCallResult::Finished {
|
||||
input: Box::new(input),
|
||||
output: Box::new(result),
|
||||
render_fn: render::<T>,
|
||||
format_fn: format::<T>,
|
||||
}),
|
||||
}
|
||||
}
|
||||
Err(_error) => ToolFunctionCall {
|
||||
id,
|
||||
name: name.clone(),
|
||||
arguments,
|
||||
result: Some(ToolFunctionCallResult::ExecutionFailed {
|
||||
input: Box::new(input),
|
||||
}),
|
||||
},
|
||||
}
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
if previous.is_some() {
|
||||
return Err(anyhow!("already registered a tool with name {}", name));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn call(&self, tool_call: &ToolFunctionCall, cx: &AppContext) -> Task<ToolFunctionCall> {
|
||||
let name = tool_call.name.clone();
|
||||
let arguments = tool_call.arguments.clone();
|
||||
let id = tool_call.id.clone();
|
||||
|
||||
let tool = match self.tools.get(&name) {
|
||||
Some(tool) => tool,
|
||||
None => {
|
||||
let name = name.clone();
|
||||
return Task::ready(ToolFunctionCall {
|
||||
id,
|
||||
name: name.clone(),
|
||||
arguments,
|
||||
result: Some(ToolFunctionCallResult::NoSuchTool),
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
tool(tool_call, cx)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
|
||||
use super::*;
|
||||
|
||||
use schemars::schema_for;
|
||||
|
||||
use gpui::{div, AnyElement, Element, ParentElement, TestAppContext, WindowContext};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
|
||||
#[derive(Deserialize, Serialize, JsonSchema)]
|
||||
struct WeatherQuery {
|
||||
location: String,
|
||||
unit: String,
|
||||
}
|
||||
|
||||
struct WeatherTool {
|
||||
current_weather: WeatherResult,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, PartialEq, Debug)]
|
||||
struct WeatherResult {
|
||||
location: String,
|
||||
temperature: f64,
|
||||
unit: String,
|
||||
}
|
||||
|
||||
impl LanguageModelTool for WeatherTool {
|
||||
type Input = WeatherQuery;
|
||||
type Output = WeatherResult;
|
||||
|
||||
fn name(&self) -> String {
|
||||
"get_current_weather".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Fetches the current weather for a given location.".to_string()
|
||||
}
|
||||
|
||||
fn execute(&self, input: &WeatherQuery, _cx: &AppContext) -> Task<Result<Self::Output>> {
|
||||
let _location = input.location.clone();
|
||||
let _unit = input.unit.clone();
|
||||
|
||||
let weather = self.current_weather.clone();
|
||||
|
||||
Task::ready(Ok(weather))
|
||||
}
|
||||
|
||||
fn render(
|
||||
_tool_call_id: &str,
|
||||
_input: &Self::Input,
|
||||
output: &Self::Output,
|
||||
_cx: &mut WindowContext,
|
||||
) -> AnyElement {
|
||||
div()
|
||||
.child(format!(
|
||||
"The current temperature in {} is {} {}",
|
||||
output.location, output.temperature, output.unit
|
||||
))
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn format(_input: &Self::Input, output: &Self::Output) -> String {
|
||||
format!(
|
||||
"The current temperature in {} is {} {}",
|
||||
output.location, output.temperature, output.unit
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_function_registry(cx: &mut TestAppContext) {
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
let mut registry = ToolRegistry::new();
|
||||
|
||||
let tool = WeatherTool {
|
||||
current_weather: WeatherResult {
|
||||
location: "San Francisco".to_string(),
|
||||
temperature: 21.0,
|
||||
unit: "Celsius".to_string(),
|
||||
},
|
||||
};
|
||||
|
||||
registry.register(tool).unwrap();
|
||||
|
||||
let _result = cx
|
||||
.update(|cx| {
|
||||
registry.call(
|
||||
&ToolFunctionCall {
|
||||
name: "get_current_weather".to_string(),
|
||||
arguments: r#"{ "location": "San Francisco", "unit": "Celsius" }"#
|
||||
.to_string(),
|
||||
id: "test-123".to_string(),
|
||||
result: None,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
// assert!(result.is_ok());
|
||||
// let result = result.unwrap();
|
||||
|
||||
// let expected = r#"{"location":"San Francisco","temperature":21.0,"unit":"Celsius"}"#;
|
||||
|
||||
// todo!(): Put this back in after the interface is stabilized
|
||||
// assert_eq!(result, expected);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_openai_weather_example(cx: &mut TestAppContext) {
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
let tool = WeatherTool {
|
||||
current_weather: WeatherResult {
|
||||
location: "San Francisco".to_string(),
|
||||
temperature: 21.0,
|
||||
unit: "Celsius".to_string(),
|
||||
},
|
||||
};
|
||||
|
||||
let tools = vec![tool.definition()];
|
||||
assert_eq!(tools.len(), 1);
|
||||
|
||||
let expected = ToolFunctionDefinition {
|
||||
name: "get_current_weather".to_string(),
|
||||
description: "Fetches the current weather for a given location.".to_string(),
|
||||
parameters: schema_for!(WeatherQuery),
|
||||
};
|
||||
|
||||
assert_eq!(tools[0].name, expected.name);
|
||||
assert_eq!(tools[0].description, expected.description);
|
||||
|
||||
let expected_schema = serde_json::to_value(&tools[0].parameters).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
expected_schema,
|
||||
json!({
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"title": "WeatherQuery",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"location": {
|
||||
"type": "string"
|
||||
},
|
||||
"unit": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": ["location", "unit"]
|
||||
})
|
||||
);
|
||||
|
||||
let args = json!({
|
||||
"location": "San Francisco",
|
||||
"unit": "Celsius"
|
||||
});
|
||||
|
||||
let query: WeatherQuery = serde_json::from_value(args).unwrap();
|
||||
|
||||
let result = cx.update(|cx| tool.execute(&query, cx)).await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let result = result.unwrap();
|
||||
|
||||
assert_eq!(result, tool.current_weather);
|
||||
}
|
||||
}
|
||||
crates/assistant_tooling/src/tool.rs (new file, 160 lines)
@@ -0,0 +1,160 @@
|
||||
use anyhow::Result;
|
||||
use gpui::{div, AnyElement, AppContext, Element, ParentElement as _, Task, WindowContext};
|
||||
use schemars::{schema::RootSchema, schema_for, JsonSchema};
|
||||
use serde::Deserialize;
|
||||
use std::{
|
||||
any::Any,
|
||||
fmt::{Debug, Display},
|
||||
};
|
||||
|
||||
#[derive(Default, Deserialize)]
|
||||
pub struct ToolFunctionCall {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub arguments: String,
|
||||
#[serde(skip)]
|
||||
pub result: Option<ToolFunctionCallResult>,
|
||||
}
|
||||
|
||||
pub enum ToolFunctionCallResult {
|
||||
NoSuchTool,
|
||||
ParsingFailed,
|
||||
ExecutionFailed {
|
||||
input: Box<dyn Any>,
|
||||
},
|
||||
Finished {
|
||||
input: Box<dyn Any>,
|
||||
output: Box<dyn Any>,
|
||||
render_fn: fn(
|
||||
// tool_call_id
|
||||
&str,
|
||||
// LanguageModelTool::Input
|
||||
&Box<dyn Any>,
|
||||
// LanguageModelTool::Output
|
||||
&Box<dyn Any>,
|
||||
&mut WindowContext,
|
||||
) -> AnyElement,
|
||||
format_fn: fn(
|
||||
// LanguageModelTool::Input
|
||||
&Box<dyn Any>,
|
||||
// LanguageModelTool::Output
|
||||
&Box<dyn Any>,
|
||||
) -> String,
|
||||
},
|
||||
}
|
||||
|
||||
impl ToolFunctionCallResult {
|
||||
pub fn render(
|
||||
&self,
|
||||
tool_name: &str,
|
||||
tool_call_id: &str,
|
||||
cx: &mut WindowContext,
|
||||
) -> AnyElement {
|
||||
match self {
|
||||
ToolFunctionCallResult::NoSuchTool => {
|
||||
div().child(format!("no such tool {tool_name}")).into_any()
|
||||
}
|
||||
ToolFunctionCallResult::ParsingFailed => div()
|
||||
.child(format!("failed to parse input for tool {tool_name}"))
|
||||
.into_any(),
|
||||
ToolFunctionCallResult::ExecutionFailed { .. } => div()
|
||||
.child(format!("failed to execute tool {tool_name}"))
|
||||
.into_any(),
|
||||
ToolFunctionCallResult::Finished {
|
||||
input,
|
||||
output,
|
||||
render_fn,
|
||||
..
|
||||
} => render_fn(tool_call_id, input, output, cx),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format(&self, tool: &str) -> String {
|
||||
match self {
|
||||
ToolFunctionCallResult::NoSuchTool => format!("no such tool {tool}"),
|
||||
ToolFunctionCallResult::ParsingFailed => {
|
||||
format!("failed to parse input for tool {tool}")
|
||||
}
|
||||
ToolFunctionCallResult::ExecutionFailed { input: _input } => {
|
||||
format!("failed to execute tool {tool}")
|
||||
}
|
||||
ToolFunctionCallResult::Finished {
|
||||
input,
|
||||
output,
|
||||
format_fn,
|
||||
..
|
||||
} => format_fn(input, output),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ToolFunctionDefinition {
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
pub parameters: RootSchema,
|
||||
}
|
||||
|
||||
impl Display for ToolFunctionDefinition {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let schema = serde_json::to_string(&self.parameters).ok();
|
||||
let schema = schema.unwrap_or("None".to_string());
|
||||
write!(f, "Name: {}:\n", self.name)?;
|
||||
write!(f, "Description: {}\n", self.description)?;
|
||||
write!(f, "Parameters: {}", schema)
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for ToolFunctionDefinition {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let schema = serde_json::to_string(&self.parameters).ok();
|
||||
let schema = schema.unwrap_or("None".to_string());
|
||||
|
||||
f.debug_struct("ToolFunctionDefinition")
|
||||
.field("name", &self.name)
|
||||
.field("description", &self.description)
|
||||
.field("parameters", &schema)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait LanguageModelTool {
|
||||
/// The input type that will be passed in to `execute` when the tool is called
|
||||
/// by the language model.
|
||||
type Input: for<'de> Deserialize<'de> + JsonSchema;
|
||||
|
||||
/// The output returned by executing the tool.
|
||||
type Output: 'static;
|
||||
|
||||
/// The name of the tool is exposed to the language model to allow
|
||||
/// the model to pick which tools to use. As this name is used to
|
||||
/// identify the tool within a tool registry, it should be unique.
|
||||
fn name(&self) -> String;
|
||||
|
||||
/// A description of the tool that can be used to _prompt_ the model
|
||||
/// as to what the tool does.
|
||||
fn description(&self) -> String;
|
||||
|
||||
/// The OpenAI Function definition for the tool, for direct use with OpenAI's API.
|
||||
fn definition(&self) -> ToolFunctionDefinition {
|
||||
let root_schema = schema_for!(Self::Input);
|
||||
|
||||
ToolFunctionDefinition {
|
||||
name: self.name(),
|
||||
description: self.description(),
|
||||
parameters: root_schema,
|
||||
}
|
||||
}
|
||||
|
||||
/// Execute the tool
|
||||
fn execute(&self, input: &Self::Input, cx: &AppContext) -> Task<Result<Self::Output>>;
|
||||
|
||||
fn render(
|
||||
tool_call_id: &str,
|
||||
input: &Self::Input,
|
||||
output: &Self::Output,
|
||||
cx: &mut WindowContext,
|
||||
) -> AnyElement;
|
||||
|
||||
fn format(input: &Self::Input, output: &Self::Output) -> String;
|
||||
}
|
||||
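The `Finished` variant of `ToolFunctionCallResult` stores the tool's input and output as `Box<dyn Any>` alongside plain `render_fn`/`format_fn` function pointers that know how to turn those boxes back into UI or text. The sketch below shows one way a registry could build that variant; it is not the registry code from this change, the helper names (`render_erased`, `format_erased`, `finished_result`) are made up, and it assumes the same imports as `tool.rs`.

```rust
// Hypothetical helpers, monomorphized per tool type `T` and then stored as
// plain `fn` pointers inside `ToolFunctionCallResult::Finished`.
fn render_erased<T: LanguageModelTool>(
    tool_call_id: &str,
    input: &Box<dyn Any>,
    output: &Box<dyn Any>,
    cx: &mut WindowContext,
) -> AnyElement
where
    T::Input: 'static,
{
    // Recover the concrete types before delegating to the tool's own render.
    let input = input.downcast_ref::<T::Input>().expect("input type mismatch");
    let output = output.downcast_ref::<T::Output>().expect("output type mismatch");
    T::render(tool_call_id, input, output, cx)
}

fn format_erased<T: LanguageModelTool>(input: &Box<dyn Any>, output: &Box<dyn Any>) -> String
where
    T::Input: 'static,
{
    let input = input.downcast_ref::<T::Input>().expect("input type mismatch");
    let output = output.downcast_ref::<T::Output>().expect("output type mismatch");
    T::format(input, output)
}

// Hypothetical constructor a registry could call once a tool has executed.
fn finished_result<T: LanguageModelTool>(
    input: T::Input,
    output: T::Output,
) -> ToolFunctionCallResult
where
    T::Input: 'static,
{
    ToolFunctionCallResult::Finished {
        input: Box::new(input),
        output: Box::new(output),
        render_fn: render_erased::<T>,
        format_fn: format_erased::<T>,
    }
}
```

Keeping `fn` pointers rather than trait objects means `ToolFunctionCallResult` has no generic parameters, so results from different tools can sit side by side in a single collection of `ToolFunctionCall`s.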
@@ -52,12 +52,19 @@ impl Render for Breadcrumbs {
|
||||
Some(BreadcrumbText {
|
||||
text: "⋯".into(),
|
||||
highlights: None,
|
||||
font: None,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
let highlighted_segments = segments.into_iter().map(|segment| {
|
||||
let mut text_style = cx.text_style();
|
||||
if let Some(font) = segment.font {
|
||||
text_style.font_family = font.family;
|
||||
text_style.font_features = font.features;
|
||||
text_style.font_style = font.style;
|
||||
text_style.font_weight = font.weight;
|
||||
}
|
||||
text_style.color = Color::Muted.color(cx);
|
||||
|
||||
StyledText::new(segment.text.replace('\n', ""))
|
||||
|
||||
@@ -1203,14 +1203,24 @@ impl Room {
|
||||
project: Model<Project>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<u64>> {
|
||||
if let Some(project_id) = project.read(cx).remote_id() {
|
||||
return Task::ready(Ok(project_id));
|
||||
}
|
||||
let request = if let Some(remote_project_id) = project.read(cx).remote_project_id() {
|
||||
self.client.request(proto::ShareProject {
|
||||
room_id: self.id(),
|
||||
worktrees: vec![],
|
||||
remote_project_id: Some(remote_project_id.0),
|
||||
})
|
||||
} else {
|
||||
if let Some(project_id) = project.read(cx).remote_id() {
|
||||
return Task::ready(Ok(project_id));
|
||||
}
|
||||
|
||||
self.client.request(proto::ShareProject {
|
||||
room_id: self.id(),
|
||||
worktrees: project.read(cx).worktree_metadata_protos(cx),
|
||||
remote_project_id: None,
|
||||
})
|
||||
};
|
||||
|
||||
let request = self.client.request(proto::ShareProject {
|
||||
room_id: self.id(),
|
||||
worktrees: project.read(cx).worktree_metadata_protos(cx),
|
||||
});
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let response = request.await?;
|
||||
|
||||
|
||||
@@ -11,9 +11,7 @@ pub use channel_chat::{
|
||||
mentions_to_proto, ChannelChat, ChannelChatEvent, ChannelMessage, ChannelMessageId,
|
||||
MessageParams,
|
||||
};
|
||||
pub use channel_store::{
|
||||
Channel, ChannelEvent, ChannelMembership, ChannelStore, DevServer, RemoteProject,
|
||||
};
|
||||
pub use channel_store::{Channel, ChannelEvent, ChannelMembership, ChannelStore};
|
||||
|
||||
#[cfg(test)]
|
||||
mod channel_store_tests;
|
||||
|
||||
@@ -3,10 +3,7 @@ mod channel_index;
|
||||
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage};
|
||||
use anyhow::{anyhow, Result};
|
||||
use channel_index::ChannelIndex;
|
||||
use client::{
|
||||
ChannelId, Client, ClientSettings, DevServerId, ProjectId, RemoteProjectId, Subscription, User,
|
||||
UserId, UserStore,
|
||||
};
|
||||
use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
|
||||
use gpui::{
|
||||
@@ -15,7 +12,7 @@ use gpui::{
|
||||
};
|
||||
use language::Capability;
|
||||
use rpc::{
|
||||
proto::{self, ChannelRole, ChannelVisibility, DevServerStatus},
|
||||
proto::{self, ChannelRole, ChannelVisibility},
|
||||
TypedEnvelope,
|
||||
};
|
||||
use settings::Settings;
|
||||
@@ -53,57 +50,12 @@ impl From<proto::HostedProject> for HostedProject {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RemoteProject {
|
||||
pub id: RemoteProjectId,
|
||||
pub project_id: Option<ProjectId>,
|
||||
pub channel_id: ChannelId,
|
||||
pub name: SharedString,
|
||||
pub path: SharedString,
|
||||
pub dev_server_id: DevServerId,
|
||||
}
|
||||
|
||||
impl From<proto::RemoteProject> for RemoteProject {
|
||||
fn from(project: proto::RemoteProject) -> Self {
|
||||
Self {
|
||||
id: RemoteProjectId(project.id),
|
||||
project_id: project.project_id.map(|id| ProjectId(id)),
|
||||
channel_id: ChannelId(project.channel_id),
|
||||
name: project.name.into(),
|
||||
path: project.path.into(),
|
||||
dev_server_id: DevServerId(project.dev_server_id),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DevServer {
|
||||
pub id: DevServerId,
|
||||
pub channel_id: ChannelId,
|
||||
pub name: SharedString,
|
||||
pub status: DevServerStatus,
|
||||
}
|
||||
|
||||
impl From<proto::DevServer> for DevServer {
|
||||
fn from(dev_server: proto::DevServer) -> Self {
|
||||
Self {
|
||||
id: DevServerId(dev_server.dev_server_id),
|
||||
channel_id: ChannelId(dev_server.channel_id),
|
||||
status: dev_server.status(),
|
||||
name: dev_server.name.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ChannelStore {
|
||||
pub channel_index: ChannelIndex,
|
||||
channel_invitations: Vec<Arc<Channel>>,
|
||||
channel_participants: HashMap<ChannelId, Vec<Arc<User>>>,
|
||||
channel_states: HashMap<ChannelId, ChannelState>,
|
||||
hosted_projects: HashMap<ProjectId, HostedProject>,
|
||||
remote_projects: HashMap<RemoteProjectId, RemoteProject>,
|
||||
dev_servers: HashMap<DevServerId, DevServer>,
|
||||
|
||||
outgoing_invites: HashSet<(ChannelId, UserId)>,
|
||||
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
|
||||
@@ -133,8 +85,6 @@ pub struct ChannelState {
|
||||
observed_chat_message: Option<u64>,
|
||||
role: Option<ChannelRole>,
|
||||
projects: HashSet<ProjectId>,
|
||||
dev_servers: HashSet<DevServerId>,
|
||||
remote_projects: HashSet<RemoteProjectId>,
|
||||
}
|
||||
|
||||
impl Channel {
|
||||
@@ -265,8 +215,6 @@ impl ChannelStore {
|
||||
channel_index: ChannelIndex::default(),
|
||||
channel_participants: Default::default(),
|
||||
hosted_projects: Default::default(),
|
||||
remote_projects: Default::default(),
|
||||
dev_servers: Default::default(),
|
||||
outgoing_invites: Default::default(),
|
||||
opened_buffers: Default::default(),
|
||||
opened_chats: Default::default(),
|
||||
@@ -366,40 +314,6 @@ impl ChannelStore {
|
||||
projects
|
||||
}
|
||||
|
||||
pub fn dev_servers_for_id(&self, channel_id: ChannelId) -> Vec<DevServer> {
|
||||
let mut dev_servers: Vec<DevServer> = self
|
||||
.channel_states
|
||||
.get(&channel_id)
|
||||
.map(|state| state.dev_servers.clone())
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.flat_map(|id| self.dev_servers.get(&id).cloned())
|
||||
.collect();
|
||||
dev_servers.sort_by_key(|s| (s.name.clone(), s.id));
|
||||
dev_servers
|
||||
}
|
||||
|
||||
pub fn find_dev_server_by_id(&self, id: DevServerId) -> Option<&DevServer> {
|
||||
self.dev_servers.get(&id)
|
||||
}
|
||||
|
||||
pub fn find_remote_project_by_id(&self, id: RemoteProjectId) -> Option<&RemoteProject> {
|
||||
self.remote_projects.get(&id)
|
||||
}
|
||||
|
||||
pub fn remote_projects_for_id(&self, channel_id: ChannelId) -> Vec<RemoteProject> {
|
||||
let mut remote_projects: Vec<RemoteProject> = self
|
||||
.channel_states
|
||||
.get(&channel_id)
|
||||
.map(|state| state.remote_projects.clone())
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.flat_map(|id| self.remote_projects.get(&id).cloned())
|
||||
.collect();
|
||||
remote_projects.sort_by_key(|p| (p.name.clone(), p.id));
|
||||
remote_projects
|
||||
}
|
||||
|
||||
pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool {
|
||||
if let Some(buffer) = self.opened_buffers.get(&channel_id) {
|
||||
if let OpenedModelHandle::Open(buffer) = buffer {
|
||||
@@ -901,46 +815,6 @@ impl ChannelStore {
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn create_remote_project(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
dev_server_id: DevServerId,
|
||||
name: String,
|
||||
path: String,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<proto::CreateRemoteProjectResponse>> {
|
||||
let client = self.client.clone();
|
||||
cx.background_executor().spawn(async move {
|
||||
client
|
||||
.request(proto::CreateRemoteProject {
|
||||
channel_id: channel_id.0,
|
||||
dev_server_id: dev_server_id.0,
|
||||
name,
|
||||
path,
|
||||
})
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
||||
pub fn create_dev_server(
|
||||
&mut self,
|
||||
channel_id: ChannelId,
|
||||
name: String,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<proto::CreateDevServerResponse>> {
|
||||
let client = self.client.clone();
|
||||
cx.background_executor().spawn(async move {
|
||||
let result = client
|
||||
.request(proto::CreateDevServer {
|
||||
channel_id: channel_id.0,
|
||||
name,
|
||||
})
|
||||
.await?;
|
||||
Ok(result)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_channel_member_details(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
@@ -1221,11 +1095,7 @@ impl ChannelStore {
|
||||
|| !payload.latest_channel_message_ids.is_empty()
|
||||
|| !payload.latest_channel_buffer_versions.is_empty()
|
||||
|| !payload.hosted_projects.is_empty()
|
||||
|| !payload.deleted_hosted_projects.is_empty()
|
||||
|| !payload.dev_servers.is_empty()
|
||||
|| !payload.deleted_dev_servers.is_empty()
|
||||
|| !payload.remote_projects.is_empty()
|
||||
|| !payload.deleted_remote_projects.is_empty();
|
||||
|| !payload.deleted_hosted_projects.is_empty();
|
||||
|
||||
if channels_changed {
|
||||
if !payload.delete_channels.is_empty() {
|
||||
@@ -1313,60 +1183,6 @@ impl ChannelStore {
|
||||
.remove_hosted_project(old_project.project_id);
|
||||
}
|
||||
}
|
||||
|
||||
for remote_project in payload.remote_projects {
|
||||
let remote_project: RemoteProject = remote_project.into();
|
||||
if let Some(old_remote_project) = self
|
||||
.remote_projects
|
||||
.insert(remote_project.id, remote_project.clone())
|
||||
{
|
||||
self.channel_states
|
||||
.entry(old_remote_project.channel_id)
|
||||
.or_default()
|
||||
.remove_remote_project(old_remote_project.id);
|
||||
}
|
||||
self.channel_states
|
||||
.entry(remote_project.channel_id)
|
||||
.or_default()
|
||||
.add_remote_project(remote_project.id);
|
||||
}
|
||||
|
||||
for remote_project_id in payload.deleted_remote_projects {
|
||||
let remote_project_id = RemoteProjectId(remote_project_id);
|
||||
|
||||
if let Some(old_project) = self.remote_projects.remove(&remote_project_id) {
|
||||
self.channel_states
|
||||
.entry(old_project.channel_id)
|
||||
.or_default()
|
||||
.remove_remote_project(old_project.id);
|
||||
}
|
||||
}
|
||||
|
||||
for dev_server in payload.dev_servers {
|
||||
let dev_server: DevServer = dev_server.into();
|
||||
if let Some(old_server) = self.dev_servers.insert(dev_server.id, dev_server.clone())
|
||||
{
|
||||
self.channel_states
|
||||
.entry(old_server.channel_id)
|
||||
.or_default()
|
||||
.remove_dev_server(old_server.id);
|
||||
}
|
||||
self.channel_states
|
||||
.entry(dev_server.channel_id)
|
||||
.or_default()
|
||||
.add_dev_server(dev_server.id);
|
||||
}
|
||||
|
||||
for dev_server_id in payload.deleted_dev_servers {
|
||||
let dev_server_id = DevServerId(dev_server_id);
|
||||
|
||||
if let Some(old_server) = self.dev_servers.remove(&dev_server_id) {
|
||||
self.channel_states
|
||||
.entry(old_server.channel_id)
|
||||
.or_default()
|
||||
.remove_dev_server(old_server.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
@@ -1481,20 +1297,4 @@ impl ChannelState {
|
||||
fn remove_hosted_project(&mut self, project_id: ProjectId) {
|
||||
self.projects.remove(&project_id);
|
||||
}
|
||||
|
||||
fn add_remote_project(&mut self, remote_project_id: RemoteProjectId) {
|
||||
self.remote_projects.insert(remote_project_id);
|
||||
}
|
||||
|
||||
fn remove_remote_project(&mut self, remote_project_id: RemoteProjectId) {
|
||||
self.remote_projects.remove(&remote_project_id);
|
||||
}
|
||||
|
||||
fn add_dev_server(&mut self, dev_server_id: DevServerId) {
|
||||
self.dev_servers.insert(dev_server_id);
|
||||
}
|
||||
|
||||
fn remove_dev_server(&mut self, dev_server_id: DevServerId) {
|
||||
self.dev_servers.remove(&dev_server_id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -264,7 +264,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
channel.next_event(cx),
|
||||
channel.next_event(cx).await,
|
||||
ChannelChatEvent::MessagesUpdated {
|
||||
old_range: 2..2,
|
||||
new_count: 1,
|
||||
@@ -317,7 +317,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
channel.next_event(cx),
|
||||
channel.next_event(cx).await,
|
||||
ChannelChatEvent::MessagesUpdated {
|
||||
old_range: 0..0,
|
||||
new_count: 2,
|
||||
|
||||
@@ -20,6 +20,7 @@ path = "src/main.rs"
|
||||
anyhow.workspace = true
|
||||
clap.workspace = true
|
||||
ipc-channel = "0.18"
|
||||
release_channel.workspace = true
|
||||
serde.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ use serde::Deserialize;
|
||||
use std::{
|
||||
env,
|
||||
ffi::OsStr,
|
||||
fs::{self},
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use util::paths::PathLikeWithPosition;
|
||||
@@ -36,6 +36,9 @@ struct Args {
|
||||
/// Custom Zed.app path
|
||||
#[arg(short, long)]
|
||||
bundle_path: Option<PathBuf>,
|
||||
/// Run zed in dev-server mode
|
||||
#[arg(long)]
|
||||
dev_server_token: Option<String>,
|
||||
}
|
||||
|
||||
fn parse_path_with_position(
|
||||
@@ -53,10 +56,24 @@ struct InfoPlist {
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
// Intercept version designators
|
||||
#[cfg(target_os = "macos")]
|
||||
if let Some(channel) = std::env::args().nth(1).filter(|arg| arg.starts_with("--")) {
|
||||
// When the first argument names a release channel, spawn that channel's CLI and pass the remaining arguments along.
|
||||
use std::str::FromStr as _;
|
||||
|
||||
if let Ok(channel) = release_channel::ReleaseChannel::from_str(&channel[2..]) {
|
||||
return mac_os::spawn_channel_cli(channel, std::env::args().skip(2).collect());
|
||||
}
|
||||
}
|
||||
let args = Args::parse();
|
||||
|
||||
let bundle = Bundle::detect(args.bundle_path.as_deref()).context("Bundle detection")?;
|
||||
|
||||
if let Some(dev_server_token) = args.dev_server_token {
|
||||
return bundle.spawn(vec!["--dev-server-token".into(), dev_server_token]);
|
||||
}
|
||||
|
||||
if args.version {
|
||||
println!("{}", bundle.zed_version_string());
|
||||
return Ok(());
|
||||
@@ -159,6 +176,10 @@ mod linux {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
pub fn spawn(&self, _args: Vec<String>) -> anyhow::Result<()> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
pub fn zed_version_string(&self) -> String {
|
||||
unimplemented!()
|
||||
}
|
||||
@@ -192,6 +213,10 @@ mod windows {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
pub fn spawn(&self, _args: Vec<String>) -> anyhow::Result<()> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
pub fn zed_version_string(&self) -> String {
|
||||
unimplemented!()
|
||||
}
|
||||
@@ -200,14 +225,14 @@ mod windows {
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
mod mac_os {
|
||||
use anyhow::Context;
|
||||
use anyhow::{Context, Result};
|
||||
use core_foundation::{
|
||||
array::{CFArray, CFIndex},
|
||||
string::kCFStringEncodingUTF8,
|
||||
url::{CFURLCreateWithBytes, CFURL},
|
||||
};
|
||||
use core_services::{kLSLaunchDefaults, LSLaunchURLSpec, LSOpenFromURLSpec, TCFType};
|
||||
use std::{fs, path::Path, ptr};
|
||||
use std::{fs, path::Path, process::Command, ptr};
|
||||
|
||||
use cli::{CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME};
|
||||
use ipc_channel::ipc::{IpcOneShotServer, IpcReceiver, IpcSender};
|
||||
@@ -268,6 +293,15 @@ mod mac_os {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn spawn(&self, args: Vec<String>) -> Result<()> {
|
||||
let path = match self {
|
||||
Self::App { app_bundle, .. } => app_bundle.join("Contents/MacOS/zed"),
|
||||
Self::LocalPath { executable, .. } => executable.clone(),
|
||||
};
|
||||
Command::new(path).args(args).status()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn launch(&self) -> anyhow::Result<(IpcSender<CliRequest>, IpcReceiver<CliResponse>)> {
|
||||
let (server, server_name) =
|
||||
IpcOneShotServer::<IpcHandshake>::new().context("Handshake before Zed spawn")?;
|
||||
@@ -348,4 +382,33 @@ mod mac_os {
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn spawn_channel_cli(
|
||||
channel: release_channel::ReleaseChannel,
|
||||
leftover_args: Vec<String>,
|
||||
) -> Result<()> {
|
||||
use anyhow::bail;
|
||||
|
||||
let app_id_prompt = format!("id of app \"{}\"", channel.display_name());
|
||||
let app_id_output = Command::new("osascript")
|
||||
.arg("-e")
|
||||
.arg(&app_id_prompt)
|
||||
.output()?;
|
||||
if !app_id_output.status.success() {
|
||||
bail!("Could not determine app id for {}", channel.display_name());
|
||||
}
|
||||
let app_name = String::from_utf8(app_id_output.stdout)?.trim().to_owned();
|
||||
let app_path_prompt = format!("kMDItemCFBundleIdentifier == '{app_name}'");
|
||||
let app_path_output = Command::new("mdfind").arg(app_path_prompt).output()?;
|
||||
if !app_path_output.status.success() {
|
||||
bail!(
|
||||
"Could not determine app path for {}",
|
||||
channel.display_name()
|
||||
);
|
||||
}
|
||||
let app_path = String::from_utf8(app_path_output.stdout)?.trim().to_owned();
|
||||
let cli_path = format!("{app_path}/Contents/MacOS/cli");
|
||||
Command::new(cli_path).args(leftover_args).spawn()?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -132,7 +132,7 @@ pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
|
||||
move |_: &SignOut, cx| {
|
||||
if let Some(client) = client.upgrade() {
|
||||
cx.spawn(|cx| async move {
|
||||
client.disconnect(&cx);
|
||||
client.sign_out(&cx).await;
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
@@ -457,6 +457,14 @@ impl Client {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn production(cx: &mut AppContext) -> Arc<Self> {
|
||||
let clock = Arc::new(clock::RealSystemClock);
|
||||
let http = Arc::new(HttpClientWithUrl::new(
|
||||
&ClientSettings::get_global(cx).server_url,
|
||||
));
|
||||
Self::new(clock, http.clone(), cx)
|
||||
}
|
||||
|
||||
pub fn id(&self) -> u64 {
|
||||
self.id.load(Ordering::SeqCst)
|
||||
}
|
||||
@@ -1119,6 +1127,8 @@ impl Client {
|
||||
if let Some((login, token)) =
|
||||
IMPERSONATE_LOGIN.as_ref().zip(ADMIN_API_TOKEN.as_ref())
|
||||
{
|
||||
eprintln!("authenticate as admin {login}, {token}");
|
||||
|
||||
return Self::authenticate_as_admin(http, login.clone(), token.clone())
|
||||
.await;
|
||||
}
|
||||
@@ -1250,6 +1260,15 @@ impl Client {
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn sign_out(self: &Arc<Self>, cx: &AsyncAppContext) {
|
||||
self.state.write().credentials = None;
|
||||
self.disconnect(&cx);
|
||||
|
||||
if self.has_keychain_credentials(cx).await {
|
||||
delete_credentials_from_keychain(cx).await.log_err();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn disconnect(self: &Arc<Self>, cx: &AsyncAppContext) {
|
||||
self.peer.teardown();
|
||||
self.set_status(Status::SignedOut, cx);
|
||||
|
||||
@@ -30,7 +30,9 @@ pub struct ProjectId(pub u64);
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
|
||||
pub struct DevServerId(pub u64);
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
|
||||
#[derive(
|
||||
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
|
||||
)]
|
||||
pub struct RemoteProjectId(pub u64);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
|
||||
@@ -64,7 +64,7 @@ toml.workspace = true
|
||||
tower = "0.4"
|
||||
tower-http = { workspace = true, features = ["trace"] }
|
||||
tracing = "0.1.40"
|
||||
tracing-subscriber = { git = "https://github.com/tokio-rs/tracing", rev = "tracing-subscriber-0.3.18", features = ["env-filter", "json", "registry", "tracing-log"] } # workaround for https://github.com/tokio-rs/tracing/issues/2927
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry", "tracing-log"] } # workaround for https://github.com/tokio-rs/tracing/issues/2927
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
|
||||
@@ -93,6 +93,7 @@ notifications = { workspace = true, features = ["test-support"] }
|
||||
pretty_assertions.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
release_channel.workspace = true
|
||||
remote_projects.workspace = true
|
||||
rpc = { workspace = true, features = ["test-support"] }
|
||||
sea-orm = { version = "0.12.x", features = ["sqlx-sqlite"] }
|
||||
serde_json.workspace = true
|
||||
|
||||
@@ -6,7 +6,43 @@ It contains our back-end logic for collaboration, to which we connect from the Z
|
||||
|
||||
# Local Development
|
||||
|
||||
Detailed instructions on getting started are [here](https://zed.dev/docs/local-collaboration).
|
||||
## Database setup
|
||||
|
||||
Before you can run the collab server locally, you'll need to set up a zed Postgres database.
|
||||
|
||||
```
|
||||
script/bootstrap
|
||||
```
|
||||
|
||||
This script sets up the `zed` Postgres database and populates it with some users. It requires internet access because it fetches some users from the GitHub API.
|
||||
|
||||
The script will create several _admin_ users, whom you'll sign in as by default when developing locally. The GitHub logins for the default users are specified in the `seed.default.json` file.
|
||||
|
||||
To use a different set of admin users, create `crates/collab/seed.json`.
|
||||
|
||||
```json
|
||||
{
|
||||
"admins": ["yourgithubhere"],
|
||||
"channels": ["zed"],
|
||||
"number_of_users": 20
|
||||
}
|
||||
```
|
||||
|
||||
## Testing collaborative features locally
|
||||
|
||||
In one terminal, run Zed's collaboration server and the LiveKit dev server:
|
||||
|
||||
```
|
||||
foreman start
|
||||
```
|
||||
|
||||
In a second terminal, run two or more instances of Zed.
|
||||
|
||||
```
|
||||
script/zed-local -2
|
||||
```
|
||||
|
||||
This script starts one to four instances of Zed, depending on the `-2`, `-3` or `-4` flags. Each instance will be connected to the local `collab` server, signed in as a different user from `seed.json` or `seed.default.json`.
|
||||
|
||||
# Deployment
|
||||
|
||||
|
||||
@@ -398,26 +398,21 @@ CREATE TABLE hosted_projects (
|
||||
channel_id INTEGER NOT NULL REFERENCES channels(id),
|
||||
name TEXT NOT NULL,
|
||||
visibility TEXT NOT NULL,
|
||||
deleted_at TIMESTAMP NULL,
|
||||
dev_server_id INTEGER REFERENCES dev_servers(id),
|
||||
dev_server_path TEXT
|
||||
deleted_at TIMESTAMP NULL
|
||||
);
|
||||
CREATE INDEX idx_hosted_projects_on_channel_id ON hosted_projects (channel_id);
|
||||
CREATE UNIQUE INDEX uix_hosted_projects_on_channel_id_and_name ON hosted_projects (channel_id, name) WHERE (deleted_at IS NULL);
|
||||
|
||||
CREATE TABLE dev_servers (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
channel_id INTEGER NOT NULL REFERENCES channels(id),
|
||||
user_id INTEGER NOT NULL REFERENCES users(id),
|
||||
name TEXT NOT NULL,
|
||||
hashed_token TEXT NOT NULL
|
||||
);
|
||||
CREATE INDEX idx_dev_servers_on_channel_id ON dev_servers (channel_id);
|
||||
|
||||
CREATE TABLE remote_projects (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
channel_id INTEGER NOT NULL REFERENCES channels(id),
|
||||
dev_server_id INTEGER NOT NULL REFERENCES dev_servers(id),
|
||||
name TEXT NOT NULL,
|
||||
path TEXT NOT NULL
|
||||
);
|
||||
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
CREATE TABLE IF NOT EXISTS "embeddings" (
|
||||
"model" TEXT,
|
||||
"digest" BYTEA,
|
||||
"dimensions" FLOAT4[1536],
|
||||
"retrieved_at" TIMESTAMP NOT NULL DEFAULT now(),
|
||||
PRIMARY KEY ("model", "digest")
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS "idx_retrieved_at_on_embeddings" ON "embeddings" ("retrieved_at");
|
||||
@@ -0,0 +1,7 @@
|
||||
DELETE FROM remote_projects;
|
||||
DELETE FROM dev_servers;
|
||||
|
||||
ALTER TABLE dev_servers DROP COLUMN channel_id;
|
||||
ALTER TABLE dev_servers ADD COLUMN user_id INT NOT NULL REFERENCES users(id);
|
||||
|
||||
ALTER TABLE remote_projects DROP COLUMN channel_id;
|
||||
@@ -0,0 +1,3 @@
|
||||
ALTER TABLE remote_projects DROP COLUMN name;
|
||||
ALTER TABLE remote_projects
|
||||
ADD CONSTRAINT unique_path_constraint UNIQUE(dev_server_id, path);
|
||||
@@ -5,7 +5,8 @@
|
||||
"maxbrunsfeld",
|
||||
"iamnbutler",
|
||||
"mikayla-maki",
|
||||
"JosephTLyons"
|
||||
"JosephTLyons",
|
||||
"rgbkrk"
|
||||
],
|
||||
"channels": ["zed"],
|
||||
"number_of_users": 100
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use rpc::proto;
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub fn language_model_request_to_open_ai(
|
||||
request: proto::CompleteWithLanguageModel,
|
||||
@@ -9,24 +10,83 @@ pub fn language_model_request_to_open_ai(
|
||||
messages: request
|
||||
.messages
|
||||
.into_iter()
|
||||
.map(|message| {
|
||||
.map(|message: proto::LanguageModelRequestMessage| {
|
||||
let role = proto::LanguageModelRole::from_i32(message.role)
|
||||
.ok_or_else(|| anyhow!("invalid role {}", message.role))?;
|
||||
Ok(open_ai::RequestMessage {
|
||||
role: match role {
|
||||
proto::LanguageModelRole::LanguageModelUser => open_ai::Role::User,
|
||||
proto::LanguageModelRole::LanguageModelAssistant => {
|
||||
open_ai::Role::Assistant
|
||||
}
|
||||
proto::LanguageModelRole::LanguageModelSystem => open_ai::Role::System,
|
||||
|
||||
let openai_message = match role {
|
||||
proto::LanguageModelRole::LanguageModelUser => open_ai::RequestMessage::User {
|
||||
content: message.content,
|
||||
},
|
||||
content: message.content,
|
||||
})
|
||||
proto::LanguageModelRole::LanguageModelAssistant => {
|
||||
open_ai::RequestMessage::Assistant {
|
||||
content: Some(message.content),
|
||||
tool_calls: message
|
||||
.tool_calls
|
||||
.into_iter()
|
||||
.filter_map(|call| {
|
||||
Some(open_ai::ToolCall {
|
||||
id: call.id,
|
||||
content: match call.variant? {
|
||||
proto::tool_call::Variant::Function(f) => {
|
||||
open_ai::ToolCallContent::Function {
|
||||
function: open_ai::FunctionContent {
|
||||
name: f.name,
|
||||
arguments: f.arguments,
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
})
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
proto::LanguageModelRole::LanguageModelSystem => {
|
||||
open_ai::RequestMessage::System {
|
||||
content: message.content,
|
||||
}
|
||||
}
|
||||
proto::LanguageModelRole::LanguageModelTool => open_ai::RequestMessage::Tool {
|
||||
tool_call_id: message
|
||||
.tool_call_id
|
||||
.ok_or_else(|| anyhow!("tool message is missing tool call id"))?,
|
||||
content: message.content,
|
||||
},
|
||||
};
|
||||
|
||||
Ok(openai_message)
|
||||
})
|
||||
.collect::<Result<Vec<open_ai::RequestMessage>>>()?,
|
||||
stream: true,
|
||||
stop: request.stop,
|
||||
temperature: request.temperature,
|
||||
tools: request
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|tool| {
|
||||
Some(match tool.variant? {
|
||||
proto::chat_completion_tool::Variant::Function(f) => {
|
||||
open_ai::ToolDefinition::Function {
|
||||
function: open_ai::FunctionDefinition {
|
||||
name: f.name,
|
||||
description: f.description,
|
||||
parameters: if let Some(params) = &f.parameters {
|
||||
Some(
|
||||
serde_json::from_str(params)
|
||||
.context("failed to deserialize tool parameters")
|
||||
.log_err()?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect(),
|
||||
tool_choice: request.tool_choice,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -58,6 +118,9 @@ pub fn language_model_request_message_to_google_ai(
|
||||
proto::LanguageModelRole::LanguageModelUser => google_ai::Role::User,
|
||||
proto::LanguageModelRole::LanguageModelAssistant => google_ai::Role::Model,
|
||||
proto::LanguageModelRole::LanguageModelSystem => google_ai::Role::User,
|
||||
proto::LanguageModelRole::LanguageModelTool => {
|
||||
Err(anyhow!("we don't handle tool calls with google ai yet"))?
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -106,8 +106,12 @@ async fn get_extension_versions(
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct DownloadLatestExtensionParams {
|
||||
struct DownloadLatestExtensionPathParams {
|
||||
extension_id: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct DownloadLatestExtensionQueryParams {
|
||||
min_schema_version: Option<i32>,
|
||||
max_schema_version: Option<i32>,
|
||||
min_wasm_api_version: Option<SemanticVersion>,
|
||||
@@ -116,13 +120,14 @@ struct DownloadLatestExtensionParams {
|
||||
|
||||
async fn download_latest_extension(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
Path(params): Path<DownloadLatestExtensionParams>,
|
||||
Path(params): Path<DownloadLatestExtensionPathParams>,
|
||||
Query(query): Query<DownloadLatestExtensionQueryParams>,
|
||||
) -> Result<Redirect> {
|
||||
let constraints = maybe!({
|
||||
let min_schema_version = params.min_schema_version?;
|
||||
let max_schema_version = params.max_schema_version?;
|
||||
let min_wasm_api_version = params.min_wasm_api_version?;
|
||||
let max_wasm_api_version = params.max_wasm_api_version?;
|
||||
let min_schema_version = query.min_schema_version?;
|
||||
let max_schema_version = query.max_schema_version?;
|
||||
let min_wasm_api_version = query.min_wasm_api_version?;
|
||||
let max_wasm_api_version = query.max_wasm_api_version?;
|
||||
|
||||
Some(ExtensionVersionConstraints {
|
||||
schema_versions: min_schema_version..=max_schema_version,
|
||||
|
||||
@@ -655,8 +655,6 @@ pub struct ChannelsForUser {
|
||||
pub channel_memberships: Vec<channel_member::Model>,
|
||||
pub channel_participants: HashMap<ChannelId, Vec<UserId>>,
|
||||
pub hosted_projects: Vec<proto::HostedProject>,
|
||||
pub dev_servers: Vec<dev_server::Model>,
|
||||
pub remote_projects: Vec<proto::RemoteProject>,
|
||||
|
||||
pub observed_buffer_versions: Vec<proto::ChannelBufferVersion>,
|
||||
pub observed_channel_messages: Vec<proto::ChannelMessageId>,
|
||||
@@ -764,6 +762,7 @@ pub struct Project {
|
||||
pub collaborators: Vec<ProjectCollaborator>,
|
||||
pub worktrees: BTreeMap<u64, Worktree>,
|
||||
pub language_servers: Vec<proto::LanguageServer>,
|
||||
pub remote_project_id: Option<RemoteProjectId>,
|
||||
}
|
||||
|
||||
pub struct ProjectCollaborator {
|
||||
@@ -786,8 +785,7 @@ impl ProjectCollaborator {
|
||||
#[derive(Debug)]
|
||||
pub struct LeftProject {
|
||||
pub id: ProjectId,
|
||||
pub host_user_id: Option<UserId>,
|
||||
pub host_connection_id: Option<ConnectionId>,
|
||||
pub should_unshare: bool,
|
||||
pub connection_ids: Vec<ConnectionId>,
|
||||
}
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ pub mod channels;
|
||||
pub mod contacts;
|
||||
pub mod contributors;
|
||||
pub mod dev_servers;
|
||||
pub mod embeddings;
|
||||
pub mod extensions;
|
||||
pub mod hosted_projects;
|
||||
pub mod messages;
|
||||
|
||||
@@ -640,15 +640,10 @@ impl Database {
|
||||
.get_hosted_projects(&channel_ids, &roles_by_channel_id, tx)
|
||||
.await?;
|
||||
|
||||
let dev_servers = self.get_dev_servers(&channel_ids, tx).await?;
|
||||
let remote_projects = self.get_remote_projects(&channel_ids, tx).await?;
|
||||
|
||||
Ok(ChannelsForUser {
|
||||
channel_memberships,
|
||||
channels,
|
||||
hosted_projects,
|
||||
dev_servers,
|
||||
remote_projects,
|
||||
channel_participants,
|
||||
latest_buffer_versions,
|
||||
latest_channel_messages,
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
use sea_orm::{ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, QueryFilter};
|
||||
use rpc::proto;
|
||||
use sea_orm::{
|
||||
ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, IntoActiveModel, QueryFilter,
|
||||
};
|
||||
|
||||
use super::{channel, dev_server, ChannelId, Database, DevServerId, UserId};
|
||||
use super::{dev_server, remote_project, Database, DevServerId, UserId};
|
||||
|
||||
impl Database {
|
||||
pub async fn get_dev_server(
|
||||
@@ -16,40 +19,105 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_dev_servers(
|
||||
pub async fn get_dev_servers(&self, user_id: UserId) -> crate::Result<Vec<dev_server::Model>> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(dev_server::Entity::find()
|
||||
.filter(dev_server::Column::UserId.eq(user_id))
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn remote_projects_update(
|
||||
&self,
|
||||
channel_ids: &Vec<ChannelId>,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<proto::RemoteProjectsUpdate> {
|
||||
self.transaction(
|
||||
|tx| async move { self.remote_projects_update_internal(user_id, &tx).await },
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn remote_projects_update_internal(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<Vec<dev_server::Model>> {
|
||||
let servers = dev_server::Entity::find()
|
||||
.filter(dev_server::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
|
||||
) -> crate::Result<proto::RemoteProjectsUpdate> {
|
||||
let dev_servers = dev_server::Entity::find()
|
||||
.filter(dev_server::Column::UserId.eq(user_id))
|
||||
.all(tx)
|
||||
.await?;
|
||||
Ok(servers)
|
||||
|
||||
let remote_projects = remote_project::Entity::find()
|
||||
.filter(
|
||||
remote_project::Column::DevServerId
|
||||
.is_in(dev_servers.iter().map(|d| d.id).collect::<Vec<_>>()),
|
||||
)
|
||||
.find_also_related(super::project::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
Ok(proto::RemoteProjectsUpdate {
|
||||
dev_servers: dev_servers
|
||||
.into_iter()
|
||||
.map(|d| d.to_proto(proto::DevServerStatus::Offline))
|
||||
.collect(),
|
||||
remote_projects: remote_projects
|
||||
.into_iter()
|
||||
.map(|(remote_project, project)| remote_project.to_proto(project))
|
||||
.collect(),
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn create_dev_server(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
name: &str,
|
||||
hashed_access_token: &str,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<(channel::Model, dev_server::Model)> {
|
||||
) -> crate::Result<(dev_server::Model, proto::RemoteProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, user_id, &tx)
|
||||
.await?;
|
||||
|
||||
let dev_server = dev_server::Entity::insert(dev_server::ActiveModel {
|
||||
id: ActiveValue::NotSet,
|
||||
hashed_token: ActiveValue::Set(hashed_access_token.to_string()),
|
||||
channel_id: ActiveValue::Set(channel_id),
|
||||
name: ActiveValue::Set(name.to_string()),
|
||||
user_id: ActiveValue::Set(user_id),
|
||||
})
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok((channel, dev_server))
|
||||
let remote_projects = self.remote_projects_update_internal(user_id, &tx).await?;
|
||||
|
||||
Ok((dev_server, remote_projects))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_dev_server(
|
||||
&self,
|
||||
id: DevServerId,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<proto::RemoteProjectsUpdate> {
|
||||
self.transaction(|tx| async move {
|
||||
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
|
||||
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
|
||||
};
|
||||
if dev_server.user_id != user_id {
|
||||
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
remote_project::Entity::delete_many()
|
||||
.filter(remote_project::Column::DevServerId.eq(id))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
dev_server::Entity::delete(dev_server.into_active_model())
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let remote_projects = self.remote_projects_update_internal(user_id, &tx).await?;
|
||||
|
||||
Ok(remote_projects)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
crates/collab/src/db/queries/embeddings.rs (new file, 94 lines)
@@ -0,0 +1,94 @@
|
||||
use super::*;
|
||||
use time::Duration;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
impl Database {
|
||||
pub async fn get_embeddings(
|
||||
&self,
|
||||
model: &str,
|
||||
digests: &[Vec<u8>],
|
||||
) -> Result<HashMap<Vec<u8>, Vec<f32>>> {
|
||||
self.weak_transaction(|tx| async move {
|
||||
let embeddings = {
|
||||
let mut db_embeddings = embedding::Entity::find()
|
||||
.filter(
|
||||
embedding::Column::Model.eq(model).and(
|
||||
embedding::Column::Digest
|
||||
.is_in(digests.iter().map(|digest| digest.as_slice())),
|
||||
),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut embeddings = HashMap::default();
|
||||
while let Some(db_embedding) = db_embeddings.next().await {
|
||||
let db_embedding = db_embedding?;
|
||||
embeddings.insert(db_embedding.digest, db_embedding.dimensions);
|
||||
}
|
||||
embeddings
|
||||
};
|
||||
|
||||
if !embeddings.is_empty() {
|
||||
let now = OffsetDateTime::now_utc();
|
||||
let retrieved_at = PrimitiveDateTime::new(now.date(), now.time());
|
||||
|
||||
embedding::Entity::update_many()
|
||||
.filter(
|
||||
embedding::Column::Digest
|
||||
.is_in(embeddings.keys().map(|digest| digest.as_slice())),
|
||||
)
|
||||
.col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(embeddings)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn save_embeddings(
|
||||
&self,
|
||||
model: &str,
|
||||
embeddings: &HashMap<Vec<u8>, Vec<f32>>,
|
||||
) -> Result<()> {
|
||||
self.weak_transaction(|tx| async move {
|
||||
embedding::Entity::insert_many(embeddings.iter().map(|(digest, dimensions)| {
|
||||
let now_offset_datetime = OffsetDateTime::now_utc();
|
||||
let retrieved_at =
|
||||
PrimitiveDateTime::new(now_offset_datetime.date(), now_offset_datetime.time());
|
||||
|
||||
embedding::ActiveModel {
|
||||
model: ActiveValue::set(model.to_string()),
|
||||
digest: ActiveValue::set(digest.clone()),
|
||||
dimensions: ActiveValue::set(dimensions.clone()),
|
||||
retrieved_at: ActiveValue::set(retrieved_at),
|
||||
}
|
||||
}))
|
||||
.on_conflict(
|
||||
OnConflict::columns([embedding::Column::Model, embedding::Column::Digest])
|
||||
.do_nothing()
|
||||
.to_owned(),
|
||||
)
|
||||
.exec_without_returning(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn purge_old_embeddings(&self) -> Result<()> {
|
||||
self.weak_transaction(|tx| async move {
|
||||
embedding::Entity::delete_many()
|
||||
.filter(
|
||||
embedding::Column::RetrievedAt
|
||||
.lte(OffsetDateTime::now_utc() - Duration::days(60)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
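Taken together, these queries implement a small content-addressed cache keyed by `(model, digest)`: `get_embeddings` also bumps `retrieved_at` on the rows it returns, `save_embeddings` upserts with `do_nothing` on conflict, and `purge_old_embeddings` drops rows whose `retrieved_at` is more than 60 days old. A rough sketch of how a caller might combine them follows; the `Database` calls match the signatures above, but `digest_of` and `compute_embedding` are stand-ins for whatever the indexing pipeline actually uses, the map type is assumed to be the same `HashMap` used by `save_embeddings`, and error handling is simplified.

```rust
// Hypothetical caller: serve embeddings from the cache where possible,
// compute only the misses, then write those back for next time.
// `HashMap`, `digest_of`, and `compute_embedding` are assumed to be in scope.
async fn embeddings_for(
    db: &Database,
    model: &str,
    texts: &[String],
) -> anyhow::Result<Vec<Vec<f32>>> {
    let digests: Vec<Vec<u8>> = texts.iter().map(|text| digest_of(text)).collect();

    // Cache hits come back keyed by digest; their `retrieved_at` is refreshed.
    let mut embeddings = db.get_embeddings(model, &digests).await?;

    // Compute and persist anything the cache did not have.
    let mut missing = HashMap::default();
    for (text, digest) in texts.iter().zip(&digests) {
        if !embeddings.contains_key(digest) {
            missing.insert(digest.clone(), compute_embedding(model, text).await?);
        }
    }
    if !missing.is_empty() {
        db.save_embeddings(model, &missing).await?;
        embeddings.extend(missing);
    }

    // Return results in the same order as the input texts.
    Ok(digests.iter().map(|digest| embeddings[digest].clone()).collect())
}
```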
@@ -30,6 +30,7 @@ impl Database {
|
||||
room_id: RoomId,
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
remote_project_id: Option<RemoteProjectId>,
|
||||
) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let participant = room_participant::Entity::find()
|
||||
@@ -58,6 +59,30 @@ impl Database {
|
||||
return Err(anyhow!("guests cannot share projects"))?;
|
||||
}
|
||||
|
||||
if let Some(remote_project_id) = remote_project_id {
|
||||
let project = project::Entity::find()
|
||||
.filter(project::Column::RemoteProjectId.eq(Some(remote_project_id)))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no remote project"))?;
|
||||
|
||||
if project.room_id.is_some() {
|
||||
return Err(anyhow!("project already shared"))?;
|
||||
};
|
||||
|
||||
let project = project::Entity::update(project::ActiveModel {
|
||||
room_id: ActiveValue::Set(Some(room_id)),
|
||||
..project.into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
// todo! check user is a project-collaborator
|
||||
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
return Ok((project.id, room));
|
||||
}
|
||||
|
||||
let project = project::ActiveModel {
|
||||
room_id: ActiveValue::set(Some(participant.room_id)),
|
||||
host_user_id: ActiveValue::set(Some(participant.user_id)),
|
||||
@@ -111,6 +136,7 @@ impl Database {
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
user_id: Option<UserId>,
|
||||
) -> Result<TransactionGuard<(Option<proto::Room>, Vec<ConnectionId>)>> {
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
@@ -118,19 +144,37 @@ impl Database {
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project not found"))?;
|
||||
let room = if let Some(room_id) = project.room_id {
|
||||
Some(self.get_room(room_id, &tx).await?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
if project.host_connection()? == connection {
|
||||
let room = if let Some(room_id) = project.room_id {
|
||||
Some(self.get_room(room_id, &tx).await?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
project::Entity::delete(project.into_active_model())
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
Ok((room, guest_connection_ids))
|
||||
} else {
|
||||
Err(anyhow!("cannot unshare a project hosted by another user"))?
|
||||
return Ok((room, guest_connection_ids));
|
||||
}
|
||||
if let Some(remote_project_id) = project.remote_project_id {
|
||||
if let Some(user_id) = user_id {
|
||||
if user_id
|
||||
!= self
|
||||
.owner_for_remote_project(remote_project_id, &tx)
|
||||
.await?
|
||||
{
|
||||
Err(anyhow!("cannot unshare a project hosted by another user"))?
|
||||
}
|
||||
project::Entity::update(project::ActiveModel {
|
||||
room_id: ActiveValue::Set(None),
|
||||
..project.into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
return Ok((room, guest_connection_ids));
|
||||
}
|
||||
}
|
||||
|
||||
Err(anyhow!("cannot unshare a project hosted by another user"))?
|
||||
})
|
||||
.await
|
||||
}
|
||||
@@ -753,6 +797,7 @@ impl Database {
|
||||
name: language_server.name,
|
||||
})
|
||||
.collect(),
|
||||
remote_project_id: project.remote_project_id,
|
||||
};
|
||||
Ok((project, replica_id as ReplicaId))
|
||||
}
|
||||
@@ -794,8 +839,7 @@ impl Database {
|
||||
Ok(LeftProject {
|
||||
id: project.id,
|
||||
connection_ids,
|
||||
host_user_id: None,
|
||||
host_connection_id: None,
|
||||
should_unshare: false,
|
||||
})
|
||||
})
|
||||
.await
|
||||
@@ -832,7 +876,7 @@ impl Database {
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let connection_ids = collaborators
|
||||
let connection_ids: Vec<ConnectionId> = collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| collaborator.connection())
|
||||
.collect();
|
||||
@@ -870,8 +914,7 @@ impl Database {
|
||||
|
||||
let left_project = LeftProject {
|
||||
id: project_id,
|
||||
host_user_id: project.host_user_id,
|
||||
host_connection_id: Some(project.host_connection()?),
|
||||
should_unshare: connection == project.host_connection()?,
|
||||
connection_ids,
|
||||
};
|
||||
Ok((room, left_project))
|
||||
@@ -914,7 +957,7 @@ impl Database {
|
||||
capability: Capability,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<(project::Model, ChannelRole)> {
|
||||
let (project, remote_project) = project::Entity::find_by_id(project_id)
|
||||
let (mut project, remote_project) = project::Entity::find_by_id(project_id)
|
||||
.find_also_related(remote_project::Entity)
|
||||
.one(tx)
|
||||
.await?
|
||||
@@ -933,27 +976,44 @@ impl Database {
|
||||
PrincipalId::UserId(user_id) => user_id,
|
||||
};
|
||||
|
||||
let role = if let Some(remote_project) = remote_project {
|
||||
let channel = channel::Entity::find_by_id(remote_project.channel_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such channel"))?;
|
||||
|
||||
self.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?
|
||||
} else if let Some(room_id) = project.room_id {
|
||||
// what's the users role?
|
||||
let current_participant = room_participant::Entity::find()
|
||||
let role_from_room = if let Some(room_id) = project.room_id {
|
||||
room_participant::Entity::find()
|
||||
.filter(room_participant::Column::RoomId.eq(room_id))
|
||||
.filter(room_participant::Column::AnsweringConnectionId.eq(connection_id.id))
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such room"))?;
|
||||
|
||||
current_participant.role.unwrap_or(ChannelRole::Guest)
|
||||
.and_then(|participant| participant.role)
|
||||
} else {
|
||||
return Err(anyhow!("not authorized to read projects"))?;
|
||||
None
|
||||
};
|
||||
let role_from_remote_project = if let Some(remote_project) = remote_project {
|
||||
let dev_server = dev_server::Entity::find_by_id(remote_project.dev_server_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such channel"))?;
|
||||
if user_id == dev_server.user_id {
|
||||
// If the user left the room "uncleanly" they may rejoin the
|
||||
// remote project before leave_room runs. In that case kick
|
||||
// the project out of the room pre-emptively.
|
||||
if role_from_room.is_none() {
|
||||
project = project::Entity::update(project::ActiveModel {
|
||||
room_id: ActiveValue::Set(None),
|
||||
..project.into_active_model()
|
||||
})
|
||||
.exec(tx)
|
||||
.await?;
|
||||
}
|
||||
Some(ChannelRole::Admin)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let role = role_from_remote_project
|
||||
.or(role_from_room)
|
||||
.unwrap_or(ChannelRole::Banned);
|
||||
|
||||
match capability {
|
||||
Capability::ReadWrite => {
|
||||
|
||||
@@ -8,8 +8,8 @@ use sea_orm::{
|
||||
use crate::db::ProjectId;
|
||||
|
||||
use super::{
|
||||
channel, project, project_collaborator, remote_project, worktree, ChannelId, Database,
|
||||
DevServerId, RejoinedProject, RemoteProjectId, ResharedProject, ServerId, UserId,
|
||||
dev_server, project, project_collaborator, remote_project, worktree, Database, DevServerId,
|
||||
RejoinedProject, RemoteProjectId, ResharedProject, ServerId, UserId,
|
||||
};
|
||||
|
||||
impl Database {
|
||||
@@ -26,29 +26,6 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_remote_projects(
|
||||
&self,
|
||||
channel_ids: &Vec<ChannelId>,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<Vec<proto::RemoteProject>> {
|
||||
let servers = remote_project::Entity::find()
|
||||
.filter(remote_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
|
||||
.find_also_related(project::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
Ok(servers
|
||||
.into_iter()
|
||||
.map(|(remote_project, project)| proto::RemoteProject {
|
||||
id: remote_project.id.to_proto(),
|
||||
project_id: project.map(|p| p.id.to_proto()),
|
||||
channel_id: remote_project.channel_id.to_proto(),
|
||||
name: remote_project.name,
|
||||
dev_server_id: remote_project.dev_server_id.to_proto(),
|
||||
path: remote_project.path,
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub async fn get_remote_projects_for_dev_server(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
@@ -64,8 +41,6 @@ impl Database {
|
||||
.map(|(remote_project, project)| proto::RemoteProject {
|
||||
id: remote_project.id.to_proto(),
|
||||
project_id: project.map(|p| p.id.to_proto()),
|
||||
channel_id: remote_project.channel_id.to_proto(),
|
||||
name: remote_project.name,
|
||||
dev_server_id: remote_project.dev_server_id.to_proto(),
|
||||
path: remote_project.path,
|
||||
})
|
||||
@@ -74,6 +49,38 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn remote_project_ids_for_user(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<Vec<RemoteProjectId>> {
|
||||
let dev_servers = dev_server::Entity::find()
|
||||
.filter(dev_server::Column::UserId.eq(user_id))
|
||||
.find_with_related(remote_project::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
Ok(dev_servers
|
||||
.into_iter()
|
||||
.flat_map(|(_, projects)| projects.into_iter().map(|p| p.id))
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub async fn owner_for_remote_project(
|
||||
&self,
|
||||
remote_project_id: RemoteProjectId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<UserId> {
|
||||
let dev_server = remote_project::Entity::find_by_id(remote_project_id)
|
||||
.find_also_related(dev_server::Entity)
|
||||
.one(tx)
|
||||
.await?
|
||||
.and_then(|(_, dev_server)| dev_server)
|
||||
.ok_or_else(|| anyhow!("no remote project"))?;
|
||||
|
||||
Ok(dev_server.user_id)
|
||||
}
|
||||
|
||||
pub async fn get_stale_dev_server_projects(
|
||||
&self,
|
||||
connection: ConnectionId,
|
||||
@@ -95,28 +102,30 @@ impl Database {
|
||||
|
||||
pub async fn create_remote_project(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
dev_server_id: DevServerId,
|
||||
name: &str,
|
||||
path: &str,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<(channel::Model, remote_project::Model)> {
|
||||
) -> crate::Result<(remote_project::Model, proto::RemoteProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &tx).await?;
|
||||
self.check_user_is_channel_admin(&channel, user_id, &tx)
|
||||
.await?;
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
|
||||
if dev_server.user_id != user_id {
|
||||
return Err(anyhow!("not your dev server"))?;
|
||||
}
|
||||
|
||||
let project = remote_project::Entity::insert(remote_project::ActiveModel {
|
||||
name: ActiveValue::Set(name.to_string()),
|
||||
id: ActiveValue::NotSet,
|
||||
channel_id: ActiveValue::Set(channel_id),
|
||||
dev_server_id: ActiveValue::Set(dev_server_id),
|
||||
path: ActiveValue::Set(path.to_string()),
|
||||
})
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok((channel, project))
|
||||
let status = self.remote_projects_update_internal(user_id, &tx).await?;
|
||||
|
||||
Ok((project, status))
|
||||
})
|
||||
.await
|
||||
}
|
||||
@@ -127,8 +136,13 @@ impl Database {
|
||||
dev_server_id: DevServerId,
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
) -> crate::Result<proto::RemoteProject> {
|
||||
) -> crate::Result<(proto::RemoteProject, UserId, proto::RemoteProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
|
||||
|
||||
let remote_project = remote_project::Entity::find_by_id(remote_project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
@@ -168,7 +182,15 @@ impl Database {
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(remote_project.to_proto(Some(project)))
|
||||
let status = self
|
||||
.remote_projects_update_internal(dev_server.user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok((
|
||||
remote_project.to_proto(Some(project)),
|
||||
dev_server.user_id,
|
||||
status,
|
||||
))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
@@ -849,11 +849,32 @@ impl Database {
|
||||
.into_values::<_, QueryProjectIds>()
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
// Unshare any project in the room whose remote-project-id belongs to a dev server that this user owns.
|
||||
let remote_projects_for_user = self
|
||||
.remote_project_ids_for_user(leaving_participant.user_id, &tx)
|
||||
.await?;
|
||||
|
||||
let remote_projects_to_unshare = project::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::RoomId.eq(room_id))
|
||||
.add(
|
||||
project::Column::RemoteProjectId
|
||||
.is_in(remote_projects_for_user.clone()),
|
||||
),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|project| project.id)
|
||||
.collect::<HashSet<_>>();
|
||||
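// Aside: a plain-Rust sketch (not from the patch) of the selection just above — projects in
// the room whose remote project belongs to one of the leaving user's dev servers are
// collected into a set of ids to unshare. Types and ids are illustrative.
use std::collections::HashSet;

struct ProjectRow {
    id: u64,
    room_id: u64,
    remote_project_id: Option<u64>,
}

fn projects_to_unshare(rows: &[ProjectRow], room_id: u64, owned: &HashSet<u64>) -> HashSet<u64> {
    rows.iter()
        .filter(|p| p.room_id == room_id)
        .filter(|p| p.remote_project_id.map_or(false, |id| owned.contains(&id)))
        .map(|p| p.id)
        .collect()
}

fn main() {
    let owned: HashSet<u64> = [5].into_iter().collect();
    let rows = vec![
        ProjectRow { id: 1, room_id: 9, remote_project_id: Some(5) },
        ProjectRow { id: 2, room_id: 9, remote_project_id: None },
    ];
    let expected: HashSet<u64> = [1].into_iter().collect();
    assert_eq!(projects_to_unshare(&rows, 9, &owned), expected);
}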
let mut left_projects = HashMap::default();
|
||||
let mut collaborators = project_collaborator::Entity::find()
|
||||
.filter(project_collaborator::Column::ProjectId.is_in(project_ids))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
while let Some(collaborator) = collaborators.next().await {
|
||||
let collaborator = collaborator?;
|
||||
let left_project =
|
||||
@@ -861,9 +882,8 @@ impl Database {
|
||||
.entry(collaborator.project_id)
|
||||
.or_insert(LeftProject {
|
||||
id: collaborator.project_id,
|
||||
host_user_id: Default::default(),
|
||||
connection_ids: Default::default(),
|
||||
host_connection_id: None,
|
||||
should_unshare: false,
|
||||
});
|
||||
|
||||
let collaborator_connection_id = collaborator.connection();
|
||||
@@ -871,9 +891,10 @@ impl Database {
|
||||
left_project.connection_ids.push(collaborator_connection_id);
|
||||
}
|
||||
|
||||
if collaborator.is_host {
|
||||
left_project.host_user_id = Some(collaborator.user_id);
|
||||
left_project.host_connection_id = Some(collaborator_connection_id);
|
||||
if (collaborator.is_host && collaborator.connection() == connection)
|
||||
|| remote_projects_to_unshare.contains(&collaborator.project_id)
|
||||
{
|
||||
left_project.should_unshare = true;
|
||||
}
|
||||
}
|
||||
drop(collaborators);
|
||||
@@ -915,6 +936,17 @@ impl Database {
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
if !remote_projects_to_unshare.is_empty() {
|
||||
project::Entity::update_many()
|
||||
.filter(project::Column::Id.is_in(remote_projects_to_unshare))
|
||||
.set(project::ActiveModel {
|
||||
room_id: ActiveValue::Set(None),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
|
||||
let deleted = if room.participants.is_empty() {
|
||||
let result = room::Entity::delete_by_id(room_id).exec(&*tx).await?;
|
||||
@@ -1264,38 +1296,46 @@ impl Database {
|
||||
}
|
||||
drop(db_participants);
|
||||
|
||||
let mut db_projects = db_room
|
||||
let db_projects = db_room
|
||||
.find_related(project::Entity)
|
||||
.find_with_related(worktree::Entity)
|
||||
.stream(tx)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
while let Some(row) = db_projects.next().await {
|
||||
let (db_project, db_worktree) = row?;
|
||||
for (db_project, db_worktrees) in db_projects {
|
||||
let host_connection = db_project.host_connection()?;
|
||||
if let Some(participant) = participants.get_mut(&host_connection) {
|
||||
let project = if let Some(project) = participant
|
||||
.projects
|
||||
.iter_mut()
|
||||
.find(|project| project.id == db_project.id.to_proto())
|
||||
{
|
||||
project
|
||||
} else {
|
||||
participant.projects.push(proto::ParticipantProject {
|
||||
id: db_project.id.to_proto(),
|
||||
worktree_root_names: Default::default(),
|
||||
});
|
||||
participant.projects.last_mut().unwrap()
|
||||
};
|
||||
participant.projects.push(proto::ParticipantProject {
|
||||
id: db_project.id.to_proto(),
|
||||
worktree_root_names: Default::default(),
|
||||
});
|
||||
let project = participant.projects.last_mut().unwrap();
|
||||
|
||||
if let Some(db_worktree) = db_worktree {
|
||||
for db_worktree in db_worktrees {
|
||||
if db_worktree.visible {
|
||||
project.worktree_root_names.push(db_worktree.root_name);
|
||||
}
|
||||
}
|
||||
} else if let Some(remote_project_id) = db_project.remote_project_id {
|
||||
let host = self.owner_for_remote_project(remote_project_id, tx).await?;
|
||||
if let Some((_, participant)) = participants
|
||||
.iter_mut()
|
||||
.find(|(_, v)| v.user_id == host.to_proto())
|
||||
{
|
||||
participant.projects.push(proto::ParticipantProject {
|
||||
id: db_project.id.to_proto(),
|
||||
worktree_root_names: Default::default(),
|
||||
});
|
||||
let project = participant.projects.last_mut().unwrap();
|
||||
|
||||
for db_worktree in db_worktrees {
|
||||
if db_worktree.visible {
|
||||
project.worktree_root_names.push(db_worktree.root_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
drop(db_projects);
|
||||
|
||||
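// Aside: a sketch (not from the patch) of the per-project grouping after the switch from a
// streamed (project, Option<worktree>) join to find_with_related — each project now arrives
// with all of its worktrees at once, and only visible roots are reported to participants.
struct WorktreeRow {
    root_name: String,
    visible: bool,
}

fn visible_root_names(worktrees: Vec<WorktreeRow>) -> Vec<String> {
    worktrees
        .into_iter()
        .filter(|w| w.visible)
        .map(|w| w.root_name)
        .collect()
}

fn main() {
    let worktrees = vec![
        WorktreeRow { root_name: "zed".into(), visible: true },
        WorktreeRow { root_name: ".git".into(), visible: false },
    ];
    assert_eq!(visible_root_names(worktrees), vec!["zed".to_string()]);
}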
let mut db_followers = db_room.find_related(follower::Entity).stream(tx).await?;
|
||||
let mut followers = Vec::new();
|
||||
|
||||
@@ -11,6 +11,7 @@ pub mod channel_message_mention;
pub mod contact;
pub mod contributor;
pub mod dev_server;
pub mod embedding;
pub mod extension;
pub mod extension_version;
pub mod feature_flag;

@@ -1,4 +1,4 @@
use crate::db::{ChannelId, DevServerId};
use crate::db::{DevServerId, UserId};
use rpc::proto;
use sea_orm::entity::prelude::*;

@@ -8,20 +8,28 @@ pub struct Model {
    #[sea_orm(primary_key)]
    pub id: DevServerId,
    pub name: String,
    pub channel_id: ChannelId,
    pub user_id: UserId,
    pub hashed_token: String,
}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
pub enum Relation {
    #[sea_orm(has_many = "super::remote_project::Entity")]
    RemoteProject,
}

impl Related<super::remote_project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::RemoteProject.def()
    }
}

impl Model {
    pub fn to_proto(&self, status: proto::DevServerStatus) -> proto::DevServer {
        proto::DevServer {
            dev_server_id: self.id.to_proto(),
            channel_id: self.channel_id.to_proto(),
            name: self.name.clone(),
            status: status as i32,
        }

crates/collab/src/db/tables/embedding.rs (new file, +18 lines)
@@ -0,0 +1,18 @@
use sea_orm::entity::prelude::*;
use time::PrimitiveDateTime;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "embeddings")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub model: String,
    #[sea_orm(primary_key)]
    pub digest: Vec<u8>,
    pub dimensions: Vec<f32>,
    pub retrieved_at: PrimitiveDateTime,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
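// Aside: a sketch of how a lookup key for this table can be derived, assuming the digest
// column holds a SHA-256 of the embedded text (the handler later in this diff hashes with
// the sha2 crate in exactly this way); (model, digest) mirrors the composite primary key.
use sha2::{Digest, Sha256};

fn embedding_cache_key(model: &str, text: &str) -> (String, Vec<u8>) {
    let mut hasher = Sha256::new();
    hasher.update(text.as_bytes());
    (model.to_string(), hasher.finalize().to_vec())
}

fn main() {
    let (model, digest) = embedding_cache_key("openai/text-embedding-3-small", "hello world");
    assert_eq!(model, "openai/text-embedding-3-small");
    assert_eq!(digest.len(), 32); // SHA-256 digests are 32 bytes
}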
@@ -1,5 +1,5 @@
use super::project;
use crate::db::{ChannelId, DevServerId, RemoteProjectId};
use crate::db::{DevServerId, RemoteProjectId};
use rpc::proto;
use sea_orm::entity::prelude::*;

@@ -8,9 +8,7 @@ use sea_orm::entity::prelude::*;
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: RemoteProjectId,
|
||||
pub channel_id: ChannelId,
|
||||
pub dev_server_id: DevServerId,
|
||||
pub name: String,
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
@@ -20,6 +18,12 @@ impl ActiveModelBehavior for ActiveModel {}
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_one = "super::project::Entity")]
|
||||
Project,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::dev_server::Entity",
|
||||
from = "Column::DevServerId",
|
||||
to = "super::dev_server::Column::Id"
|
||||
)]
|
||||
DevServer,
|
||||
}
|
||||
|
||||
impl Related<super::project::Entity> for Entity {
|
||||
@@ -28,14 +32,18 @@ impl Related<super::project::Entity> for Entity {
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::dev_server::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::DevServer.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn to_proto(&self, project: Option<project::Model>) -> proto::RemoteProject {
|
||||
proto::RemoteProject {
|
||||
id: self.id.to_proto(),
|
||||
project_id: project.map(|p| p.id.to_proto()),
|
||||
channel_id: self.channel_id.to_proto(),
|
||||
dev_server_id: self.dev_server_id.to_proto(),
|
||||
name: self.name.clone(),
|
||||
path: self.path.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ mod buffer_tests;
mod channel_tests;
mod contributor_tests;
mod db_tests;
mod embedding_tests;
mod extension_tests;
mod feature_flag_tests;
mod message_tests;

@@ -535,18 +535,18 @@ async fn test_project_count(db: &Arc<Database>) {
|
||||
.unwrap();
|
||||
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
|
||||
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], None)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
|
||||
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], None)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
|
||||
|
||||
// Projects shared by admins aren't counted.
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[])
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], None)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
|
||||
|
||||
crates/collab/src/db/tests/embedding_tests.rs (new file, +84 lines)
@@ -0,0 +1,84 @@
|
||||
use super::TestDb;
|
||||
use crate::db::embedding;
|
||||
use collections::HashMap;
|
||||
use sea_orm::{sea_query::Expr, ColumnTrait, EntityTrait, QueryFilter};
|
||||
use std::ops::Sub;
|
||||
use time::{Duration, OffsetDateTime, PrimitiveDateTime};
|
||||
|
||||
// SQLite does not support array arguments, so we only test this against a real postgres instance
|
||||
#[gpui::test]
|
||||
async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) {
|
||||
let test_db = TestDb::postgres(cx.executor().clone());
|
||||
let db = test_db.db();
|
||||
|
||||
let provider = "test_model";
|
||||
let digest1 = vec![1, 2, 3];
|
||||
let digest2 = vec![4, 5, 6];
|
||||
let embeddings = HashMap::from_iter([
|
||||
(digest1.clone(), vec![0.1, 0.2, 0.3]),
|
||||
(digest2.clone(), vec![0.4, 0.5, 0.6]),
|
||||
]);
|
||||
|
||||
// Save embeddings
|
||||
db.save_embeddings(provider, &embeddings).await.unwrap();
|
||||
|
||||
// Retrieve embeddings
|
||||
let retrieved_embeddings = db
|
||||
.get_embeddings(provider, &[digest1.clone(), digest2.clone()])
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(retrieved_embeddings.len(), 2);
|
||||
assert!(retrieved_embeddings.contains_key(&digest1));
|
||||
assert!(retrieved_embeddings.contains_key(&digest2));
|
||||
|
||||
// Check if the retrieved embeddings are correct
|
||||
assert_eq!(retrieved_embeddings[&digest1], vec![0.1, 0.2, 0.3]);
|
||||
assert_eq!(retrieved_embeddings[&digest2], vec![0.4, 0.5, 0.6]);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) {
|
||||
let test_db = TestDb::postgres(cx.executor().clone());
|
||||
let db = test_db.db();
|
||||
|
||||
let model = "test_model";
|
||||
let digest = vec![7, 8, 9];
|
||||
let embeddings = HashMap::from_iter([(digest.clone(), vec![0.7, 0.8, 0.9])]);
|
||||
|
||||
// Save old embeddings
|
||||
db.save_embeddings(model, &embeddings).await.unwrap();
|
||||
|
||||
// Reach into the DB and change the retrieved at to be > 60 days
|
||||
db.weak_transaction(|tx| {
|
||||
let digest = digest.clone();
|
||||
async move {
|
||||
let sixty_days_ago = OffsetDateTime::now_utc().sub(Duration::days(61));
|
||||
let retrieved_at = PrimitiveDateTime::new(sixty_days_ago.date(), sixty_days_ago.time());
|
||||
|
||||
embedding::Entity::update_many()
|
||||
.filter(
|
||||
embedding::Column::Model
|
||||
.eq(model)
|
||||
.and(embedding::Column::Digest.eq(digest)),
|
||||
)
|
||||
.col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at))
|
||||
.exec(&*tx)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Purge old embeddings
|
||||
db.purge_old_embeddings().await.unwrap();
|
||||
|
||||
// Try to retrieve the purged embeddings
|
||||
let retrieved_embeddings = db.get_embeddings(model, &[digest.clone()]).await.unwrap();
|
||||
assert!(
|
||||
retrieved_embeddings.is_empty(),
|
||||
"Old embeddings should have been purged"
|
||||
);
|
||||
}
|
||||
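// Aside: a sketch (not from the patch) of the staleness rule the purge test exercises,
// using the same time-crate types; the 60-day window is implied by the test's 61-day offset.
use time::{Duration, OffsetDateTime, PrimitiveDateTime};

fn is_stale(retrieved_at: PrimitiveDateTime, now: OffsetDateTime, max_age_days: i64) -> bool {
    let cutoff = now - Duration::days(max_age_days);
    retrieved_at < PrimitiveDateTime::new(cutoff.date(), cutoff.time())
}

fn main() {
    let now = OffsetDateTime::now_utc();
    let old = now - Duration::days(61);
    let old = PrimitiveDateTime::new(old.date(), old.time());
    assert!(is_stale(old, now, 60));
}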
@@ -6,8 +6,8 @@ use axum::{
|
||||
Extension, Router,
|
||||
};
|
||||
use collab::{
|
||||
api::fetch_extensions_from_blob_store_periodically, db, env, executor::Executor, AppState,
|
||||
Config, RateLimiter, Result,
|
||||
api::fetch_extensions_from_blob_store_periodically, db, env, executor::Executor,
|
||||
rpc::ResultExt, AppState, Config, RateLimiter, Result,
|
||||
};
|
||||
use db::Database;
|
||||
use std::{
|
||||
@@ -23,7 +23,7 @@ use tower_http::trace::TraceLayer;
|
||||
use tracing_subscriber::{
|
||||
filter::EnvFilter, fmt::format::JsonFields, util::SubscriberInitExt, Layer,
|
||||
};
|
||||
use util::ResultExt;
|
||||
use util::ResultExt as _;
|
||||
|
||||
const VERSION: &str = env!("CARGO_PKG_VERSION");
|
||||
const REVISION: Option<&'static str> = option_env!("GITHUB_SHA");
|
||||
@@ -90,6 +90,7 @@ async fn main() -> Result<()> {
|
||||
};
|
||||
|
||||
if is_collab {
|
||||
state.db.purge_old_embeddings().await.trace_err();
|
||||
RateLimiter::save_periodically(state.rate_limiter.clone(), state.executor.clone());
|
||||
}
|
||||
|
||||
|
||||
@@ -32,6 +32,8 @@ use axum::{
|
||||
use collections::{HashMap, HashSet};
|
||||
pub use connection_pool::{ConnectionPool, ZedVersion};
|
||||
use core::fmt::{self, Debug, Formatter};
|
||||
use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL};
|
||||
use sha2::Digest;
|
||||
|
||||
use futures::{
|
||||
channel::oneshot,
|
||||
@@ -253,6 +255,13 @@ impl DevServerSession {
|
||||
pub fn dev_server_id(&self) -> DevServerId {
|
||||
self.0.dev_server_id().unwrap()
|
||||
}
|
||||
|
||||
fn dev_server(&self) -> &dev_server::Model {
|
||||
match &self.0.principal {
|
||||
Principal::DevServer(dev_server) => dev_server,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for DevServerSession {
|
||||
@@ -403,6 +412,7 @@ impl Server {
|
||||
.add_request_handler(user_handler(rejoin_remote_projects))
|
||||
.add_request_handler(user_handler(create_remote_project))
|
||||
.add_request_handler(user_handler(create_dev_server))
|
||||
.add_request_handler(user_handler(delete_dev_server))
|
||||
.add_request_handler(dev_server_handler(share_remote_project))
|
||||
.add_request_handler(dev_server_handler(shutdown_dev_server))
|
||||
.add_request_handler(dev_server_handler(reconnect_dev_server))
|
||||
@@ -568,6 +578,22 @@ impl Server {
|
||||
app_state.config.google_ai_api_key.clone(),
|
||||
)
|
||||
})
|
||||
})
|
||||
.add_request_handler({
|
||||
user_handler(move |request, response, session| {
|
||||
get_cached_embeddings(request, response, session)
|
||||
})
|
||||
})
|
||||
.add_request_handler({
|
||||
let app_state = app_state.clone();
|
||||
user_handler(move |request, response, session| {
|
||||
compute_embeddings(
|
||||
request,
|
||||
response,
|
||||
session,
|
||||
app_state.config.openai_api_key.clone(),
|
||||
)
|
||||
})
|
||||
});
|
||||
|
||||
Arc::new(server)
|
||||
@@ -749,9 +775,7 @@ impl Server {
|
||||
Box::new(move |envelope, session| {
|
||||
let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
|
||||
let received_at = envelope.received_at;
|
||||
tracing::info!(
|
||||
"message received"
|
||||
);
|
||||
tracing::info!("message received");
|
||||
let start_time = Instant::now();
|
||||
let future = (handler)(*envelope, session);
|
||||
async move {
|
||||
@@ -760,12 +784,24 @@ impl Server {
|
||||
let processing_duration_ms = start_time.elapsed().as_micros() as f64 / 1000.0;
|
||||
let queue_duration_ms = total_duration_ms - processing_duration_ms;
|
||||
let payload_type = M::NAME;
|
||||
|
||||
match result {
|
||||
Err(error) => {
|
||||
// todo!(), why isn't this logged inside the span?
|
||||
tracing::error!(%error, total_duration_ms, processing_duration_ms, queue_duration_ms, payload_type, "error handling message")
|
||||
tracing::error!(
|
||||
?error,
|
||||
total_duration_ms,
|
||||
processing_duration_ms,
|
||||
queue_duration_ms,
|
||||
payload_type,
|
||||
"error handling message"
|
||||
)
|
||||
}
|
||||
Ok(()) => tracing::info!(total_duration_ms, processing_duration_ms, queue_duration_ms, "finished handling message"),
|
||||
Ok(()) => tracing::info!(
|
||||
total_duration_ms,
|
||||
processing_duration_ms,
|
||||
queue_duration_ms,
|
||||
"finished handling message"
|
||||
),
|
||||
}
|
||||
}
|
||||
.boxed()
|
||||
@@ -1026,12 +1062,14 @@ impl Server {
|
||||
.await?;
|
||||
}
|
||||
|
||||
let (contacts, channels_for_user, channel_invites) = future::try_join3(
|
||||
self.app_state.db.get_contacts(user.id),
|
||||
self.app_state.db.get_channels_for_user(user.id),
|
||||
self.app_state.db.get_channel_invites_for_user(user.id),
|
||||
)
|
||||
.await?;
|
||||
let (contacts, channels_for_user, channel_invites, remote_projects) =
|
||||
future::try_join4(
|
||||
self.app_state.db.get_contacts(user.id),
|
||||
self.app_state.db.get_channels_for_user(user.id),
|
||||
self.app_state.db.get_channel_invites_for_user(user.id),
|
||||
self.app_state.db.remote_projects_update(user.id),
|
||||
)
|
||||
.await?;
|
||||
|
||||
{
|
||||
let mut pool = self.connection_pool.lock();
|
||||
@@ -1049,9 +1087,10 @@ impl Server {
|
||||
)?;
|
||||
self.peer.send(
|
||||
connection_id,
|
||||
build_channels_update(channels_for_user, channel_invites, &pool),
|
||||
build_channels_update(channels_for_user, channel_invites),
|
||||
)?;
|
||||
}
|
||||
send_remote_projects_update(user.id, remote_projects, session).await;
|
||||
|
||||
if let Some(incoming_call) =
|
||||
self.app_state.db.incoming_call_for_user(user.id).await?
|
||||
@@ -1069,9 +1108,6 @@ impl Server {
|
||||
};
|
||||
pool.add_dev_server(connection_id, dev_server.id, zed_version);
|
||||
}
|
||||
update_dev_server_status(dev_server, proto::DevServerStatus::Online, &session)
|
||||
.await;
|
||||
// todo!() allow only one connection.
|
||||
|
||||
let projects = self
|
||||
.app_state
|
||||
@@ -1080,6 +1116,13 @@ impl Server {
|
||||
.await?;
|
||||
self.peer
|
||||
.send(connection_id, proto::DevServerInstructions { projects })?;
|
||||
|
||||
let status = self
|
||||
.app_state
|
||||
.db
|
||||
.remote_projects_update(dev_server.user_id)
|
||||
.await?;
|
||||
send_remote_projects_update(dev_server.user_id, status, &session).await;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1383,10 +1426,8 @@ async fn connection_lost(
|
||||
|
||||
update_user_contacts(session.user_id(), &session).await?;
|
||||
},
|
||||
Principal::DevServer(dev_server) => {
|
||||
lost_dev_server_connection(&session).await?;
|
||||
update_dev_server_status(&dev_server, proto::DevServerStatus::Offline, &session)
|
||||
.await;
|
||||
Principal::DevServer(_) => {
|
||||
lost_dev_server_connection(&session.for_dev_server().unwrap()).await?;
|
||||
},
|
||||
}
|
||||
},
|
||||
@@ -1923,6 +1964,9 @@ async fn share_project(
|
||||
RoomId::from_proto(request.room_id),
|
||||
session.connection_id,
|
||||
&request.worktrees,
|
||||
request
|
||||
.remote_project_id
|
||||
.map(|id| RemoteProjectId::from_proto(id)),
|
||||
)
|
||||
.await?;
|
||||
response.send(proto::ShareProjectResponse {
|
||||
@@ -1936,14 +1980,25 @@ async fn share_project(
|
||||
/// Unshare a project from the room.
|
||||
async fn unshare_project(message: proto::UnshareProject, session: Session) -> Result<()> {
|
||||
let project_id = ProjectId::from_proto(message.project_id);
|
||||
unshare_project_internal(project_id, &session).await
|
||||
unshare_project_internal(
|
||||
project_id,
|
||||
session.connection_id,
|
||||
session.user_id(),
|
||||
&session,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn unshare_project_internal(project_id: ProjectId, session: &Session) -> Result<()> {
|
||||
async fn unshare_project_internal(
|
||||
project_id: ProjectId,
|
||||
connection_id: ConnectionId,
|
||||
user_id: Option<UserId>,
|
||||
session: &Session,
|
||||
) -> Result<()> {
|
||||
let (room, guest_connection_ids) = &*session
|
||||
.db()
|
||||
.await
|
||||
.unshare_project(project_id, session.connection_id)
|
||||
.unshare_project(project_id, connection_id, user_id)
|
||||
.await?;
|
||||
|
||||
let message = proto::UnshareProject {
|
||||
@@ -1951,7 +2006,7 @@ async fn unshare_project_internal(project_id: ProjectId, session: &Session) -> R
|
||||
};
|
||||
|
||||
broadcast(
|
||||
Some(session.connection_id),
|
||||
Some(connection_id),
|
||||
guest_connection_ids.iter().copied(),
|
||||
|conn_id| session.peer.send(conn_id, message.clone()),
|
||||
);
|
||||
@@ -1962,13 +2017,13 @@ async fn unshare_project_internal(project_id: ProjectId, session: &Session) -> R
|
||||
Ok(())
|
||||
}
|
||||
|
||||
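// Aside: a sketch of the broadcast-except-sender pattern the unshare path relies on; the
// real broadcast in this file takes an optional connection to skip, the receiver ids, and a
// send closure (as the call above shows). Connection ids here are illustrative u32s.
fn broadcast_except(skip: Option<u32>, receivers: impl Iterator<Item = u32>, mut send: impl FnMut(u32)) {
    for conn_id in receivers {
        if Some(conn_id) == skip {
            continue; // the connection that initiated the unshare already knows
        }
        send(conn_id);
    }
}

fn main() {
    let mut sent = Vec::new();
    broadcast_except(Some(2), [1, 2, 3].into_iter(), |id| sent.push(id));
    assert_eq!(sent, vec![1, 3]);
}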
/// Share a project into the room.
|
||||
/// DevServer makes a project available online
|
||||
async fn share_remote_project(
|
||||
request: proto::ShareRemoteProject,
|
||||
response: Response<proto::ShareRemoteProject>,
|
||||
session: DevServerSession,
|
||||
) -> Result<()> {
|
||||
let remote_project = session
|
||||
let (remote_project, user_id, status) = session
|
||||
.db()
|
||||
.await
|
||||
.share_remote_project(
|
||||
@@ -1982,22 +2037,7 @@ async fn share_remote_project(
|
||||
return Err(anyhow!("failed to share remote project"))?;
|
||||
};
|
||||
|
||||
for (connection_id, _) in session
|
||||
.connection_pool()
|
||||
.await
|
||||
.channel_connection_ids(ChannelId::from_proto(remote_project.channel_id))
|
||||
{
|
||||
session
|
||||
.peer
|
||||
.send(
|
||||
connection_id,
|
||||
proto::UpdateChannels {
|
||||
remote_projects: vec![remote_project.clone()],
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.trace_err();
|
||||
}
|
||||
send_remote_projects_update(user_id, status, &session).await;
|
||||
|
||||
response.send(proto::ShareProjectResponse { project_id })?;
|
||||
|
||||
@@ -2063,19 +2103,21 @@ fn join_project_internal(
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let add_project_collaborator = proto::AddProjectCollaborator {
|
||||
project_id: project_id.to_proto(),
|
||||
collaborator: Some(proto::Collaborator {
|
||||
peer_id: Some(session.connection_id.into()),
|
||||
replica_id: replica_id.0 as u32,
|
||||
user_id: guest_user_id.to_proto(),
|
||||
}),
|
||||
};
|
||||
|
||||
for collaborator in &collaborators {
|
||||
session
|
||||
.peer
|
||||
.send(
|
||||
collaborator.peer_id.unwrap().into(),
|
||||
proto::AddProjectCollaborator {
|
||||
project_id: project_id.to_proto(),
|
||||
collaborator: Some(proto::Collaborator {
|
||||
peer_id: Some(session.connection_id.into()),
|
||||
replica_id: replica_id.0 as u32,
|
||||
user_id: guest_user_id.to_proto(),
|
||||
}),
|
||||
},
|
||||
add_project_collaborator.clone(),
|
||||
)
|
||||
.trace_err();
|
||||
}
|
||||
@@ -2087,7 +2129,10 @@ fn join_project_internal(
|
||||
replica_id: replica_id.0 as u32,
|
||||
collaborators: collaborators.clone(),
|
||||
language_servers: project.language_servers.clone(),
|
||||
role: project.role.into(), // todo
|
||||
role: project.role.into(),
|
||||
remote_project_id: project
|
||||
.remote_project_id
|
||||
.map(|remote_project_id| remote_project_id.0 as u64),
|
||||
})?;
|
||||
|
||||
for (worktree_id, worktree) in mem::take(&mut project.worktrees) {
|
||||
@@ -2170,8 +2215,6 @@ async fn leave_project(request: proto::LeaveProject, session: UserSession) -> Re
|
||||
let (room, project) = &*db.leave_project(project_id, sender_id).await?;
|
||||
tracing::info!(
|
||||
%project_id,
|
||||
host_user_id = ?project.host_user_id,
|
||||
host_connection_id = ?project.host_connection_id,
|
||||
"leave project"
|
||||
);
|
||||
|
||||
@@ -2206,13 +2249,33 @@ async fn create_remote_project(
|
||||
response: Response<proto::CreateRemoteProject>,
|
||||
session: UserSession,
|
||||
) -> Result<()> {
|
||||
let (channel, remote_project) = session
|
||||
let dev_server_id = DevServerId(request.dev_server_id as i32);
|
||||
let dev_server_connection_id = session
|
||||
.connection_pool()
|
||||
.await
|
||||
.dev_server_connection_id(dev_server_id);
|
||||
let Some(dev_server_connection_id) = dev_server_connection_id else {
|
||||
Err(ErrorCode::DevServerOffline
|
||||
.message("Cannot create a remote project when the dev server is offline".to_string())
|
||||
.anyhow())?
|
||||
};
|
||||
|
||||
let path = request.path.clone();
|
||||
// Check that the path exists on the dev server
|
||||
session
|
||||
.peer
|
||||
.forward_request(
|
||||
session.connection_id,
|
||||
dev_server_connection_id,
|
||||
proto::ValidateRemoteProjectRequest { path: path.clone() },
|
||||
)
|
||||
.await?;
|
||||
|
||||
let (remote_project, update) = session
|
||||
.db()
|
||||
.await
|
||||
.create_remote_project(
|
||||
ChannelId(request.channel_id as i32),
|
||||
DevServerId(request.dev_server_id as i32),
|
||||
&request.name,
|
||||
&request.path,
|
||||
session.user_id(),
|
||||
)
|
||||
@@ -2224,25 +2287,12 @@ async fn create_remote_project(
|
||||
.get_remote_projects_for_dev_server(remote_project.dev_server_id)
|
||||
.await?;
|
||||
|
||||
let update = proto::UpdateChannels {
|
||||
remote_projects: vec![remote_project.to_proto(None)],
|
||||
..Default::default()
|
||||
};
|
||||
let connection_pool = session.connection_pool().await;
|
||||
for (connection_id, role) in connection_pool.channel_connection_ids(channel.root_id()) {
|
||||
if role.can_see_all_descendants() {
|
||||
session.peer.send(connection_id, update.clone())?;
|
||||
}
|
||||
}
|
||||
session.peer.send(
|
||||
dev_server_connection_id,
|
||||
proto::DevServerInstructions { projects },
|
||||
)?;
|
||||
|
||||
let dev_server_id = remote_project.dev_server_id;
|
||||
let dev_server_connection_id = connection_pool.dev_server_connection_id(dev_server_id);
|
||||
if let Some(dev_server_connection_id) = dev_server_connection_id {
|
||||
session.peer.send(
|
||||
dev_server_connection_id,
|
||||
proto::DevServerInstructions { projects },
|
||||
)?;
|
||||
}
|
||||
send_remote_projects_update(session.user_id(), update, &session).await;
|
||||
|
||||
response.send(proto::CreateRemoteProjectResponse {
|
||||
remote_project: Some(remote_project.to_proto(None)),
|
||||
@@ -2258,37 +2308,56 @@ async fn create_dev_server(
|
||||
let access_token = auth::random_token();
|
||||
let hashed_access_token = auth::hash_access_token(&access_token);
|
||||
|
||||
let (channel, dev_server) = session
|
||||
let (dev_server, status) = session
|
||||
.db()
|
||||
.await
|
||||
.create_dev_server(
|
||||
ChannelId(request.channel_id as i32),
|
||||
&request.name,
|
||||
&hashed_access_token,
|
||||
session.user_id(),
|
||||
)
|
||||
.create_dev_server(&request.name, &hashed_access_token, session.user_id())
|
||||
.await?;
|
||||
|
||||
let update = proto::UpdateChannels {
|
||||
dev_servers: vec![dev_server.to_proto(proto::DevServerStatus::Offline)],
|
||||
..Default::default()
|
||||
};
|
||||
let connection_pool = session.connection_pool().await;
|
||||
for (connection_id, role) in connection_pool.channel_connection_ids(channel.root_id()) {
|
||||
if role.can_see_channel(channel.visibility) {
|
||||
session.peer.send(connection_id, update.clone())?;
|
||||
}
|
||||
}
|
||||
send_remote_projects_update(session.user_id(), status, &session).await;
|
||||
|
||||
response.send(proto::CreateDevServerResponse {
|
||||
dev_server_id: dev_server.id.0 as u64,
|
||||
channel_id: request.channel_id,
|
||||
access_token: auth::generate_dev_server_token(dev_server.id.0 as usize, access_token),
|
||||
name: request.name.clone(),
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn delete_dev_server(
|
||||
request: proto::DeleteDevServer,
|
||||
response: Response<proto::DeleteDevServer>,
|
||||
session: UserSession,
|
||||
) -> Result<()> {
|
||||
let dev_server_id = DevServerId(request.dev_server_id as i32);
|
||||
let dev_server = session.db().await.get_dev_server(dev_server_id).await?;
|
||||
if dev_server.user_id != session.user_id() {
|
||||
return Err(anyhow!(ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
let connection_id = session
|
||||
.connection_pool()
|
||||
.await
|
||||
.dev_server_connection_id(dev_server_id);
|
||||
if let Some(connection_id) = connection_id {
|
||||
shutdown_dev_server_internal(dev_server_id, connection_id, &session).await?;
|
||||
session
|
||||
.peer
|
||||
.send(connection_id, proto::ShutdownDevServer {})?;
|
||||
}
|
||||
|
||||
let status = session
|
||||
.db()
|
||||
.await
|
||||
.delete_dev_server(dev_server_id, session.user_id())
|
||||
.await?;
|
||||
|
||||
send_remote_projects_update(session.user_id(), status, &session).await;
|
||||
|
||||
response.send(proto::Ack {})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn rejoin_remote_projects(
|
||||
request: proto::RejoinRemoteProjects,
|
||||
response: Response<proto::RejoinRemoteProjects>,
|
||||
@@ -2385,8 +2454,15 @@ async fn shutdown_dev_server(
|
||||
session: DevServerSession,
|
||||
) -> Result<()> {
|
||||
response.send(proto::Ack {})?;
|
||||
shutdown_dev_server_internal(session.dev_server_id(), session.connection_id, &session).await
|
||||
}
|
||||
|
||||
async fn shutdown_dev_server_internal(
|
||||
dev_server_id: DevServerId,
|
||||
connection_id: ConnectionId,
|
||||
session: &Session,
|
||||
) -> Result<()> {
|
||||
let (remote_projects, dev_server) = {
|
||||
let dev_server_id = session.dev_server_id();
|
||||
let db = session.db().await;
|
||||
let remote_projects = db.get_remote_projects_for_dev_server(dev_server_id).await?;
|
||||
let dev_server = db.get_dev_server(dev_server_id).await?;
|
||||
@@ -2394,22 +2470,26 @@ async fn shutdown_dev_server(
|
||||
};
|
||||
|
||||
for project_id in remote_projects.iter().filter_map(|p| p.project_id) {
|
||||
unshare_project_internal(ProjectId::from_proto(project_id), &session.0).await?;
|
||||
unshare_project_internal(
|
||||
ProjectId::from_proto(project_id),
|
||||
connection_id,
|
||||
None,
|
||||
session,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let update = proto::UpdateChannels {
|
||||
remote_projects,
|
||||
dev_servers: vec![dev_server.to_proto(proto::DevServerStatus::Offline)],
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
for (connection_id, _) in session
|
||||
session
|
||||
.connection_pool()
|
||||
.await
|
||||
.channel_connection_ids(dev_server.channel_id)
|
||||
{
|
||||
session.peer.send(connection_id, update.clone()).trace_err();
|
||||
}
|
||||
.set_dev_server_offline(dev_server_id);
|
||||
|
||||
let status = session
|
||||
.db()
|
||||
.await
|
||||
.remote_projects_update(dev_server.user_id)
|
||||
.await?;
|
||||
send_remote_projects_update(dev_server.user_id, status, &session).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -4021,8 +4101,6 @@ async fn complete_with_open_ai(
|
||||
session: UserSession,
|
||||
api_key: Arc<str>,
|
||||
) -> Result<()> {
|
||||
const OPEN_AI_API_URL: &str = "https://api.openai.com/v1";
|
||||
|
||||
let mut completion_stream = open_ai::stream_completion(
|
||||
&session.http_client,
|
||||
OPEN_AI_API_URL,
|
||||
@@ -4030,7 +4108,7 @@ async fn complete_with_open_ai(
|
||||
crate::ai::language_model_request_to_open_ai(request)?,
|
||||
)
|
||||
.await
|
||||
.context("open_ai::stream_completion request failed")?;
|
||||
.context("open_ai::stream_completion request failed within collab")?;
|
||||
|
||||
while let Some(event) = completion_stream.next().await {
|
||||
let event = event?;
|
||||
@@ -4045,8 +4123,32 @@ async fn complete_with_open_ai(
|
||||
open_ai::Role::User => LanguageModelRole::LanguageModelUser,
|
||||
open_ai::Role::Assistant => LanguageModelRole::LanguageModelAssistant,
|
||||
open_ai::Role::System => LanguageModelRole::LanguageModelSystem,
|
||||
open_ai::Role::Tool => LanguageModelRole::LanguageModelTool,
|
||||
} as i32),
|
||||
content: choice.delta.content,
|
||||
tool_calls: choice
|
||||
.delta
|
||||
.tool_calls
|
||||
.into_iter()
|
||||
.map(|delta| proto::ToolCallDelta {
|
||||
index: delta.index as u32,
|
||||
id: delta.id,
|
||||
variant: match delta.function {
|
||||
Some(function) => {
|
||||
let name = function.name;
|
||||
let arguments = function.arguments;
|
||||
|
||||
Some(proto::tool_call_delta::Variant::Function(
|
||||
proto::tool_call_delta::FunctionCallDelta {
|
||||
name,
|
||||
arguments,
|
||||
},
|
||||
))
|
||||
}
|
||||
None => None,
|
||||
},
|
||||
})
|
||||
.collect(),
|
||||
}),
|
||||
finish_reason: choice.finish_reason,
|
||||
})
|
||||
@@ -4097,6 +4199,8 @@ async fn complete_with_google_ai(
|
||||
})
|
||||
.collect(),
|
||||
),
|
||||
// Tool calls are not supported for Google
|
||||
tool_calls: Vec::new(),
|
||||
}),
|
||||
finish_reason: candidate.finish_reason.map(|reason| reason.to_string()),
|
||||
})
|
||||
@@ -4119,24 +4223,28 @@ async fn complete_with_anthropic(
|
||||
let messages = request
|
||||
.messages
|
||||
.into_iter()
|
||||
.filter_map(|message| match message.role() {
|
||||
LanguageModelRole::LanguageModelUser => Some(anthropic::RequestMessage {
|
||||
role: anthropic::Role::User,
|
||||
content: message.content,
|
||||
}),
|
||||
LanguageModelRole::LanguageModelAssistant => Some(anthropic::RequestMessage {
|
||||
role: anthropic::Role::Assistant,
|
||||
content: message.content,
|
||||
}),
|
||||
// Anthropic's API breaks system instructions out as a separate field rather
|
||||
// than having a system message role.
|
||||
LanguageModelRole::LanguageModelSystem => {
|
||||
if !system_message.is_empty() {
|
||||
system_message.push_str("\n\n");
|
||||
}
|
||||
system_message.push_str(&message.content);
|
||||
.filter_map(|message| {
|
||||
match message.role() {
|
||||
LanguageModelRole::LanguageModelUser => Some(anthropic::RequestMessage {
|
||||
role: anthropic::Role::User,
|
||||
content: message.content,
|
||||
}),
|
||||
LanguageModelRole::LanguageModelAssistant => Some(anthropic::RequestMessage {
|
||||
role: anthropic::Role::Assistant,
|
||||
content: message.content,
|
||||
}),
|
||||
// Anthropic's API breaks system instructions out as a separate field rather
|
||||
// than having a system message role.
|
||||
LanguageModelRole::LanguageModelSystem => {
|
||||
if !system_message.is_empty() {
|
||||
system_message.push_str("\n\n");
|
||||
}
|
||||
system_message.push_str(&message.content);
|
||||
|
||||
None
|
||||
None
|
||||
}
|
||||
// We don't yet support tool calls for Anthropic
|
||||
LanguageModelRole::LanguageModelTool => None,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
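// Aside: a self-contained sketch (not from the patch) of the role-splitting done in the
// filter_map above — system text is accumulated into a separate string because Anthropic's
// API takes system instructions as their own field rather than as a message role.
enum Role {
    User,
    Assistant,
    System,
}

struct Message {
    role: Role,
    content: String,
}

fn split_messages(messages: Vec<Message>) -> (Vec<(&'static str, String)>, String) {
    let mut system = String::new();
    let chat = messages
        .into_iter()
        .filter_map(|message| match message.role {
            Role::User => Some(("user", message.content)),
            Role::Assistant => Some(("assistant", message.content)),
            Role::System => {
                if !system.is_empty() {
                    system.push_str("\n\n");
                }
                system.push_str(&message.content);
                None
            }
        })
        .collect();
    (chat, system)
}

fn main() {
    let messages = vec![
        Message { role: Role::System, content: "be terse".into() },
        Message { role: Role::User, content: "hi".into() },
    ];
    let (chat, system) = split_messages(messages);
    assert_eq!(system, "be terse");
    assert_eq!(chat, vec![("user", "hi".to_string())]);
}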
@@ -4180,6 +4288,7 @@ async fn complete_with_anthropic(
|
||||
delta: Some(proto::LanguageModelResponseMessage {
|
||||
role: Some(current_role as i32),
|
||||
content: Some(text),
|
||||
tool_calls: Vec::new(),
|
||||
}),
|
||||
finish_reason: None,
|
||||
}],
|
||||
@@ -4196,6 +4305,7 @@ async fn complete_with_anthropic(
|
||||
delta: Some(proto::LanguageModelResponseMessage {
|
||||
role: Some(current_role as i32),
|
||||
content: Some(text),
|
||||
tool_calls: Vec::new(),
|
||||
}),
|
||||
finish_reason: None,
|
||||
}],
|
||||
@@ -4276,6 +4386,128 @@ async fn count_tokens_with_language_model(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct ComputeEmbeddingsRateLimit;

impl RateLimit for ComputeEmbeddingsRateLimit {
    fn capacity() -> usize {
        std::env::var("EMBED_TEXTS_RATE_LIMIT_PER_HOUR")
            .ok()
            .and_then(|v| v.parse().ok())
            .unwrap_or(120) // Picked arbitrarily
    }

    fn refill_duration() -> chrono::Duration {
        chrono::Duration::hours(1)
    }

    fn db_name() -> &'static str {
        "compute-embeddings"
    }
}

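// Aside: a std-only sketch of the capacity lookup above; the env-var name and the 120/hour
// default are the ones the impl uses, and a missing or unparsable value falls back to the default.
fn rate_limit_capacity(env_var: &str, default: usize) -> usize {
    std::env::var(env_var)
        .ok()
        .and_then(|value| value.parse().ok())
        .unwrap_or(default)
}

fn main() {
    let capacity = rate_limit_capacity("EMBED_TEXTS_RATE_LIMIT_PER_HOUR", 120);
    println!("compute-embeddings capacity: {capacity}/hour");
}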
async fn compute_embeddings(
|
||||
request: proto::ComputeEmbeddings,
|
||||
response: Response<proto::ComputeEmbeddings>,
|
||||
session: UserSession,
|
||||
api_key: Option<Arc<str>>,
|
||||
) -> Result<()> {
|
||||
let api_key = api_key.context("no OpenAI API key configured on the server")?;
|
||||
authorize_access_to_language_models(&session).await?;
|
||||
|
||||
session
|
||||
.rate_limiter
|
||||
.check::<ComputeEmbeddingsRateLimit>(session.user_id())
|
||||
.await?;
|
||||
|
||||
let embeddings = match request.model.as_str() {
|
||||
"openai/text-embedding-3-small" => {
|
||||
open_ai::embed(
|
||||
&session.http_client,
|
||||
OPEN_AI_API_URL,
|
||||
&api_key,
|
||||
OpenAiEmbeddingModel::TextEmbedding3Small,
|
||||
request.texts.iter().map(|text| text.as_str()),
|
||||
)
|
||||
.await?
|
||||
}
|
||||
provider => return Err(anyhow!("unsupported embedding provider {:?}", provider))?,
|
||||
};
|
||||
|
||||
let embeddings = request
|
||||
.texts
|
||||
.iter()
|
||||
.map(|text| {
|
||||
let mut hasher = sha2::Sha256::new();
|
||||
hasher.update(text.as_bytes());
|
||||
let result = hasher.finalize();
|
||||
result.to_vec()
|
||||
})
|
||||
.zip(
|
||||
embeddings
|
||||
.data
|
||||
.into_iter()
|
||||
.map(|embedding| embedding.embedding),
|
||||
)
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let db = session.db().await;
|
||||
db.save_embeddings(&request.model, &embeddings)
|
||||
.await
|
||||
.context("failed to save embeddings")
|
||||
.trace_err();
|
||||
|
||||
response.send(proto::ComputeEmbeddingsResponse {
|
||||
embeddings: embeddings
|
||||
.into_iter()
|
||||
.map(|(digest, dimensions)| proto::Embedding { digest, dimensions })
|
||||
.collect(),
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct GetCachedEmbeddingsRateLimit;
|
||||
|
||||
impl RateLimit for GetCachedEmbeddingsRateLimit {
|
||||
fn capacity() -> usize {
|
||||
std::env::var("EMBED_TEXTS_RATE_LIMIT_PER_HOUR")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(120) // Picked arbitrarily
|
||||
}
|
||||
|
||||
fn refill_duration() -> chrono::Duration {
|
||||
chrono::Duration::hours(1)
|
||||
}
|
||||
|
||||
fn db_name() -> &'static str {
|
||||
"get-cached-embeddings"
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_cached_embeddings(
|
||||
request: proto::GetCachedEmbeddings,
|
||||
response: Response<proto::GetCachedEmbeddings>,
|
||||
session: UserSession,
|
||||
) -> Result<()> {
|
||||
authorize_access_to_language_models(&session).await?;
|
||||
|
||||
session
|
||||
.rate_limiter
|
||||
.check::<GetCachedEmbeddingsRateLimit>(session.user_id())
|
||||
.await?;
|
||||
|
||||
let db = session.db().await;
|
||||
let embeddings = db.get_embeddings(&request.model, &request.digests).await?;
|
||||
|
||||
response.send(proto::GetCachedEmbeddingsResponse {
|
||||
embeddings: embeddings
|
||||
.into_iter()
|
||||
.map(|(digest, dimensions)| proto::Embedding { digest, dimensions })
|
||||
.collect(),
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn authorize_access_to_language_models(session: &UserSession) -> Result<(), Error> {
|
||||
let db = session.db().await;
|
||||
let flags = db.get_user_flags(session.user_id()).await?;
|
||||
@@ -4488,7 +4720,7 @@ fn notify_membership_updated(
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let mut update = build_channels_update(result.new_channels, vec![], connection_pool);
|
||||
let mut update = build_channels_update(result.new_channels, vec![]);
|
||||
update.delete_channels = result
|
||||
.removed_channels
|
||||
.into_iter()
|
||||
@@ -4521,7 +4753,6 @@ fn build_update_user_channels(channels: &ChannelsForUser) -> proto::UpdateUserCh
|
||||
fn build_channels_update(
|
||||
channels: ChannelsForUser,
|
||||
channel_invites: Vec<db::Channel>,
|
||||
pool: &ConnectionPool,
|
||||
) -> proto::UpdateChannels {
|
||||
let mut update = proto::UpdateChannels::default();
|
||||
|
||||
@@ -4546,13 +4777,6 @@ fn build_channels_update(
|
||||
}
|
||||
|
||||
update.hosted_projects = channels.hosted_projects;
|
||||
update.dev_servers = channels
|
||||
.dev_servers
|
||||
.into_iter()
|
||||
.map(|dev_server| dev_server.to_proto(pool.dev_server_status(dev_server.id)))
|
||||
.collect();
|
||||
update.remote_projects = channels.remote_projects;
|
||||
|
||||
update
|
||||
}
|
||||
|
||||
@@ -4639,24 +4863,19 @@ fn channel_updated(
|
||||
);
|
||||
}
|
||||
|
||||
async fn update_dev_server_status(
|
||||
dev_server: &dev_server::Model,
|
||||
status: proto::DevServerStatus,
|
||||
async fn send_remote_projects_update(
|
||||
user_id: UserId,
|
||||
mut status: proto::RemoteProjectsUpdate,
|
||||
session: &Session,
|
||||
) {
|
||||
let pool = session.connection_pool().await;
|
||||
let connections = pool.channel_connection_ids(dev_server.channel_id);
|
||||
for (connection_id, _) in connections {
|
||||
session
|
||||
.peer
|
||||
.send(
|
||||
connection_id,
|
||||
proto::UpdateChannels {
|
||||
dev_servers: vec![dev_server.to_proto(status)],
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.trace_err();
|
||||
for dev_server in &mut status.dev_servers {
|
||||
dev_server.status =
|
||||
pool.dev_server_status(DevServerId(dev_server.dev_server_id as i32)) as i32;
|
||||
}
|
||||
let connections = pool.user_connection_ids(user_id);
|
||||
for connection_id in connections {
|
||||
session.peer.send(connection_id, status.clone()).trace_err();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4695,7 +4914,7 @@ async fn update_user_contacts(user_id: UserId, session: &Session) -> Result<()>
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn lost_dev_server_connection(session: &Session) -> Result<()> {
|
||||
async fn lost_dev_server_connection(session: &DevServerSession) -> Result<()> {
|
||||
log::info!("lost dev server connection, unsharing projects");
|
||||
let project_ids = session
|
||||
.db()
|
||||
@@ -4705,9 +4924,14 @@ async fn lost_dev_server_connection(session: &Session) -> Result<()> {
|
||||
|
||||
    for project_id in project_ids {
        // note: unshare re-checks that the connection ids match, so we get away with no transaction
        unshare_project_internal(project_id, &session).await?;
        unshare_project_internal(project_id, session.connection_id, None, &session).await?;
    }
|
||||
let user_id = session.dev_server().user_id;
|
||||
let update = session.db().await.remote_projects_update(user_id).await?;
|
||||
|
||||
send_remote_projects_update(user_id, update, session).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -4809,7 +5033,7 @@ async fn leave_channel_buffers_for_session(session: &Session) -> Result<()> {
|
||||
|
||||
fn project_left(project: &db::LeftProject, session: &UserSession) {
|
||||
for connection_id in &project.connection_ids {
|
||||
if project.host_user_id == Some(session.user_id()) {
|
||||
if project.should_unshare {
|
||||
session
|
||||
.peer
|
||||
.send(
|
||||
|
||||
@@ -13,6 +13,7 @@ pub struct ConnectionPool {
|
||||
connected_users: BTreeMap<UserId, ConnectedPrincipal>,
|
||||
connected_dev_servers: BTreeMap<DevServerId, ConnectionId>,
|
||||
channels: ChannelPool,
|
||||
offline_dev_servers: HashSet<DevServerId>,
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
@@ -31,7 +32,7 @@ impl fmt::Display for ZedVersion {
|
||||
|
||||
impl ZedVersion {
|
||||
pub fn can_collaborate(&self) -> bool {
|
||||
self.0 >= SemanticVersion::new(0, 127, 3)
|
||||
self.0 >= SemanticVersion::new(0, 129, 2)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -106,12 +107,17 @@ impl ConnectionPool {
|
||||
}
|
||||
PrincipalId::DevServerId(dev_server_id) => {
|
||||
self.connected_dev_servers.remove(&dev_server_id);
|
||||
self.offline_dev_servers.remove(&dev_server_id);
|
||||
}
|
||||
}
|
||||
self.connections.remove(&connection_id).unwrap();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn set_dev_server_offline(&mut self, dev_server_id: DevServerId) {
|
||||
self.offline_dev_servers.insert(dev_server_id);
|
||||
}
|
||||
|
||||
pub fn connections(&self) -> impl Iterator<Item = &Connection> {
|
||||
self.connections.values()
|
||||
}
|
||||
@@ -137,7 +143,9 @@ impl ConnectionPool {
|
||||
}
|
||||
|
||||
    pub fn dev_server_status(&self, dev_server_id: DevServerId) -> proto::DevServerStatus {
        if self.dev_server_connection_id(dev_server_id).is_some() {
        if self.dev_server_connection_id(dev_server_id).is_some()
            && !self.offline_dev_servers.contains(&dev_server_id)
        {
            proto::DevServerStatus::Online
        } else {
            proto::DevServerStatus::Offline

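// Aside: a sketch (not from the patch) of the status rule introduced here — a dev server
// counts as online only while it has a live connection and has not been explicitly marked
// offline by a shutdown request. Ids are illustrative u64s.
use std::collections::{HashMap, HashSet};

#[derive(Debug, PartialEq)]
enum Status {
    Online,
    Offline,
}

fn dev_server_status(id: u64, connections: &HashMap<u64, u32>, offline: &HashSet<u64>) -> Status {
    if connections.contains_key(&id) && !offline.contains(&id) {
        Status::Online
    } else {
        Status::Offline
    }
}

fn main() {
    let mut connections = HashMap::new();
    connections.insert(1u64, 99u32); // dev server 1 has connection 99
    let mut offline = HashSet::new();
    assert_eq!(dev_server_status(1, &connections, &offline), Status::Online);
    offline.insert(1); // shutdown_dev_server marks it offline while the socket is still open
    assert_eq!(dev_server_status(1, &connections, &offline), Status::Offline);
}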
@@ -1023,6 +1023,8 @@ async fn test_channel_link_notifications(
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
|
||||
// the new channel shows for b and c
|
||||
assert_channels_list_shape(
|
||||
client_a.channel_store(),
|
||||
|
||||
@@ -1,45 +1,40 @@
|
||||
use std::path::Path;
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
use call::ActiveCall;
|
||||
use editor::Editor;
|
||||
use fs::Fs;
|
||||
use gpui::VisualTestContext;
|
||||
use rpc::proto::DevServerStatus;
|
||||
use gpui::{TestAppContext, VisualTestContext, WindowHandle};
|
||||
use rpc::{proto::DevServerStatus, ErrorCode, ErrorExt};
|
||||
use serde_json::json;
|
||||
use workspace::{AppState, Workspace};
|
||||
|
||||
use crate::tests::TestServer;
|
||||
use crate::tests::{following_tests::join_channel, TestServer};
|
||||
|
||||
use super::TestClient;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
|
||||
let (server, client) = TestServer::start1(cx).await;
|
||||
|
||||
let channel_id = server
|
||||
.make_channel("test", None, (&client, cx), &mut [])
|
||||
.await;
|
||||
let store = cx.update(|cx| remote_projects::Store::global(cx).clone());
|
||||
|
||||
let resp = client
|
||||
.channel_store()
|
||||
let resp = store
|
||||
.update(cx, |store, cx| {
|
||||
store.create_dev_server(channel_id, "server-1".to_string(), cx)
|
||||
store.create_dev_server("server-1".to_string(), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
client.channel_store().update(cx, |store, _| {
|
||||
assert_eq!(store.dev_servers_for_id(channel_id).len(), 1);
|
||||
assert_eq!(store.dev_servers_for_id(channel_id)[0].name, "server-1");
|
||||
assert_eq!(
|
||||
store.dev_servers_for_id(channel_id)[0].status,
|
||||
DevServerStatus::Offline
|
||||
);
|
||||
store.update(cx, |store, _| {
|
||||
assert_eq!(store.dev_servers().len(), 1);
|
||||
assert_eq!(store.dev_servers()[0].name, "server-1");
|
||||
assert_eq!(store.dev_servers()[0].status, DevServerStatus::Offline);
|
||||
});
|
||||
|
||||
let dev_server = server.create_dev_server(resp.access_token, cx2).await;
|
||||
cx.executor().run_until_parked();
|
||||
client.channel_store().update(cx, |store, _| {
|
||||
assert_eq!(
|
||||
store.dev_servers_for_id(channel_id)[0].status,
|
||||
DevServerStatus::Online
|
||||
);
|
||||
store.update(cx, |store, _| {
|
||||
assert_eq!(store.dev_servers()[0].status, DevServerStatus::Online);
|
||||
});
|
||||
|
||||
dev_server
|
||||
@@ -54,13 +49,10 @@ async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppC
|
||||
)
|
||||
.await;
|
||||
|
||||
client
|
||||
.channel_store()
|
||||
store
|
||||
.update(cx, |store, cx| {
|
||||
store.create_remote_project(
|
||||
channel_id,
|
||||
client::DevServerId(resp.dev_server_id),
|
||||
"project-1".to_string(),
|
||||
"/remote".to_string(),
|
||||
cx,
|
||||
)
|
||||
@@ -70,12 +62,11 @@ async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppC
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let remote_workspace = client
|
||||
.channel_store()
|
||||
let remote_workspace = store
|
||||
.update(cx, |store, cx| {
|
||||
let projects = store.remote_projects_for_id(channel_id);
|
||||
let projects = store.remote_projects();
|
||||
assert_eq!(projects.len(), 1);
|
||||
assert_eq!(projects[0].name, "project-1");
|
||||
assert_eq!(projects[0].path, "/remote");
|
||||
workspace::join_remote_project(
|
||||
projects[0].project_id.unwrap(),
|
||||
client.app_state.clone(),
|
||||
@@ -87,19 +78,19 @@ async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppC
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let cx2 = VisualTestContext::from_window(remote_workspace.into(), cx).as_mut();
|
||||
cx2.simulate_keystrokes("cmd-p 1 enter");
|
||||
let cx = VisualTestContext::from_window(remote_workspace.into(), cx).as_mut();
|
||||
cx.simulate_keystrokes("cmd-p 1 enter");
|
||||
|
||||
let editor = remote_workspace
|
||||
.update(cx2, |ws, cx| {
|
||||
.update(cx, |ws, cx| {
|
||||
ws.active_item_as::<Editor>(cx).unwrap().clone()
|
||||
})
|
||||
.unwrap();
|
||||
editor.update(cx2, |ed, cx| {
|
||||
editor.update(cx, |ed, cx| {
|
||||
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
|
||||
});
|
||||
cx2.simulate_input("wow!");
|
||||
cx2.simulate_keystrokes("cmd-s");
|
||||
cx.simulate_input("wow!");
|
||||
cx.simulate_keystrokes("cmd-s");
|
||||
|
||||
let content = dev_server
|
||||
.fs()
|
||||
@@ -108,3 +99,263 @@ async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppC
|
||||
.unwrap();
|
||||
assert_eq!(content, "wow!remote\nremote\nremote\n");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_env_files(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
cx2: &mut gpui::TestAppContext,
|
||||
cx3: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_remote_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
let cx1 = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();
|
||||
cx1.simulate_keystrokes("cmd-p . e enter");
|
||||
|
||||
let editor = remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
ws.active_item_as::<Editor>(cx).unwrap().clone()
|
||||
})
|
||||
.unwrap();
|
||||
editor.update(cx1, |ed, cx| {
|
||||
assert_eq!(ed.text(cx).to_string(), "SECRET");
|
||||
});
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
channel_id,
|
||||
client1.app_state.clone(),
|
||||
Some(remote_workspace),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
assert!(ws.project().read(cx).is_shared());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
join_channel(channel_id, &client2, cx2).await.unwrap();
|
||||
cx2.executor().run_until_parked();
|
||||
|
||||
let (workspace2, cx2) = client2.active_workspace(cx2);
|
||||
let editor = workspace2.update(cx2, |ws, cx| {
|
||||
ws.active_item_as::<Editor>(cx).unwrap().clone()
|
||||
});
|
||||
// TODO: it'd be nice to hide .env files from other people
|
||||
editor.update(cx2, |ed, cx| {
|
||||
assert_eq!(ed.text(cx).to_string(), "SECRET");
|
||||
});
|
||||
}
|
||||
|
||||
async fn create_remote_project(
|
||||
server: &TestServer,
|
||||
client_app_state: Arc<AppState>,
|
||||
cx: &mut TestAppContext,
|
||||
cx_devserver: &mut TestAppContext,
|
||||
) -> (TestClient, WindowHandle<Workspace>) {
|
||||
let store = cx.update(|cx| remote_projects::Store::global(cx).clone());
|
||||
|
||||
let resp = store
|
||||
.update(cx, |store, cx| {
|
||||
store.create_dev_server("server-1".to_string(), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let dev_server = server
|
||||
.create_dev_server(resp.access_token, cx_devserver)
|
||||
.await;
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
dev_server
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/remote",
|
||||
json!({
|
||||
"1.txt": "remote\nremote\nremote",
|
||||
".env": "SECRET",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
store
|
||||
.update(cx, |store, cx| {
|
||||
store.create_remote_project(
|
||||
client::DevServerId(resp.dev_server_id),
|
||||
"/remote".to_string(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let workspace = store
|
||||
.update(cx, |store, cx| {
|
||||
let projects = store.remote_projects();
|
||||
assert_eq!(projects.len(), 1);
|
||||
assert_eq!(projects[0].path, "/remote");
|
||||
workspace::join_remote_project(projects[0].project_id.unwrap(), client_app_state, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.executor().run_until_parked();

(dev_server, workspace)
}

#[gpui::test]
async fn test_dev_server_leave_room(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;

let (_dev_server, remote_workspace) =
create_remote_project(&server, client1.app_state.clone(), cx1, cx3).await;

cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();

remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();

join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();

cx1.update(|cx| ActiveCall::global(cx).update(cx, |active_call, cx| active_call.hang_up(cx)))
.await
.unwrap();

cx1.executor().run_until_parked();

let (workspace, cx2) = client2.active_workspace(cx2);
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected()));
}

#[gpui::test]
async fn test_dev_server_reconnect(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (mut server, client1) = TestServer::start1(cx1).await;
let channel_id = server
.make_channel("test", None, (&client1, cx1), &mut [])
.await;

let (_dev_server, remote_workspace) =
create_remote_project(&server, client1.app_state.clone(), cx1, cx3).await;

cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();

remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();

drop(client1);

let client2 = server.create_client(cx2, "user_a").await;

let store = cx2.update(|cx| remote_projects::Store::global(cx).clone());

store
.update(cx2, |store, cx| {
let projects = store.remote_projects();
workspace::join_remote_project(
projects[0].project_id.unwrap(),
client2.app_state.clone(),
cx,
)
})
.await
.unwrap();
}

#[gpui::test]
async fn test_create_remote_project_path_validation(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1) = TestServer::start1(cx1).await;
let _channel_id = server
.make_channel("test", None, (&client1, cx1), &mut [])
.await;

// Creating a project with a path that does exist should not fail
let (_dev_server, _) =
create_remote_project(&server, client1.app_state.clone(), cx1, cx2).await;

cx1.executor().run_until_parked();

let store = cx1.update(|cx| remote_projects::Store::global(cx).clone());

let resp = store
.update(cx1, |store, cx| {
store.create_dev_server("server-2".to_string(), cx)
})
.await
.unwrap();

cx1.executor().run_until_parked();

let _dev_server = server.create_dev_server(resp.access_token, cx3).await;

cx1.executor().run_until_parked();

// Creating a remote project with a path that does not exist should fail
let result = store
.update(cx1, |store, cx| {
store.create_remote_project(
client::DevServerId(resp.dev_server_id),
"/notfound".to_string(),
cx,
)
})
.await;

cx1.executor().run_until_parked();

let error = result.unwrap_err();
assert!(matches!(
error.error_code(),
ErrorCode::RemoteProjectPathDoesNotExist
));
}

@@ -3,6 +3,7 @@ use crate::{
tests::{rust_lang, TestServer},
};
use call::ActiveCall;
use collections::HashMap;
use editor::{
actions::{
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, Redo, Rename, RevertSelectedHunks,
@@ -18,7 +19,10 @@ use language::{
language_settings::{AllLanguageSettings, InlayHintSettings},
FakeLspAdapter,
};
use project::SERVER_PROGRESS_DEBOUNCE_TIMEOUT;
use project::{
project_settings::{InlineBlameSettings, ProjectSettings},
SERVER_PROGRESS_DEBOUNCE_TIMEOUT,
};
use rpc::RECEIVE_TIMEOUT;
use serde_json::json;
use settings::SettingsStore;
@@ -732,12 +736,60 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
6..9
);
rename.editor.update(cx, |rename_editor, cx| {
let rename_selection = rename_editor.selections.newest::<usize>(cx);
assert_eq!(
rename_selection.range(),
0..3,
"Rename that was triggered from zero selection caret, should propose the whole word."
);
rename_editor.buffer().update(cx, |rename_buffer, cx| {
rename_buffer.edit([(0..3, "THREE")], None, cx);
});
});
});

// Cancel the rename, and repeat the same, but use selections instead of cursor movement
editor_b.update(cx_b, |editor, cx| {
editor.cancel(&editor::actions::Cancel, cx);
});
let prepare_rename = editor_b.update(cx_b, |editor, cx| {
editor.change_selections(None, cx, |s| s.select_ranges([7..8]));
editor.rename(&Rename, cx).unwrap()
});

fake_language_server
.handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
assert_eq!(params.position, lsp::Position::new(0, 8));
Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
lsp::Position::new(0, 6),
lsp::Position::new(0, 9),
))))
})
.next()
.await
.unwrap();
prepare_rename.await.unwrap();
editor_b.update(cx_b, |editor, cx| {
use editor::ToOffset;
let rename = editor.pending_rename().unwrap();
let buffer = editor.buffer().read(cx).snapshot(cx);
let lsp_rename_start = rename.range.start.to_offset(&buffer);
let lsp_rename_end = rename.range.end.to_offset(&buffer);
assert_eq!(lsp_rename_start..lsp_rename_end, 6..9);
rename.editor.update(cx, |rename_editor, cx| {
let rename_selection = rename_editor.selections.newest::<usize>(cx);
assert_eq!(
rename_selection.range(),
1..2,
"Rename that was triggered from a selection, should have the same selection range in the rename proposal"
);
rename_editor.buffer().update(cx, |rename_buffer, cx| {
rename_buffer.edit([(0..lsp_rename_end - lsp_rename_start, "THREE")], None, cx);
});
});
});

let confirm_rename = editor_b.update(cx_b, |editor, cx| {
Editor::confirm_rename(editor, &ConfirmRename, cx).unwrap()
});
@@ -1999,6 +2051,26 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA

cx_a.update(editor::init);
cx_b.update(editor::init);
// Turn inline blame off by default so no state is transferred without us explicitly doing so
let inline_blame_off_settings = Some(InlineBlameSettings {
enabled: false,
delay_ms: None,
min_column: None,
});
cx_a.update(|cx| {
cx.update_global(|store: &mut SettingsStore, cx| {
store.update_user_settings::<ProjectSettings>(cx, |settings| {
settings.git.inline_blame = inline_blame_off_settings;
});
});
});
cx_b.update(|cx| {
cx.update_global(|store: &mut SettingsStore, cx| {
store.update_user_settings::<ProjectSettings>(cx, |settings| {
settings.git.inline_blame = inline_blame_off_settings;
});
});
});

client_a
.fs()
@@ -2018,15 +2090,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
blame_entry("3a3a3a", 2..3),
blame_entry("4c4c4c", 3..4),
],
permalinks: [
("1b1b1b", "http://example.com/codehost/idx-0"),
("0d0d0d", "http://example.com/codehost/idx-1"),
("3a3a3a", "http://example.com/codehost/idx-2"),
("4c4c4c", "http://example.com/codehost/idx-3"),
]
.into_iter()
.map(|(sha, url)| (sha.parse().unwrap(), url.parse().unwrap()))
.collect(),
permalinks: HashMap::default(), // This field is deprecated
messages: [
("1b1b1b", "message for idx-0"),
("0d0d0d", "message for idx-1"),
@@ -2036,6 +2100,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
.into_iter()
.map(|(sha, message)| (sha.parse().unwrap(), message.into()))
.collect(),
remote_url: Some("git@github.com:zed-industries/zed.git".to_string()),
};
client_a.fs().set_blame_for_repo(
Path::new("/my-repo/.git"),
@@ -2100,13 +2165,11 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA

blame.update(cx, |blame, _| {
for (idx, entry) in entries.iter().flatten().enumerate() {
let details = blame.details_for_entry(entry).unwrap();
assert_eq!(details.message, format!("message for idx-{}", idx));
assert_eq!(
blame.permalink_for_entry(entry).unwrap().to_string(),
format!("http://example.com/codehost/idx-{}", idx)
);
assert_eq!(
blame.message_for_entry(entry).unwrap(),
format!("message for idx-{}", idx)
details.permalink.unwrap().to_string(),
format!("https://github.com/zed-industries/zed/commit/{}", entry.sha)
);
}
});

@@ -9,8 +9,9 @@ use anyhow::{anyhow, Result};
use call::{room, ActiveCall, ParticipantLocation, Room};
use client::{User, RECEIVE_TIMEOUT};
use collections::{HashMap, HashSet};
use fs::{repository::GitFileStatus, FakeFs, Fs as _, RemoveOptions};
use fs::{FakeFs, Fs as _, RemoveOptions};
use futures::{channel::mpsc, StreamExt as _};
use git::repository::GitFileStatus;
use gpui::{
px, size, AppContext, BackgroundExecutor, BorrowAppContext, Model, Modifiers, MouseButton,
MouseDownEvent, TestAppContext,
@@ -3742,6 +3743,10 @@ async fn test_leaving_project(

buffer_b2.read_with(cx_b, |buffer, _| assert_eq!(buffer.text(), "a-contents"));

project_a.read_with(cx_a, |project, _| {
assert_eq!(project.collaborators().len(), 2);
});

// Drop client B's connection and ensure client A and client C observe client B leaving.
client_b.disconnect(&cx_b.to_async());
executor.advance_clock(RECONNECT_TIMEOUT);

@@ -5,8 +5,9 @@ use async_trait::async_trait;
use call::ActiveCall;
use collections::{BTreeMap, HashMap};
use editor::Bias;
use fs::{repository::GitFileStatus, FakeFs, Fs as _};
use fs::{FakeFs, Fs as _};
use futures::StreamExt;
use git::repository::GitFileStatus;
use gpui::{BackgroundExecutor, Model, TestAppContext};
use language::{
range_to_lsp, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, PointUtf16,
@@ -1347,13 +1348,11 @@ impl RandomizedTest for ProjectCollaborationTest {
client.username
);

let host_saved_version_fingerprint =
host_buffer.read_with(host_cx, |b, _| b.saved_version_fingerprint());
let guest_saved_version_fingerprint =
guest_buffer.read_with(client_cx, |b, _| b.saved_version_fingerprint());
let host_is_dirty = host_buffer.read_with(host_cx, |b, _| b.is_dirty());
let guest_is_dirty = guest_buffer.read_with(client_cx, |b, _| b.is_dirty());
assert_eq!(
guest_saved_version_fingerprint, host_saved_version_fingerprint,
"guest {} saved fingerprint does not match host's for path {path:?} in project {project_id}",
guest_is_dirty, host_is_dirty,
"guest {} dirty state does not match host's for path {path:?} in project {project_id}",
client.username
);

@@ -284,6 +284,7 @@ impl TestServer {
collab_ui::init(&app_state, cx);
file_finder::init(cx);
menu::init();
remote_projects::init(client.clone(), cx);
settings::KeymapFile::load_asset("keymaps/default-macos.json", cx).unwrap();
});

@@ -39,7 +39,6 @@ db.workspace = true
editor.workspace = true
emojis.workspace = true
extensions_ui.workspace = true
feature_flags.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true

@@ -234,10 +234,11 @@ impl ChatPanel {
let channel_id = chat.read(cx).channel_id;
{
self.markdown_data.clear();
let chat = chat.read(cx);
self.message_list.reset(chat.message_count());

let chat = chat.read(cx);
let channel_name = chat.channel(cx).map(|channel| channel.name.clone());
let message_count = chat.message_count();
self.message_list.reset(message_count);
self.message_editor.update(cx, |editor, cx| {
editor.set_channel(channel_id, channel_name, cx);
editor.clear_reply_to_message_id();
@@ -531,6 +532,8 @@ impl ChatPanel {
&self.languages,
self.client.id(),
&message,
self.local_timezone,
cx,
)
});
el.child(
@@ -744,6 +747,8 @@ impl ChatPanel {
language_registry: &Arc<LanguageRegistry>,
current_user_id: u64,
message: &channel::ChannelMessage,
local_timezone: UtcOffset,
cx: &AppContext,
) -> RichText {
let mentions = message
.mentions
@@ -754,24 +759,39 @@ impl ChatPanel {
})
.collect::<Vec<_>>();

const MESSAGE_UPDATED: &str = " (edited)";
const MESSAGE_EDITED: &str = " (edited)";

let mut body = message.body.clone();

if message.edited_at.is_some() {
body.push_str(MESSAGE_UPDATED);
body.push_str(MESSAGE_EDITED);
}

let mut rich_text = rich_text::render_rich_text(body, &mentions, language_registry, None);
let mut rich_text = RichText::new(body, &mentions, language_registry);

if message.edited_at.is_some() {
let range = (rich_text.text.len() - MESSAGE_EDITED.len())..rich_text.text.len();
rich_text.highlights.push((
(rich_text.text.len() - MESSAGE_UPDATED.len())..rich_text.text.len(),
range.clone(),
Highlight::Highlight(HighlightStyle {
fade_out: Some(0.8),
color: Some(cx.theme().colors().text_muted),
..Default::default()
}),
));

if let Some(edit_timestamp) = message.edited_at {
let edit_timestamp_text = time_format::format_localized_timestamp(
edit_timestamp,
OffsetDateTime::now_utc(),
local_timezone,
time_format::TimestampFormat::Absolute,
);

rich_text.custom_ranges.push(range);
rich_text.set_tooltip_builder_for_custom_ranges(move |_, _, cx| {
Some(Tooltip::text(edit_timestamp_text.clone(), cx))
})
}
}
rich_text
}
@@ -1176,7 +1196,13 @@ mod tests {
edited_at: None,
};

let message = ChatPanel::render_markdown_with_mentions(&language_registry, 102, &message);
let message = ChatPanel::render_markdown_with_mentions(
&language_registry,
102,
&message,
UtcOffset::UTC,
cx,
);

// Note that the "'" was replaced with ’ due to smart punctuation.
let (body, ranges) = marked_text_ranges("«hi», «@abc», let’s «call» «@fgh»", false);
@@ -1224,7 +1250,13 @@ mod tests {
edited_at: None,
};

let message = ChatPanel::render_markdown_with_mentions(&language_registry, 102, &message);
let message = ChatPanel::render_markdown_with_mentions(
&language_registry,
102,
&message,
UtcOffset::UTC,
cx,
);

// Note that the "'" was replaced with ’ due to smart punctuation.
let (body, ranges) =
@@ -1265,7 +1297,13 @@ mod tests {
edited_at: None,
};

let message = ChatPanel::render_markdown_with_mentions(&language_registry, 102, &message);
let message = ChatPanel::render_markdown_with_mentions(
&language_registry,
102,
&message,
UtcOffset::UTC,
cx,
);

// Note that the "'" was replaced with ’ due to smart punctuation.
let (body, ranges) = marked_text_ranges(

@@ -557,6 +557,7 @@ mod tests {
use clock::FakeSystemClock;
use gpui::TestAppContext;
use language::{Language, LanguageConfig};
use project::Project;
use rpc::proto;
use settings::SettingsStore;
use util::{http::FakeHttpClient, test::marked_text_ranges};
@@ -630,6 +631,7 @@ mod tests {
let client = Client::new(clock, http.clone(), cx);
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
theme::init(theme::LoadThemes::JustBase, cx);
Project::init_settings(cx);
language::init(cx);
editor::init(cx);
client::init(&client, cx);

@@ -1,20 +1,17 @@
mod channel_modal;
mod contact_finder;
mod dev_server_modal;

use self::channel_modal::ChannelModal;
use self::dev_server_modal::DevServerModal;
use crate::{
channel_view::ChannelView, chat_panel::ChatPanel, face_pile::FacePile,
CollaborationPanelSettings,
};
use call::ActiveCall;
use channel::{Channel, ChannelEvent, ChannelStore, RemoteProject};
use channel::{Channel, ChannelEvent, ChannelStore};
use client::{ChannelId, Client, Contact, ProjectId, User, UserStore};
use contact_finder::ContactFinder;
use db::kvp::KEY_VALUE_STORE;
use editor::{Editor, EditorElement, EditorStyle};
use feature_flags::{self, FeatureFlagAppExt};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
actions, anchored, canvas, deferred, div, fill, list, point, prelude::*, px, AnyElement,
@@ -27,7 +24,7 @@ use gpui::{
use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrev};
use project::{Fs, Project};
use rpc::{
proto::{self, ChannelVisibility, DevServerStatus, PeerId},
proto::{self, ChannelVisibility, PeerId},
ErrorCode, ErrorExt,
};
use serde_derive::{Deserialize, Serialize};
@@ -191,7 +188,6 @@ enum ListEntry {
id: ProjectId,
name: SharedString,
},
RemoteProject(channel::RemoteProject),
Contact {
contact: Arc<Contact>,
calling: bool,
@@ -282,23 +278,10 @@ impl CollabPanel {
.push(cx.observe(&this.user_store, |this, _, cx| {
this.update_entries(true, cx)
}));
let mut has_opened = false;
this.subscriptions.push(cx.observe(
&this.channel_store,
move |this, channel_store, cx| {
if !has_opened {
if !channel_store
.read(cx)
.dev_servers_for_id(ChannelId(1))
.is_empty()
{
this.manage_remote_projects(ChannelId(1), cx);
has_opened = true;
}
}
this.subscriptions
.push(cx.observe(&this.channel_store, move |this, _, cx| {
this.update_entries(true, cx)
},
));
}));
this.subscriptions
.push(cx.observe(&active_call, |this, _, cx| this.update_entries(true, cx)));
this.subscriptions.push(cx.subscribe(
@@ -586,7 +569,6 @@ impl CollabPanel {
}

let hosted_projects = channel_store.projects_for_id(channel.id);
let remote_projects = channel_store.remote_projects_for_id(channel.id);
let has_children = channel_store
.channel_at_index(mat.candidate_id + 1)
.map_or(false, |next_channel| {
@@ -624,12 +606,6 @@ impl CollabPanel {
for (name, id) in hosted_projects {
self.entries.push(ListEntry::HostedProject { id, name });
}

if cx.has_flag::<feature_flags::Remoting>() {
for remote_project in remote_projects {
self.entries.push(ListEntry::RemoteProject(remote_project));
}
}
}
}

@@ -1089,59 +1065,6 @@ impl CollabPanel {
.tooltip(move |cx| Tooltip::text("Open Project", cx))
}

fn render_remote_project(
&self,
remote_project: &RemoteProject,
is_selected: bool,
cx: &mut ViewContext<Self>,
) -> impl IntoElement {
let id = remote_project.id;
let name = remote_project.name.clone();
let maybe_project_id = remote_project.project_id;

let dev_server = self
.channel_store
.read(cx)
.find_dev_server_by_id(remote_project.dev_server_id);

let tooltip_text = SharedString::from(match dev_server {
Some(dev_server) => format!("Open Remote Project ({})", dev_server.name),
None => "Open Remote Project".to_string(),
});

let dev_server_is_online = dev_server.map(|s| s.status) == Some(DevServerStatus::Online);

let dev_server_text_color = if dev_server_is_online {
Color::Default
} else {
Color::Disabled
};

ListItem::new(ElementId::NamedInteger(
"remote-project".into(),
id.0 as usize,
))
.indent_level(2)
.indent_step_size(px(20.))
.selected(is_selected)
.on_click(cx.listener(move |this, _, cx| {
//TODO display error message if dev server is offline
if dev_server_is_online {
if let Some(project_id) = maybe_project_id {
this.join_remote_project(project_id, cx);
}
}
}))
.start_slot(
h_flex()
.relative()
.gap_1()
.child(IconButton::new(0, IconName::FileTree).icon_color(dev_server_text_color)),
)
.child(Label::new(name.clone()).color(dev_server_text_color))
.tooltip(move |cx| Tooltip::text(tooltip_text.clone(), cx))
}

fn has_subchannels(&self, ix: usize) -> bool {
self.entries.get(ix).map_or(false, |entry| {
if let ListEntry::Channel { has_children, .. } = entry {
@@ -1343,24 +1266,11 @@ impl CollabPanel {
}

if self.channel_store.read(cx).is_root_channel(channel_id) {
context_menu = context_menu
.separator()
.entry(
"Manage Members",
None,
cx.handler_for(&this, move |this, cx| {
this.manage_members(channel_id, cx)
}),
)
.when(cx.has_flag::<feature_flags::Remoting>(), |context_menu| {
context_menu.entry(
"Manage Remote Projects",
None,
cx.handler_for(&this, move |this, cx| {
this.manage_remote_projects(channel_id, cx)
}),
)
})
context_menu = context_menu.separator().entry(
"Manage Members",
None,
cx.handler_for(&this, move |this, cx| this.manage_members(channel_id, cx)),
)
} else {
context_menu = context_menu.entry(
"Move this channel",
@@ -1624,12 +1534,6 @@ impl CollabPanel {
} => {
// todo()
}
ListEntry::RemoteProject(project) => {
if let Some(project_id) = project.project_id {
self.join_remote_project(project_id, cx)
}
}

ListEntry::OutgoingRequest(_) => {}
ListEntry::ChannelEditor { .. } => {}
}
@@ -1801,18 +1705,6 @@ impl CollabPanel {
self.show_channel_modal(channel_id, channel_modal::Mode::ManageMembers, cx);
}

fn manage_remote_projects(&mut self, channel_id: ChannelId, cx: &mut ViewContext<Self>) {
let channel_store = self.channel_store.clone();
let Some(workspace) = self.workspace.upgrade() else {
return;
};
workspace.update(cx, |workspace, cx| {
workspace.toggle_modal(cx, |cx| {
DevServerModal::new(channel_store.clone(), channel_id, cx)
});
});
}

fn remove_selected_channel(&mut self, _: &Remove, cx: &mut ViewContext<Self>) {
if let Some(channel) = self.selected_channel() {
self.remove_channel(channel.id, cx)
@@ -2113,18 +2005,6 @@ impl CollabPanel {
.detach_and_prompt_err("Failed to join channel", cx, |_, _| None)
}

fn join_remote_project(&mut self, project_id: ProjectId, cx: &mut ViewContext<Self>) {
let Some(workspace) = self.workspace.upgrade() else {
return;
};
let app_state = workspace.read(cx).app_state().clone();
workspace::join_remote_project(project_id, app_state, cx).detach_and_prompt_err(
"Failed to join project",
cx,
|_, _| None,
)
}

fn join_channel_chat(&mut self, channel_id: ChannelId, cx: &mut ViewContext<Self>) {
let Some(workspace) = self.workspace.upgrade() else {
return;
@@ -2260,9 +2140,6 @@ impl CollabPanel {
ListEntry::HostedProject { id, name } => self
.render_channel_project(*id, name, is_selected, cx)
.into_any_element(),
ListEntry::RemoteProject(remote_project) => self
.render_remote_project(remote_project, is_selected, cx)
.into_any_element(),
}
}

@@ -3005,11 +2882,6 @@ impl PartialEq for ListEntry {
return id == other_id;
}
}
ListEntry::RemoteProject(project) => {
if let ListEntry::RemoteProject(other) = other {
return project.id == other.id;
}
}
ListEntry::ChannelNotes { channel_id } => {
if let ListEntry::ChannelNotes {
channel_id: other_id,
@@ -3075,7 +2947,7 @@ impl Render for DraggedChannelView {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl Element {
let ui_font = ThemeSettings::get_global(cx).ui_font.family.clone();
h_flex()
.font(ui_font)
.font_family(ui_font)
.bg(cx.theme().colors().background)
.w(self.width)
.p_1()

@@ -1,622 +0,0 @@
|
||||
use channel::{ChannelStore, DevServer, RemoteProject};
|
||||
use client::{ChannelId, DevServerId, RemoteProjectId};
|
||||
use editor::Editor;
|
||||
use gpui::{
|
||||
AppContext, ClipboardItem, DismissEvent, EventEmitter, FocusHandle, FocusableView, Model,
|
||||
ScrollHandle, Task, View, ViewContext,
|
||||
};
|
||||
use rpc::proto::{self, CreateDevServerResponse, DevServerStatus};
|
||||
use ui::{prelude::*, Indicator, List, ListHeader, ModalContent, ModalHeader, Tooltip};
|
||||
use util::ResultExt;
|
||||
use workspace::ModalView;
|
||||
|
||||
pub struct DevServerModal {
|
||||
mode: Mode,
|
||||
focus_handle: FocusHandle,
|
||||
scroll_handle: ScrollHandle,
|
||||
channel_store: Model<ChannelStore>,
|
||||
channel_id: ChannelId,
|
||||
remote_project_name_editor: View<Editor>,
|
||||
remote_project_path_editor: View<Editor>,
|
||||
dev_server_name_editor: View<Editor>,
|
||||
_subscriptions: [gpui::Subscription; 2],
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct CreateDevServer {
|
||||
creating: Option<Task<()>>,
|
||||
dev_server: Option<CreateDevServerResponse>,
|
||||
}
|
||||
|
||||
struct CreateRemoteProject {
|
||||
dev_server_id: DevServerId,
|
||||
creating: Option<Task<()>>,
|
||||
remote_project: Option<proto::RemoteProject>,
|
||||
}
|
||||
|
||||
enum Mode {
|
||||
Default,
|
||||
CreateRemoteProject(CreateRemoteProject),
|
||||
CreateDevServer(CreateDevServer),
|
||||
}
|
||||
|
||||
impl DevServerModal {
|
||||
pub fn new(
|
||||
channel_store: Model<ChannelStore>,
|
||||
channel_id: ChannelId,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let name_editor = cx.new_view(|cx| Editor::single_line(cx));
|
||||
let path_editor = cx.new_view(|cx| Editor::single_line(cx));
|
||||
let dev_server_name_editor = cx.new_view(|cx| {
|
||||
let mut editor = Editor::single_line(cx);
|
||||
editor.set_placeholder_text("Dev server name", cx);
|
||||
editor
|
||||
});
|
||||
|
||||
let focus_handle = cx.focus_handle();
|
||||
|
||||
let subscriptions = [
|
||||
cx.observe(&channel_store, |_, _, cx| {
|
||||
cx.notify();
|
||||
}),
|
||||
cx.on_focus_out(&focus_handle, |_, _cx| { /* cx.emit(DismissEvent) */ }),
|
||||
];
|
||||
|
||||
Self {
|
||||
mode: Mode::Default,
|
||||
focus_handle,
|
||||
scroll_handle: ScrollHandle::new(),
|
||||
channel_store,
|
||||
channel_id,
|
||||
remote_project_name_editor: name_editor,
|
||||
remote_project_path_editor: path_editor,
|
||||
dev_server_name_editor,
|
||||
_subscriptions: subscriptions,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_remote_project(
|
||||
&mut self,
|
||||
dev_server_id: DevServerId,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let channel_id = self.channel_id;
|
||||
let name = self
|
||||
.remote_project_name_editor
|
||||
.read(cx)
|
||||
.text(cx)
|
||||
.trim()
|
||||
.to_string();
|
||||
let path = self
|
||||
.remote_project_path_editor
|
||||
.read(cx)
|
||||
.text(cx)
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
if name == "" {
|
||||
return;
|
||||
}
|
||||
if path == "" {
|
||||
return;
|
||||
}
|
||||
|
||||
let create = self.channel_store.update(cx, |store, cx| {
|
||||
store.create_remote_project(channel_id, dev_server_id, name, path, cx)
|
||||
});
|
||||
|
||||
let task = cx.spawn(|this, mut cx| async move {
|
||||
let result = create.await;
|
||||
if let Err(e) = &result {
|
||||
cx.prompt(
|
||||
gpui::PromptLevel::Critical,
|
||||
"Failed to create project",
|
||||
Some(&format!("{:?}. Please try again.", e)),
|
||||
&["Ok"],
|
||||
)
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.mode = Mode::CreateRemoteProject(CreateRemoteProject {
|
||||
dev_server_id,
|
||||
creating: None,
|
||||
remote_project: result.ok().and_then(|r| r.remote_project),
|
||||
});
|
||||
})
|
||||
.log_err();
|
||||
});
|
||||
|
||||
self.mode = Mode::CreateRemoteProject(CreateRemoteProject {
|
||||
dev_server_id,
|
||||
creating: Some(task),
|
||||
remote_project: None,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn create_dev_server(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let name = self
|
||||
.dev_server_name_editor
|
||||
.read(cx)
|
||||
.text(cx)
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
if name == "" {
|
||||
return;
|
||||
}
|
||||
|
||||
let dev_server = self.channel_store.update(cx, |store, cx| {
|
||||
store.create_dev_server(self.channel_id, name.clone(), cx)
|
||||
});
|
||||
|
||||
let task = cx.spawn(|this, mut cx| async move {
|
||||
match dev_server.await {
|
||||
Ok(dev_server) => {
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.mode = Mode::CreateDevServer(CreateDevServer {
|
||||
creating: None,
|
||||
dev_server: Some(dev_server),
|
||||
});
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
Err(e) => {
|
||||
cx.prompt(
|
||||
gpui::PromptLevel::Critical,
|
||||
"Failed to create server",
|
||||
Some(&format!("{:?}. Please try again.", e)),
|
||||
&["Ok"],
|
||||
)
|
||||
.await
|
||||
.log_err();
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.mode = Mode::CreateDevServer(Default::default());
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
self.mode = Mode::CreateDevServer(CreateDevServer {
|
||||
creating: Some(task),
|
||||
dev_server: None,
|
||||
});
|
||||
cx.notify()
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext<Self>) {
|
||||
match self.mode {
|
||||
Mode::Default => cx.emit(DismissEvent),
|
||||
Mode::CreateRemoteProject(_) | Mode::CreateDevServer(_) => {
|
||||
self.mode = Mode::Default;
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn render_dev_server(
|
||||
&mut self,
|
||||
dev_server: &DevServer,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> impl IntoElement {
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let dev_server_id = dev_server.id;
|
||||
let status = dev_server.status;
|
||||
|
||||
v_flex()
|
||||
.w_full()
|
||||
.child(
|
||||
h_flex()
|
||||
.group("dev-server")
|
||||
.justify_between()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
div()
|
||||
.id(("status", dev_server.id.0))
|
||||
.relative()
|
||||
.child(Icon::new(IconName::Server).size(IconSize::Small))
|
||||
.child(
|
||||
div().absolute().bottom_0().left(rems_from_px(8.0)).child(
|
||||
Indicator::dot().color(match status {
|
||||
DevServerStatus::Online => Color::Created,
|
||||
DevServerStatus::Offline => Color::Deleted,
|
||||
}),
|
||||
),
|
||||
)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::text(
|
||||
match status {
|
||||
DevServerStatus::Online => "Online",
|
||||
DevServerStatus::Offline => "Offline",
|
||||
},
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
)
|
||||
.child(dev_server.name.clone())
|
||||
.child(
|
||||
h_flex()
|
||||
.visible_on_hover("dev-server")
|
||||
.gap_1()
|
||||
.child(
|
||||
IconButton::new("edit-dev-server", IconName::Pencil)
|
||||
.disabled(true) //TODO implement this on the collab side
|
||||
.tooltip(|cx| {
|
||||
Tooltip::text("Coming Soon - Edit dev server", cx)
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("remove-dev-server", IconName::Trash)
|
||||
.disabled(true) //TODO implement this on the collab side
|
||||
.tooltip(|cx| {
|
||||
Tooltip::text("Coming Soon - Remove dev server", cx)
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex().gap_1().child(
|
||||
IconButton::new("add-remote-project", IconName::Plus)
|
||||
.tooltip(|cx| Tooltip::text("Add a remote project", cx))
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.mode = Mode::CreateRemoteProject(CreateRemoteProject {
|
||||
dev_server_id,
|
||||
creating: None,
|
||||
remote_project: None,
|
||||
});
|
||||
cx.notify();
|
||||
})),
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
.bg(cx.theme().colors().title_bar_background)
|
||||
.border()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.rounded_md()
|
||||
.my_1()
|
||||
.py_0p5()
|
||||
.px_3()
|
||||
.child(
|
||||
List::new().empty_message("No projects.").children(
|
||||
channel_store
|
||||
.remote_projects_for_id(dev_server.channel_id)
|
||||
.iter()
|
||||
.filter_map(|remote_project| {
|
||||
if remote_project.dev_server_id == dev_server.id {
|
||||
Some(self.render_remote_project(remote_project, cx))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
// .child(div().ml_8().child(
|
||||
// Button::new(("add-project", dev_server_id.0), "Add Project").on_click(cx.listener(
|
||||
// move |this, _, cx| {
|
||||
// this.mode = Mode::CreateRemoteProject(CreateRemoteProject {
|
||||
// dev_server_id,
|
||||
// creating: None,
|
||||
// remote_project: None,
|
||||
// });
|
||||
// cx.notify();
|
||||
// },
|
||||
// )),
|
||||
// ))
|
||||
}
|
||||
|
||||
fn render_remote_project(
|
||||
&mut self,
|
||||
project: &RemoteProject,
|
||||
_: &mut ViewContext<Self>,
|
||||
) -> impl IntoElement {
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Icon::new(IconName::FileTree))
|
||||
.child(Label::new(project.name.clone()))
|
||||
.child(Label::new(format!("({})", project.path.clone())).color(Color::Muted))
|
||||
}
|
||||
|
||||
fn render_create_dev_server(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let Mode::CreateDevServer(CreateDevServer {
|
||||
creating,
|
||||
dev_server,
|
||||
}) = &self.mode
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
self.dev_server_name_editor.update(cx, |editor, _| {
|
||||
editor.set_read_only(creating.is_some() || dev_server.is_some())
|
||||
});
|
||||
v_flex()
|
||||
.px_1()
|
||||
.pt_0p5()
|
||||
.gap_px()
|
||||
.child(
|
||||
v_flex().py_0p5().px_1().child(
|
||||
h_flex()
|
||||
.px_1()
|
||||
.py_0p5()
|
||||
.child(
|
||||
IconButton::new("back", IconName::ArrowLeft)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click(cx.listener(|this, _: &gpui::ClickEvent, cx| {
|
||||
this.mode = Mode::Default;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
.child(Headline::new("Register dev server")),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.ml_5()
|
||||
.gap_2()
|
||||
.child("Name")
|
||||
.child(self.dev_server_name_editor.clone())
|
||||
.on_action(
|
||||
cx.listener(|this, _: &menu::Confirm, cx| this.create_dev_server(cx)),
|
||||
)
|
||||
.when(creating.is_none() && dev_server.is_none(), |div| {
|
||||
div.child(
|
||||
Button::new("create-dev-server", "Create").on_click(cx.listener(
|
||||
move |this, _, cx| {
|
||||
this.create_dev_server(cx);
|
||||
},
|
||||
)),
|
||||
)
|
||||
})
|
||||
.when(creating.is_some() && dev_server.is_none(), |div| {
|
||||
div.child(Button::new("create-dev-server", "Creating...").disabled(true))
|
||||
}),
|
||||
)
|
||||
.when_some(dev_server.clone(), |div, dev_server| {
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let status = channel_store
|
||||
.find_dev_server_by_id(DevServerId(dev_server.dev_server_id))
|
||||
.map(|server| server.status)
|
||||
.unwrap_or(DevServerStatus::Offline);
|
||||
let instructions = SharedString::from(format!(
|
||||
"zed --dev-server-token {}",
|
||||
dev_server.access_token
|
||||
));
|
||||
div.child(
|
||||
v_flex()
|
||||
.ml_8()
|
||||
.gap_2()
|
||||
.child(Label::new(format!(
|
||||
"Please log into `{}` and run:",
|
||||
dev_server.name
|
||||
)))
|
||||
.child(instructions.clone())
|
||||
.child(
|
||||
IconButton::new("copy-access-token", IconName::Copy)
|
||||
.on_click(cx.listener(move |_, _, cx| {
|
||||
cx.write_to_clipboard(ClipboardItem::new(
|
||||
instructions.to_string(),
|
||||
))
|
||||
}))
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(|cx| Tooltip::text("Copy access token", cx)),
|
||||
)
|
||||
.when(status == DevServerStatus::Offline, |this| {
|
||||
this.child(Label::new("Waiting for connection..."))
|
||||
})
|
||||
.when(status == DevServerStatus::Online, |this| {
|
||||
this.child(Label::new("Connection established! 🎊")).child(
|
||||
Button::new("done", "Done").on_click(cx.listener(|this, _, cx| {
|
||||
this.mode = Mode::Default;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
}),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn render_default(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let dev_servers = channel_store.dev_servers_for_id(self.channel_id);
|
||||
// let dev_servers = Vec::new();
|
||||
|
||||
v_flex()
|
||||
.id("scroll-container")
|
||||
.h_full()
|
||||
.overflow_y_scroll()
|
||||
.track_scroll(&self.scroll_handle)
|
||||
.px_1()
|
||||
.pt_0p5()
|
||||
.gap_px()
|
||||
.child(
|
||||
ModalHeader::new("Manage Remote Project")
|
||||
.child(Headline::new("Remote Projects").size(HeadlineSize::Small)),
|
||||
)
|
||||
.child(
|
||||
ModalContent::new().child(
|
||||
List::new()
|
||||
.empty_message("No dev servers registered.")
|
||||
.header(Some(
|
||||
ListHeader::new("Dev Servers").end_slot(
|
||||
Button::new("register-dev-server-button", "New Server")
|
||||
.icon(IconName::Plus)
|
||||
.icon_position(IconPosition::Start)
|
||||
.tooltip(|cx| Tooltip::text("Register a new dev server", cx))
|
||||
.on_click(cx.listener(|this, _, cx| {
|
||||
this.mode = Mode::CreateDevServer(Default::default());
|
||||
this.dev_server_name_editor
|
||||
.read(cx)
|
||||
.focus_handle(cx)
|
||||
.focus(cx);
|
||||
cx.notify();
|
||||
})),
|
||||
),
|
||||
))
|
||||
.children(dev_servers.iter().map(|dev_server| {
|
||||
self.render_dev_server(dev_server, cx).into_any_element()
|
||||
})),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_create_project(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let Mode::CreateRemoteProject(CreateRemoteProject {
|
||||
dev_server_id,
|
||||
creating,
|
||||
remote_project,
|
||||
}) = &self.mode
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let (dev_server_name, dev_server_status) = channel_store
|
||||
.find_dev_server_by_id(*dev_server_id)
|
||||
.map(|server| (server.name.clone(), server.status))
|
||||
.unwrap_or((SharedString::from(""), DevServerStatus::Offline));
|
||||
v_flex()
|
||||
.px_1()
|
||||
.pt_0p5()
|
||||
.gap_px()
|
||||
.child(
|
||||
ModalHeader::new("Manage Remote Project")
|
||||
.child(Headline::new("Manage Remote Projects")),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.py_0p5()
|
||||
.px_1()
|
||||
.child(div().px_1().py_0p5().child(
|
||||
IconButton::new("back", IconName::ArrowLeft).on_click(cx.listener(
|
||||
|this, _, cx| {
|
||||
this.mode = Mode::Default;
|
||||
cx.notify()
|
||||
},
|
||||
)),
|
||||
))
|
||||
.child("Add Project..."),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.ml_5()
|
||||
.gap_2()
|
||||
.child(
|
||||
div()
|
||||
.id(("status", dev_server_id.0))
|
||||
.relative()
|
||||
.child(Icon::new(IconName::Server))
|
||||
.child(div().absolute().bottom_0().left(rems_from_px(12.0)).child(
|
||||
Indicator::dot().color(match dev_server_status {
|
||||
DevServerStatus::Online => Color::Created,
|
||||
DevServerStatus::Offline => Color::Deleted,
|
||||
}),
|
||||
))
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::text(
|
||||
match dev_server_status {
|
||||
DevServerStatus::Online => "Online",
|
||||
DevServerStatus::Offline => "Offline",
|
||||
},
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
)
|
||||
.child(dev_server_name.clone()),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.ml_5()
|
||||
.gap_2()
|
||||
.child("Name")
|
||||
.child(self.remote_project_name_editor.clone())
|
||||
.on_action(cx.listener(|this, _: &menu::Confirm, cx| {
|
||||
cx.focus_view(&this.remote_project_path_editor)
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.ml_5()
|
||||
.gap_2()
|
||||
.child("Path")
|
||||
.child(self.remote_project_path_editor.clone())
|
||||
.on_action(
|
||||
cx.listener(|this, _: &menu::Confirm, cx| this.create_dev_server(cx)),
|
||||
)
|
||||
.when(creating.is_none() && remote_project.is_none(), |div| {
|
||||
div.child(Button::new("create-remote-server", "Create").on_click({
|
||||
let dev_server_id = *dev_server_id;
|
||||
cx.listener(move |this, _, cx| {
|
||||
this.create_remote_project(dev_server_id, cx)
|
||||
})
|
||||
}))
|
||||
})
|
||||
.when(creating.is_some(), |div| {
|
||||
div.child(Button::new("create-dev-server", "Creating...").disabled(true))
|
||||
}),
|
||||
)
|
||||
.when_some(remote_project.clone(), |div, remote_project| {
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let status = channel_store
|
||||
.find_remote_project_by_id(RemoteProjectId(remote_project.id))
|
||||
.map(|project| {
|
||||
if project.project_id.is_some() {
|
||||
DevServerStatus::Online
|
||||
} else {
|
||||
DevServerStatus::Offline
|
||||
}
|
||||
})
|
||||
.unwrap_or(DevServerStatus::Offline);
|
||||
div.child(
|
||||
v_flex()
|
||||
.ml_5()
|
||||
.ml_8()
|
||||
.gap_2()
|
||||
.when(status == DevServerStatus::Offline, |this| {
|
||||
this.child(Label::new("Waiting for project..."))
|
||||
})
|
||||
.when(status == DevServerStatus::Online, |this| {
|
||||
this.child(Label::new("Project online! 🎊")).child(
|
||||
Button::new("done", "Done").on_click(cx.listener(|this, _, cx| {
|
||||
this.mode = Mode::Default;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
}),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
impl ModalView for DevServerModal {}
|
||||
|
||||
impl FocusableView for DevServerModal {
|
||||
fn focus_handle(&self, _cx: &AppContext) -> FocusHandle {
|
||||
self.focus_handle.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<DismissEvent> for DevServerModal {}
|
||||
|
||||
impl Render for DevServerModal {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
div()
|
||||
.track_focus(&self.focus_handle)
|
||||
.elevation_3(cx)
|
||||
.key_context("DevServerModal")
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.pb_4()
|
||||
.w(rems(34.))
|
||||
.min_h(rems(20.))
|
||||
.max_h(rems(40.))
|
||||
.child(match &self.mode {
|
||||
Mode::Default => self.render_default(cx).into_any_element(),
|
||||
Mode::CreateRemoteProject(_) => self.render_create_project(cx).into_any_element(),
|
||||
Mode::CreateDevServer(_) => self.render_create_dev_server(cx).into_any_element(),
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -171,44 +171,48 @@ impl Render for CollabTitlebarItem {
|
||||
let room = room.read(cx);
|
||||
let project = self.project.read(cx);
|
||||
let is_local = project.is_local();
|
||||
let is_shared = is_local && project.is_shared();
|
||||
let is_remote_project = project.remote_project_id().is_some();
|
||||
let is_shared = (is_local || is_remote_project) && project.is_shared();
|
||||
let is_muted = room.is_muted();
|
||||
let is_deafened = room.is_deafened().unwrap_or(false);
|
||||
let is_screen_sharing = room.is_screen_sharing();
|
||||
let can_use_microphone = room.can_use_microphone();
|
||||
let can_share_projects = room.can_share_projects();
|
||||
|
||||
this.when(is_local && can_share_projects, |this| {
|
||||
this.child(
|
||||
Button::new(
|
||||
"toggle_sharing",
|
||||
if is_shared { "Unshare" } else { "Share" },
|
||||
)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::text(
|
||||
if is_shared {
|
||||
"Stop sharing project with call participants"
|
||||
} else {
|
||||
"Share project with call participants"
|
||||
},
|
||||
cx,
|
||||
this.when(
|
||||
(is_local || is_remote_project) && can_share_projects,
|
||||
|this| {
|
||||
this.child(
|
||||
Button::new(
|
||||
"toggle_sharing",
|
||||
if is_shared { "Unshare" } else { "Share" },
|
||||
)
|
||||
})
|
||||
.style(ButtonStyle::Subtle)
|
||||
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
|
||||
.selected(is_shared)
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click(cx.listener(
|
||||
move |this, _, cx| {
|
||||
if is_shared {
|
||||
this.unshare_project(&Default::default(), cx);
|
||||
} else {
|
||||
this.share_project(&Default::default(), cx);
|
||||
}
|
||||
},
|
||||
)),
|
||||
)
|
||||
})
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::text(
|
||||
if is_shared {
|
||||
"Stop sharing project with call participants"
|
||||
} else {
|
||||
"Share project with call participants"
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.style(ButtonStyle::Subtle)
|
||||
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
|
||||
.selected(is_shared)
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click(cx.listener(
|
||||
move |this, _, cx| {
|
||||
if is_shared {
|
||||
this.unshare_project(&Default::default(), cx);
|
||||
} else {
|
||||
this.share_project(&Default::default(), cx);
|
||||
}
|
||||
},
|
||||
)),
|
||||
)
|
||||
},
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.child(
|
||||
@@ -406,7 +410,7 @@ impl CollabTitlebarItem {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn render_project_name(&self, cx: &mut ViewContext<Self>) -> impl Element {
|
||||
pub fn render_project_name(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let name = {
|
||||
let mut names = self.project.read(cx).visible_worktrees(cx).map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
@@ -423,15 +427,26 @@ impl CollabTitlebarItem {
|
||||
};
|
||||
|
||||
let workspace = self.workspace.clone();
|
||||
popover_menu("project_name_trigger")
|
||||
.trigger(
|
||||
Button::new("project_name_trigger", name)
|
||||
.when(!is_project_selected, |b| b.color(Color::Muted))
|
||||
.style(ButtonStyle::Subtle)
|
||||
.label_size(LabelSize::Small)
|
||||
.tooltip(move |cx| Tooltip::text("Recent Projects", cx)),
|
||||
)
|
||||
.menu(move |cx| Some(Self::render_project_popover(workspace.clone(), cx)))
|
||||
Button::new("project_name_trigger", name)
|
||||
.when(!is_project_selected, |b| b.color(Color::Muted))
|
||||
.style(ButtonStyle::Subtle)
|
||||
.label_size(LabelSize::Small)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::for_action(
|
||||
"Recent Projects",
|
||||
&recent_projects::OpenRecent {
|
||||
create_new_window: false,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click(cx.listener(move |_, _, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
RecentProjects::open(workspace, false, cx);
|
||||
})
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn render_project_branch(&self, cx: &mut ViewContext<Self>) -> Option<impl Element> {
|
||||
@@ -607,17 +622,6 @@ impl CollabTitlebarItem {
|
||||
Some(view)
|
||||
}
|
||||
|
||||
pub fn render_project_popover(
|
||||
workspace: WeakView<Workspace>,
|
||||
cx: &mut WindowContext<'_>,
|
||||
) -> View<RecentProjects> {
|
||||
let view = RecentProjects::open_popover(workspace, cx);
|
||||
|
||||
let focus_handle = view.focus_handle(cx);
|
||||
cx.focus(&focus_handle);
|
||||
view
|
||||
}
|
||||
|
||||
fn render_connection_status(
|
||||
&self,
|
||||
status: &client::Status,
|
||||
|
||||
@@ -125,7 +125,7 @@ impl Render for IncomingCallNotification {
|
||||
|
||||
cx.set_rem_size(ui_font_size);
|
||||
|
||||
div().size_full().font(ui_font).child(
|
||||
div().size_full().font_family(ui_font).child(
|
||||
CollabNotification::new(
|
||||
self.state.call.calling_user.avatar_uri.clone(),
|
||||
Button::new("accept", "Accept").on_click({
|
||||
|
||||
@@ -129,7 +129,7 @@ impl Render for ProjectSharedNotification {
|
||||
|
||||
cx.set_rem_size(ui_font_size);
|
||||
|
||||
div().size_full().font(ui_font).child(
|
||||
div().size_full().font_family(ui_font).child(
|
||||
CollabNotification::new(
|
||||
self.owner.avatar_uri.clone(),
|
||||
Button::new("open", "Open").on_click(cx.listener(move |this, _event, cx| {
|
||||
|
||||
@@ -1255,7 +1255,6 @@ mod tests {
|
||||
&self,
|
||||
_: BufferId,
|
||||
_: &clock::Global,
|
||||
_: language::RopeFingerprint,
|
||||
_: language::LineEnding,
|
||||
_: Option<std::time::SystemTime>,
|
||||
_: &mut AppContext,
|
||||
|
||||
@@ -2,4 +2,4 @@

First, craft your test data. The examples folder shows a template for building a test-db, and can be run with `cargo run --example [your-example]`.

To actually use and test your queries, import the generated DB file into https://sqliteonline.com/
To actually use and test your queries, import the generated DB file into https://sqliteonline.com/

@@ -44,8 +44,6 @@ use workspace::{
|
||||
|
||||
actions!(diagnostics, [Deploy, ToggleWarnings]);
|
||||
|
||||
const CONTEXT_LINE_COUNT: u32 = 1;
|
||||
|
||||
pub fn init(cx: &mut AppContext) {
|
||||
ProjectDiagnosticsSettings::register(cx);
|
||||
cx.observe_new_views(ProjectDiagnosticsEditor::register)
|
||||
@@ -63,6 +61,7 @@ struct ProjectDiagnosticsEditor {
|
||||
paths_to_update: HashMap<LanguageServerId, HashSet<ProjectPath>>,
|
||||
current_diagnostics: HashMap<LanguageServerId, HashSet<ProjectPath>>,
|
||||
include_warnings: bool,
|
||||
context: u32,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
@@ -116,7 +115,8 @@ impl ProjectDiagnosticsEditor {
|
||||
workspace.register_action(Self::deploy);
|
||||
}
|
||||
|
||||
fn new(
|
||||
fn new_with_context(
|
||||
context: u32,
|
||||
project_handle: Model<Project>,
|
||||
workspace: WeakView<Workspace>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
@@ -136,8 +136,15 @@ impl ProjectDiagnosticsEditor {
|
||||
.entry(*language_server_id)
|
||||
.or_default()
|
||||
.insert(path.clone());
|
||||
if this.editor.read(cx).selections.all::<usize>(cx).is_empty()
|
||||
&& !this.is_dirty(cx)
|
||||
|
||||
if this.is_dirty(cx) {
|
||||
return;
|
||||
}
|
||||
let selections = this.editor.read(cx).selections.all::<usize>(cx);
|
||||
if selections.len() < 2
|
||||
&& selections
|
||||
.first()
|
||||
.map_or(true, |selection| selection.end == selection.start)
|
||||
{
|
||||
this.update_excerpts(Some(*language_server_id), cx);
|
||||
}
|
||||
@@ -174,6 +181,7 @@ impl ProjectDiagnosticsEditor {
|
||||
let summary = project.diagnostic_summary(false, cx);
|
||||
let mut this = Self {
|
||||
project: project_handle,
|
||||
context,
|
||||
summary,
|
||||
workspace,
|
||||
excerpts,
|
||||
@@ -193,6 +201,19 @@ impl ProjectDiagnosticsEditor {
|
||||
this
|
||||
}
|
||||
|
||||
fn new(
|
||||
project_handle: Model<Project>,
|
||||
workspace: WeakView<Workspace>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
Self::new_with_context(
|
||||
editor::DEFAULT_MULTIBUFFER_CONTEXT,
|
||||
project_handle,
|
||||
workspace,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
|
||||
fn deploy(workspace: &mut Workspace, _: &Deploy, cx: &mut ViewContext<Workspace>) {
|
||||
if let Some(existing) = workspace.item_of_type::<ProjectDiagnosticsEditor>(cx) {
|
||||
workspace.activate_item(&existing, cx);
|
||||
@@ -423,18 +444,16 @@ impl ProjectDiagnosticsEditor {
|
||||
let resolved_entry = entry.map(|e| e.resolve::<Point>(&snapshot));
|
||||
if let Some((range, start_ix)) = &mut pending_range {
|
||||
if let Some(entry) = resolved_entry.as_ref() {
|
||||
if entry.range.start.row
|
||||
<= range.end.row + 1 + CONTEXT_LINE_COUNT * 2
|
||||
{
|
||||
if entry.range.start.row <= range.end.row + 1 + self.context * 2 {
|
||||
range.end = range.end.max(entry.range.end);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let excerpt_start =
|
||||
Point::new(range.start.row.saturating_sub(CONTEXT_LINE_COUNT), 0);
|
||||
Point::new(range.start.row.saturating_sub(self.context), 0);
|
||||
let excerpt_end = snapshot.clip_point(
|
||||
Point::new(range.end.row + CONTEXT_LINE_COUNT, u32::MAX),
|
||||
Point::new(range.end.row + self.context, u32::MAX),
|
||||
Bias::Left,
|
||||
);
|
||||
let excerpt_id = excerpts
|
||||
@@ -893,7 +912,7 @@ mod tests {
|
||||
display_map::{BlockContext, TransformBlock},
|
||||
DisplayPoint, GutterDimensions,
|
||||
};
|
||||
use gpui::{px, Stateful, TestAppContext, VisualTestContext, WindowContext};
|
||||
use gpui::{px, AvailableSpace, Stateful, TestAppContext, VisualTestContext};
|
||||
use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16, Unclipped};
|
||||
use project::FakeFs;
|
||||
use serde_json::json;
|
||||
@@ -1023,69 +1042,73 @@ mod tests {
|
||||
|
||||
// Open the project diagnostics view while there are already diagnostics.
|
||||
let view = window.build_view(cx, |cx| {
|
||||
ProjectDiagnosticsEditor::new(project.clone(), workspace.downgrade(), cx)
|
||||
ProjectDiagnosticsEditor::new_with_context(
|
||||
1,
|
||||
project.clone(),
|
||||
workspace.downgrade(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let editor = view.update(cx, |view, _| view.editor.clone());
|
||||
|
||||
view.next_notification(cx).await;
|
||||
view.update(cx, |view, cx| {
|
||||
assert_eq!(
|
||||
editor_blocks(&view.editor, cx),
|
||||
[
|
||||
(0, "path header block".into()),
|
||||
(2, "diagnostic header".into()),
|
||||
(15, "collapsed context".into()),
|
||||
(16, "diagnostic header".into()),
|
||||
(25, "collapsed context".into()),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
view.editor.update(cx, |editor, cx| editor.display_text(cx)),
|
||||
concat!(
|
||||
//
|
||||
// main.rs
|
||||
//
|
||||
"\n", // filename
|
||||
"\n", // padding
|
||||
// diagnostic group 1
|
||||
"\n", // primary message
|
||||
"\n", // padding
|
||||
" let x = vec![];\n",
|
||||
" let y = vec![];\n",
|
||||
"\n", // supporting diagnostic
|
||||
" a(x);\n",
|
||||
" b(y);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" // comment 1\n",
|
||||
" // comment 2\n",
|
||||
" c(y);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" d(x);\n",
|
||||
"\n", // context ellipsis
|
||||
// diagnostic group 2
|
||||
"\n", // primary message
|
||||
"\n", // padding
|
||||
"fn main() {\n",
|
||||
" let x = vec![];\n",
|
||||
"\n", // supporting diagnostic
|
||||
" let y = vec![];\n",
|
||||
" a(x);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" b(y);\n",
|
||||
"\n", // context ellipsis
|
||||
" c(y);\n",
|
||||
" d(x);\n",
|
||||
"\n", // supporting diagnostic
|
||||
"}"
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
editor_blocks(&editor, cx),
|
||||
[
|
||||
(0, "path header block".into()),
|
||||
(2, "diagnostic header".into()),
|
||||
(15, "collapsed context".into()),
|
||||
(16, "diagnostic header".into()),
|
||||
(25, "collapsed context".into()),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
editor.update(cx, |editor, cx| editor.display_text(cx)),
|
||||
concat!(
|
||||
//
|
||||
// main.rs
|
||||
//
|
||||
"\n", // filename
|
||||
"\n", // padding
|
||||
// diagnostic group 1
|
||||
"\n", // primary message
|
||||
"\n", // padding
|
||||
" let x = vec![];\n",
|
||||
" let y = vec![];\n",
|
||||
"\n", // supporting diagnostic
|
||||
" a(x);\n",
|
||||
" b(y);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" // comment 1\n",
|
||||
" // comment 2\n",
|
||||
" c(y);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" d(x);\n",
|
||||
"\n", // context ellipsis
|
||||
// diagnostic group 2
|
||||
"\n", // primary message
|
||||
"\n", // padding
|
||||
"fn main() {\n",
|
||||
" let x = vec![];\n",
|
||||
"\n", // supporting diagnostic
|
||||
" let y = vec![];\n",
|
||||
" a(x);\n",
|
||||
"\n", // supporting diagnostic
|
||||
" b(y);\n",
|
||||
"\n", // context ellipsis
|
||||
" c(y);\n",
|
||||
" d(x);\n",
|
||||
"\n", // supporting diagnostic
|
||||
"}"
|
||||
)
|
||||
);
|
||||
|
||||
// Cursor is at the first diagnostic
|
||||
view.editor.update(cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(12, 6)..DisplayPoint::new(12, 6)]
|
||||
);
|
||||
});
|
||||
// Cursor is at the first diagnostic
|
||||
editor.update(cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(12, 6)..DisplayPoint::new(12, 6)]
|
||||
);
|
||||
});
|
||||
|
||||
// Diagnostics are added for another earlier path.
|
||||
@@ -1114,78 +1137,77 @@ mod tests {
});

view.next_notification(cx).await;
view.update(cx, |view, cx| {
assert_eq!(
editor_blocks(&view.editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
(7, "path header block".into()),
(9, "diagnostic header".into()),
(22, "collapsed context".into()),
(23, "diagnostic header".into()),
(32, "collapsed context".into()),
]
);
assert_eq!(
view.editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
//
// consts.rs
//
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"const a: i32 = 'a';\n",
"\n", // supporting diagnostic
"const b: i32 = c;\n",
//
// main.rs
//
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
" let x = vec![];\n",
" let y = vec![];\n",
"\n", // supporting diagnostic
" a(x);\n",
" b(y);\n",
"\n", // supporting diagnostic
" // comment 1\n",
" // comment 2\n",
" c(y);\n",
"\n", // supporting diagnostic
" d(x);\n",
"\n", // collapsed context
// diagnostic group 2
"\n", // primary message
"\n", // filename
"fn main() {\n",
" let x = vec![];\n",
"\n", // supporting diagnostic
" let y = vec![];\n",
" a(x);\n",
"\n", // supporting diagnostic
" b(y);\n",
"\n", // context ellipsis
" c(y);\n",
" d(x);\n",
"\n", // supporting diagnostic
"}"
)
);
assert_eq!(
editor_blocks(&editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
(7, "path header block".into()),
(9, "diagnostic header".into()),
(22, "collapsed context".into()),
(23, "diagnostic header".into()),
(32, "collapsed context".into()),
]
);

// Cursor keeps its position.
view.editor.update(cx, |editor, cx| {
assert_eq!(
editor.selections.display_ranges(cx),
[DisplayPoint::new(19, 6)..DisplayPoint::new(19, 6)]
);
});
assert_eq!(
editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
//
// consts.rs
//
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"const a: i32 = 'a';\n",
"\n", // supporting diagnostic
"const b: i32 = c;\n",
//
// main.rs
//
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
" let x = vec![];\n",
" let y = vec![];\n",
"\n", // supporting diagnostic
" a(x);\n",
" b(y);\n",
"\n", // supporting diagnostic
" // comment 1\n",
" // comment 2\n",
" c(y);\n",
"\n", // supporting diagnostic
" d(x);\n",
"\n", // collapsed context
// diagnostic group 2
"\n", // primary message
"\n", // filename
"fn main() {\n",
" let x = vec![];\n",
"\n", // supporting diagnostic
" let y = vec![];\n",
" a(x);\n",
"\n", // supporting diagnostic
" b(y);\n",
"\n", // context ellipsis
" c(y);\n",
" d(x);\n",
"\n", // supporting diagnostic
"}"
)
);

// Cursor keeps its position.
editor.update(cx, |editor, cx| {
assert_eq!(
editor.selections.display_ranges(cx),
[DisplayPoint::new(19, 6)..DisplayPoint::new(19, 6)]
);
});

// Diagnostics are added to the first path
@@ -1230,80 +1252,79 @@ mod tests {
});

view.next_notification(cx).await;
view.update(cx, |view, cx| {
assert_eq!(
editor_blocks(&view.editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
(7, "collapsed context".into()),
(8, "diagnostic header".into()),
(13, "path header block".into()),
(15, "diagnostic header".into()),
(28, "collapsed context".into()),
(29, "diagnostic header".into()),
(38, "collapsed context".into()),
]
);
assert_eq!(
view.editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
//
// consts.rs
//
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"const a: i32 = 'a';\n",
"\n", // supporting diagnostic
"const b: i32 = c;\n",
"\n", // context ellipsis
// diagnostic group 2
"\n", // primary message
"\n", // padding
"const a: i32 = 'a';\n",
"const b: i32 = c;\n",
"\n", // supporting diagnostic
//
// main.rs
//
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
" let x = vec![];\n",
" let y = vec![];\n",
"\n", // supporting diagnostic
" a(x);\n",
" b(y);\n",
"\n", // supporting diagnostic
" // comment 1\n",
" // comment 2\n",
" c(y);\n",
"\n", // supporting diagnostic
" d(x);\n",
"\n", // context ellipsis
// diagnostic group 2
"\n", // primary message
"\n", // filename
"fn main() {\n",
" let x = vec![];\n",
"\n", // supporting diagnostic
" let y = vec![];\n",
" a(x);\n",
"\n", // supporting diagnostic
" b(y);\n",
"\n", // context ellipsis
" c(y);\n",
" d(x);\n",
"\n", // supporting diagnostic
"}"
)
);
});
assert_eq!(
editor_blocks(&editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
(7, "collapsed context".into()),
(8, "diagnostic header".into()),
(13, "path header block".into()),
(15, "diagnostic header".into()),
(28, "collapsed context".into()),
(29, "diagnostic header".into()),
(38, "collapsed context".into()),
]
);

assert_eq!(
editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
//
// consts.rs
//
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"const a: i32 = 'a';\n",
"\n", // supporting diagnostic
"const b: i32 = c;\n",
"\n", // context ellipsis
// diagnostic group 2
"\n", // primary message
"\n", // padding
"const a: i32 = 'a';\n",
"const b: i32 = c;\n",
"\n", // supporting diagnostic
//
// main.rs
//
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
" let x = vec![];\n",
" let y = vec![];\n",
"\n", // supporting diagnostic
" a(x);\n",
" b(y);\n",
"\n", // supporting diagnostic
" // comment 1\n",
" // comment 2\n",
" c(y);\n",
"\n", // supporting diagnostic
" d(x);\n",
"\n", // context ellipsis
// diagnostic group 2
"\n", // primary message
"\n", // filename
"fn main() {\n",
" let x = vec![];\n",
"\n", // supporting diagnostic
" let y = vec![];\n",
" a(x);\n",
"\n", // supporting diagnostic
" b(y);\n",
"\n", // context ellipsis
" c(y);\n",
" d(x);\n",
"\n", // supporting diagnostic
"}"
)
);
}

#[gpui::test]
@@ -1333,8 +1354,14 @@ mod tests {
let workspace = window.root(cx).unwrap();

let view = window.build_view(cx, |cx| {
ProjectDiagnosticsEditor::new(project.clone(), workspace.downgrade(), cx)
ProjectDiagnosticsEditor::new_with_context(
1,
project.clone(),
workspace.downgrade(),
cx,
)
});
let editor = view.update(cx, |view, _| view.editor.clone());

// Two language servers start updating diagnostics
project.update(cx, |project, cx| {
@@ -1368,27 +1395,25 @@ mod tests {

// Only the first language server's diagnostics are shown.
cx.executor().run_until_parked();
view.update(cx, |view, cx| {
assert_eq!(
editor_blocks(&view.editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
]
);
assert_eq!(
view.editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"a();\n", //
"b();",
)
);
});
assert_eq!(
editor_blocks(&editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
]
);
assert_eq!(
editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"a();\n", //
"b();",
)
);

// The second language server finishes
project.update(cx, |project, cx| {
@@ -1416,36 +1441,34 @@ mod tests {

// Both language server's diagnostics are shown.
cx.executor().run_until_parked();
view.update(cx, |view, cx| {
assert_eq!(
editor_blocks(&view.editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
(6, "collapsed context".into()),
(7, "diagnostic header".into()),
]
);
assert_eq!(
view.editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"a();\n", // location
"b();\n", //
"\n", // collapsed context
// diagnostic group 2
"\n", // primary message
"\n", // padding
"a();\n", // context
"b();\n", //
"c();", // context
)
);
});
assert_eq!(
editor_blocks(&editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
(6, "collapsed context".into()),
(7, "diagnostic header".into()),
]
);
assert_eq!(
editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"a();\n", // location
"b();\n", //
"\n", // collapsed context
// diagnostic group 2
"\n", // primary message
"\n", // padding
"a();\n", // context
"b();\n", //
"c();", // context
)
);

// Both language servers start updating diagnostics, and the first server finishes.
project.update(cx, |project, cx| {
@@ -1484,37 +1507,35 @@ mod tests {

// Only the first language server's diagnostics are updated.
cx.executor().run_until_parked();
view.update(cx, |view, cx| {
assert_eq!(
editor_blocks(&view.editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
(7, "collapsed context".into()),
(8, "diagnostic header".into()),
]
);
assert_eq!(
view.editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"a();\n", // location
"b();\n", //
"c();\n", // context
"\n", // collapsed context
// diagnostic group 2
"\n", // primary message
"\n", // padding
"b();\n", // context
"c();\n", //
"d();", // context
)
);
});
assert_eq!(
editor_blocks(&editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
(7, "collapsed context".into()),
(8, "diagnostic header".into()),
]
);
assert_eq!(
editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"a();\n", // location
"b();\n", //
"c();\n", // context
"\n", // collapsed context
// diagnostic group 2
"\n", // primary message
"\n", // padding
"b();\n", // context
"c();\n", //
"d();", // context
)
);

// The second language server finishes.
project.update(cx, |project, cx| {
@@ -1542,37 +1563,35 @@ mod tests {

// Both language servers' diagnostics are updated.
cx.executor().run_until_parked();
view.update(cx, |view, cx| {
assert_eq!(
editor_blocks(&view.editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
(7, "collapsed context".into()),
(8, "diagnostic header".into()),
]
);
assert_eq!(
view.editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"b();\n", // location
"c();\n", //
"d();\n", // context
"\n", // collapsed context
// diagnostic group 2
"\n", // primary message
"\n", // padding
"c();\n", // context
"d();\n", //
"e();", // context
)
);
});
assert_eq!(
editor_blocks(&editor, cx),
[
(0, "path header block".into()),
(2, "diagnostic header".into()),
(7, "collapsed context".into()),
(8, "diagnostic header".into()),
]
);
assert_eq!(
editor.update(cx, |editor, cx| editor.display_text(cx)),
concat!(
"\n", // filename
"\n", // padding
// diagnostic group 1
"\n", // primary message
"\n", // padding
"b();\n", // location
"c();\n", //
"d();\n", // context
"\n", // collapsed context
// diagnostic group 2
"\n", // primary message
"\n", // padding
"c();\n", // context
"d();\n", //
"e();", // context
)
);
}

fn init_test(cx: &mut TestAppContext) {
@@ -1589,45 +1608,58 @@ mod tests {
});
}

fn editor_blocks(editor: &View<Editor>, cx: &mut WindowContext) -> Vec<(u32, SharedString)> {
editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
snapshot
.blocks_in_range(0..snapshot.max_point().row())
.enumerate()
.filter_map(|(ix, (row, block))| {
let name: SharedString = match block {
TransformBlock::Custom(block) => cx.with_element_context({
|cx| -> Option<SharedString> {
let mut element = block.render(&mut BlockContext {
context: cx,
anchor_x: px(0.),
gutter_dimensions: &GutterDimensions::default(),
line_height: px(0.),
em_width: px(0.),
max_width: px(0.),
block_id: ix,
editor_style: &editor::EditorStyle::default(),
});
let element = element.downcast_mut::<Stateful<Div>>().unwrap();
element.interactivity().element_id.clone()?.try_into().ok()
}
})?,
fn editor_blocks(
editor: &View<Editor>,
cx: &mut VisualTestContext,
) -> Vec<(u32, SharedString)> {
let mut blocks = Vec::new();
cx.draw(gpui::Point::default(), AvailableSpace::min_size(), |cx| {
editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
blocks.extend(
snapshot
.blocks_in_range(0..snapshot.max_point().row())
.enumerate()
.filter_map(|(ix, (row, block))| {
let name: SharedString = match block {
TransformBlock::Custom(block) => {
let mut element = block.render(&mut BlockContext {
context: cx,
anchor_x: px(0.),
gutter_dimensions: &GutterDimensions::default(),
line_height: px(0.),
em_width: px(0.),
max_width: px(0.),
block_id: ix,
editor_style: &editor::EditorStyle::default(),
});
let element = element.downcast_mut::<Stateful<Div>>().unwrap();
element
.interactivity()
.element_id
.clone()?
.try_into()
.ok()?
}

TransformBlock::ExcerptHeader {
starts_new_buffer, ..
} => {
if *starts_new_buffer {
"path header block".into()
} else {
"collapsed context".into()
}
}
};
TransformBlock::ExcerptHeader {
starts_new_buffer, ..
} => {
if *starts_new_buffer {
"path header block".into()
} else {
"collapsed context".into()
}
}
};

Some((row, name))
})
.collect()
})
Some((row, name))
}),
)
});

div().into_any()
});
blocks
}
}
|
||||
pub(super) lines: u32,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Clone, Deserialize, Default)]
|
||||
pub struct ExpandExcerpts {
|
||||
#[serde(default)]
|
||||
pub(super) lines: u32,
|
||||
}
|
||||
|
||||
impl_actions!(
|
||||
editor,
|
||||
[
|
||||
SelectNext,
|
||||
SelectPrevious,
|
||||
SelectToBeginningOfLine,
|
||||
ExpandExcerpts,
|
||||
MovePageUp,
|
||||
MovePageDown,
|
||||
SelectToEndOfLine,
|
||||
@@ -245,6 +252,7 @@ gpui::actions!(
|
||||
Tab,
|
||||
TabPrev,
|
||||
ToggleGitBlame,
|
||||
ToggleGitBlameInline,
|
||||
ToggleInlayHints,
|
||||
ToggleLineNumbers,
|
||||
ToggleSoftWrap,
|
||||
@@ -253,6 +261,6 @@ gpui::actions!(
|
||||
UndoSelection,
|
||||
UnfoldLines,
|
||||
UniqueLinesCaseSensitive,
|
||||
UniqueLinesCaseInsensitive
|
||||
UniqueLinesCaseInsensitive,
|
||||
]
|
||||
);
|
||||
|
||||