Compare commits
2 Commits
edit-syste ... installer

| Author | SHA1 | Date |
|---|---|---|
| | 284ad179d7 | |
| | 30b55279b5 | |

.github/pull_request_template.md (4 changes, vendored)
@@ -6,8 +6,6 @@ Release Notes:

Optionally, include screenshots / media showcasing your addition that can be included in the release notes.

### Or...

Release Notes:
**or**

- N/A

.github/workflows/ci.yml (35 changes, vendored)
@@ -104,19 +104,23 @@ jobs:
# todo(linux): Actually run the tests
linux_tests:
name: (Linux) Run Clippy and tests
runs-on:
- self-hosted
- deploy
runs-on: ubuntu-latest
steps:
- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH

- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

- name: Cache dependencies
uses: swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}

- name: configure linux
shell: bash -euxo pipefail {0}
run: script/linux

- name: cargo clippy
run: cargo xtask clippy

@@ -126,7 +130,7 @@ jobs:
# todo(windows): Actually run the tests
windows_tests:
name: (Windows) Run Clippy and tests
runs-on: hosted-windows-1
runs-on: windows-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
@@ -258,25 +262,26 @@ jobs:

bundle-linux:
name: Create a Linux bundle
runs-on:
- self-hosted
- deploy
runs-on: ubuntu-22.04 # keep the version fixed to avoid libc and dynamic linked library issues
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
needs: [linux_tests]
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH

- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false
submodules: "recursive"

- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100
- name: Cache dependencies
uses: swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}

- name: Configure linux
shell: bash -euxo pipefail {0}
run: script/linux

- name: Determine version and release channel
if: ${{ startsWith(github.ref, 'refs/tags/v') }}

.github/workflows/deploy_docs.yml (35 changes, vendored)
@@ -1,35 +0,0 @@
name: Deploy Docs

on:
push:
branches:
- main

jobs:
deploy-docs:
name: Deploy Docs
runs-on: ubuntu-latest

steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
clean: false

- name: Setup mdBook
uses: peaceiris/actions-mdbook@v2
with:
mdbook-version: "0.4.37"

- name: Build book
run: |
set -euo pipefail
mkdir -p target/deploy
mdbook build ./docs --dest-dir=../target/deploy/docs/

- name: Deploy
uses: cloudflare/wrangler-action@v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: pages deploy target/deploy --project-name=docs

.github/workflows/release_nightly.yml (14 changes, vendored)
@@ -96,9 +96,7 @@ jobs:
bundle-deb:
name: Create a Linux *.tar.gz bundle
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- deploy
runs-on: ubuntu-22.04 # keep the version fixed to avoid libc and dynamic linked library issues
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
@@ -111,8 +109,14 @@ jobs:
clean: false
submodules: "recursive"

- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Cache dependencies
uses: swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}

- name: Configure linux
shell: bash -euxo pipefail {0}
run: script/linux

- name: Set release channel to nightly
run: |

.mailmap (9 changes)
@@ -15,12 +15,8 @@ Christian Bergschneider <christian.bergschneider@gmx.de>
Christian Bergschneider <christian.bergschneider@gmx.de> <magiclake@gmx.de>
Conrad Irwin <conrad@zed.dev>
Conrad Irwin <conrad@zed.dev> <conrad.irwin@gmail.com>
Fernando Tagawa <tagawafernando@gmail.com>
Fernando Tagawa <tagawafernando@gmail.com> <fernando.tagawa.gamail.com@gmail.com>
Greg Morenz <greg-morenz@droid.cafe>
Greg Morenz <greg-morenz@droid.cafe> <morenzg@gmail.com>
Ivan Žužak <izuzak@gmail.com>
Ivan Žužak <izuzak@gmail.com> <ivan.zuzak@github.com>
Joseph T. Lyons <JosephTLyons@gmail.com>
Joseph T. Lyons <JosephTLyons@gmail.com> <JosephTLyons@users.noreply.github.com>
Julia <floc@unpromptedtirade.com>
@@ -33,9 +29,6 @@ Kirill Bulatov <kirill@zed.dev>
Kirill Bulatov <kirill@zed.dev> <mail4score@gmail.com>
Kyle Caverly <kylebcaverly@gmail.com>
Kyle Caverly <kylebcaverly@gmail.com> <kyle@zed.dev>
LoganDark <contact@logandark.mozmail.com>
LoganDark <contact@logandark.mozmail.com> <git@logandark.mozmail.com>
LoganDark <contact@logandark.mozmail.com> <github@logandark.mozmail.com>
Marshall Bowers <elliott.codes@gmail.com>
Marshall Bowers <elliott.codes@gmail.com> <marshall@zed.dev>
Max Brunsfeld <maxbrunsfeld@gmail.com>
@@ -48,8 +41,6 @@ Nate Butler <iamnbutler@gmail.com> <nate@zed.dev>
Nathan Sobo <nathan@zed.dev>
Nathan Sobo <nathan@zed.dev> <nathan@warp.dev>
Nathan Sobo <nathan@zed.dev> <nathansobo@gmail.com>
Petros Amoiridis <petros@hey.com>
Petros Amoiridis <petros@hey.com> <petros@zed.dev>
Piotr Osiewicz <piotr@zed.dev>
Piotr Osiewicz <piotr@zed.dev> <24362066+osiewicz@users.noreply.github.com>
Robert Clover <git@clo4.net>

Cargo.lock (340 changes, generated)
@@ -16,6 +16,7 @@ dependencies = [
|
||||
"project",
|
||||
"smallvec",
|
||||
"ui",
|
||||
"util",
|
||||
"workspace",
|
||||
]
|
||||
|
||||
@@ -377,7 +378,6 @@ dependencies = [
|
||||
"anyhow",
|
||||
"assets",
|
||||
"assistant_tooling",
|
||||
"chrono",
|
||||
"client",
|
||||
"collections",
|
||||
"editor",
|
||||
@@ -385,17 +385,15 @@ dependencies = [
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"language",
|
||||
"languages",
|
||||
"log",
|
||||
"nanoid",
|
||||
"node_runtime",
|
||||
"open_ai",
|
||||
"picker",
|
||||
"project",
|
||||
"rand 0.8.5",
|
||||
"regex",
|
||||
"release_channel",
|
||||
"rich_text",
|
||||
"schemars",
|
||||
@@ -415,20 +413,10 @@ name = "assistant_tooling"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"collections",
|
||||
"futures 0.3.28",
|
||||
"gpui",
|
||||
"log",
|
||||
"project",
|
||||
"repair_json",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"sum_tree",
|
||||
"ui",
|
||||
"unindent",
|
||||
"util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -486,7 +474,6 @@ version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a116f46a969224200a0a97f29cfd4c50e7534e4b4826bd23ea2c3c533039c82c"
|
||||
dependencies = [
|
||||
"deflate64",
|
||||
"flate2",
|
||||
"futures-core",
|
||||
"futures-io",
|
||||
@@ -820,19 +807,6 @@ dependencies = [
|
||||
"tungstenite 0.16.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async_zip"
|
||||
version = "0.0.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "00b9f7252833d5ed4b00aa9604b563529dd5e11de9c23615de2dcdf91eb87b52"
|
||||
dependencies = [
|
||||
"async-compression",
|
||||
"crc32fast",
|
||||
"futures-lite 2.2.0",
|
||||
"pin-project",
|
||||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "atoi"
|
||||
version = "2.0.0"
|
||||
@@ -1508,7 +1482,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "blade-graphics"
|
||||
version = "0.4.0"
|
||||
source = "git+https://github.com/kvark/blade?rev=e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c#e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c"
|
||||
source = "git+https://github.com/kvark/blade?rev=e82eec97691c3acdb43494484be60d661edfebf3#e82eec97691c3acdb43494484be60d661edfebf3"
|
||||
dependencies = [
|
||||
"ash",
|
||||
"ash-window",
|
||||
@@ -1538,7 +1512,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "blade-macros"
|
||||
version = "0.2.1"
|
||||
source = "git+https://github.com/kvark/blade?rev=e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c#e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c"
|
||||
source = "git+https://github.com/kvark/blade?rev=e82eec97691c3acdb43494484be60d661edfebf3#e82eec97691c3acdb43494484be60d661edfebf3"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2119,11 +2093,7 @@ dependencies = [
|
||||
"clap 4.4.4",
|
||||
"core-foundation",
|
||||
"core-services",
|
||||
"exec",
|
||||
"fork",
|
||||
"ipc-channel",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"plist",
|
||||
"release_channel",
|
||||
"serde",
|
||||
@@ -2303,7 +2273,6 @@ dependencies = [
|
||||
"collections",
|
||||
"ctor",
|
||||
"dashmap",
|
||||
"dev_server_projects",
|
||||
"editor",
|
||||
"env_logger",
|
||||
"envy",
|
||||
@@ -2311,7 +2280,6 @@ dependencies = [
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
"git",
|
||||
"git_hosting_providers",
|
||||
"google_ai",
|
||||
"gpui",
|
||||
"headless",
|
||||
@@ -2334,6 +2302,7 @@ dependencies = [
|
||||
"prost",
|
||||
"rand 0.8.5",
|
||||
"release_channel",
|
||||
"remote_projects",
|
||||
"reqwest",
|
||||
"rpc",
|
||||
"rustc-demangle",
|
||||
@@ -2348,7 +2317,6 @@ dependencies = [
|
||||
"sha2 0.10.7",
|
||||
"sqlx",
|
||||
"subtle",
|
||||
"supermaven_api",
|
||||
"telemetry_events",
|
||||
"text",
|
||||
"theme",
|
||||
@@ -2377,7 +2345,6 @@ dependencies = [
|
||||
"clock",
|
||||
"collections",
|
||||
"db",
|
||||
"dev_server_projects",
|
||||
"editor",
|
||||
"emojis",
|
||||
"extensions_ui",
|
||||
@@ -2393,7 +2360,6 @@ dependencies = [
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"recent_projects",
|
||||
"release_channel",
|
||||
"rich_text",
|
||||
"rpc",
|
||||
"schemars",
|
||||
@@ -2545,10 +2511,30 @@ dependencies = [
|
||||
"async-compression",
|
||||
"async-std",
|
||||
"async-tar",
|
||||
"client",
|
||||
"clock",
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
"gpui",
|
||||
"language",
|
||||
"lsp",
|
||||
"node_runtime",
|
||||
"parking_lot",
|
||||
"rpc",
|
||||
"serde",
|
||||
"settings",
|
||||
"smol",
|
||||
"util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "copilot_ui"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"copilot",
|
||||
"editor",
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
@@ -2557,18 +2543,14 @@ dependencies = [
|
||||
"language",
|
||||
"lsp",
|
||||
"menu",
|
||||
"node_runtime",
|
||||
"parking_lot",
|
||||
"project",
|
||||
"rpc",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smol",
|
||||
"theme",
|
||||
"ui",
|
||||
"util",
|
||||
"workspace",
|
||||
"zed_actions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3132,12 +3114,6 @@ dependencies = [
|
||||
"byteorder",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deflate64"
|
||||
version = "0.1.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "83ace6c86376be0b6cdcf3fb41882e81d94b31587573d1cfa9d01cd06bba210d"
|
||||
|
||||
[[package]]
|
||||
name = "der"
|
||||
version = "0.6.1"
|
||||
@@ -3201,18 +3177,6 @@ dependencies = [
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dev_server_projects"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"gpui",
|
||||
"rpc",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "diagnostics"
|
||||
version = "0.1.0"
|
||||
@@ -3433,7 +3397,6 @@ dependencies = [
|
||||
"smol",
|
||||
"snippet",
|
||||
"sum_tree",
|
||||
"task",
|
||||
"text",
|
||||
"theme",
|
||||
"time",
|
||||
@@ -3579,17 +3542,6 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "errno"
|
||||
version = "0.2.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
|
||||
dependencies = [
|
||||
"errno-dragonfly",
|
||||
"libc",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "errno"
|
||||
version = "0.3.8"
|
||||
@@ -3600,16 +3552,6 @@ dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "errno-dragonfly"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "etagere"
|
||||
version = "0.2.8"
|
||||
@@ -3688,16 +3630,6 @@ dependencies = [
|
||||
"pin-project-lite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "exec"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "886b70328cba8871bfc025858e1de4be16b1d5088f2ba50b57816f4210672615"
|
||||
dependencies = [
|
||||
"errno 0.2.8",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "extension"
|
||||
version = "0.1.0"
|
||||
@@ -4098,15 +4030,6 @@ version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b"
|
||||
|
||||
[[package]]
|
||||
name = "fork"
|
||||
version = "0.1.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60e74d3423998a57e9d906e49252fb79eb4a04d5cdfe188fb1b7ff9fc076a8ed"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "form_urlencoded"
|
||||
version = "1.2.1"
|
||||
@@ -4183,7 +4106,6 @@ name = "fsevent"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bitflags 2.4.2",
|
||||
"core-foundation",
|
||||
"fsevent-sys 3.1.0",
|
||||
"parking_lot",
|
||||
"tempfile",
|
||||
@@ -4463,16 +4385,14 @@ name = "git"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"clock",
|
||||
"collections",
|
||||
"derive_more",
|
||||
"git2",
|
||||
"gpui",
|
||||
"lazy_static",
|
||||
"log",
|
||||
"parking_lot",
|
||||
"pretty_assertions",
|
||||
"regex",
|
||||
"rope",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -4499,25 +4419,6 @@ dependencies = [
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "git_hosting_providers"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"futures 0.3.28",
|
||||
"git",
|
||||
"gpui",
|
||||
"isahc",
|
||||
"pretty_assertions",
|
||||
"regex",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"unindent",
|
||||
"url",
|
||||
"util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.3.1"
|
||||
@@ -4566,7 +4467,6 @@ name = "go_to_line"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"collections",
|
||||
"editor",
|
||||
"gpui",
|
||||
"indoc",
|
||||
@@ -4704,7 +4604,6 @@ dependencies = [
|
||||
"wayland-client",
|
||||
"wayland-cursor",
|
||||
"wayland-protocols",
|
||||
"wayland-protocols-plasma",
|
||||
"windows 0.53.0",
|
||||
"x11rb",
|
||||
"xkbcommon",
|
||||
@@ -4832,6 +4731,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"ctrlc",
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
"gpui",
|
||||
@@ -4843,7 +4743,6 @@ dependencies = [
|
||||
"rpc",
|
||||
"settings",
|
||||
"shellexpand",
|
||||
"signal-hook",
|
||||
"util",
|
||||
]
|
||||
|
||||
@@ -5229,30 +5128,6 @@ dependencies = [
|
||||
"syn 2.0.59",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "inline_completion_button"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"copilot",
|
||||
"editor",
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
"gpui",
|
||||
"indoc",
|
||||
"language",
|
||||
"lsp",
|
||||
"project",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"supermaven",
|
||||
"theme",
|
||||
"ui",
|
||||
"util",
|
||||
"workspace",
|
||||
"zed_actions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "inotify"
|
||||
version = "0.9.6"
|
||||
@@ -5658,7 +5533,6 @@ dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"collections",
|
||||
"copilot",
|
||||
"editor",
|
||||
"env_logger",
|
||||
"futures 0.3.28",
|
||||
@@ -5802,9 +5676,9 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
|
||||
|
||||
[[package]]
|
||||
name = "libmimalloc-sys"
|
||||
version = "0.1.37"
|
||||
version = "0.1.35"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "81eb4061c0582dedea1cbc7aff2240300dd6982e0239d1c99e65c1dbf4a30ba7"
|
||||
checksum = "3979b5c37ece694f1f5e51e7ecc871fdb0f517ed04ee45f88d15d6d553cb9664"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
@@ -6041,27 +5915,6 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markdown"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"assets",
|
||||
"env_logger",
|
||||
"futures 0.3.28",
|
||||
"gpui",
|
||||
"language",
|
||||
"languages",
|
||||
"linkify",
|
||||
"log",
|
||||
"node_runtime",
|
||||
"pulldown-cmark",
|
||||
"settings",
|
||||
"theme",
|
||||
"ui",
|
||||
"util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markdown_preview"
|
||||
version = "0.1.0"
|
||||
@@ -6190,9 +6043,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "mimalloc"
|
||||
version = "0.1.41"
|
||||
version = "0.1.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9f41a2280ded0da56c8cf898babb86e8f10651a34adcfff190ae9a1159c6908d"
|
||||
checksum = "fa01922b5ea280a911e323e4d2fd24b7fe5cc4042e0d2cda3c40775cdc4bdc9c"
|
||||
dependencies = [
|
||||
"libmimalloc-sys",
|
||||
]
|
||||
@@ -6428,20 +6281,15 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-compression",
|
||||
"async-std",
|
||||
"async-tar",
|
||||
"async-trait",
|
||||
"async_zip",
|
||||
"futures 0.3.28",
|
||||
"log",
|
||||
"semver",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"smol",
|
||||
"tempfile",
|
||||
"util",
|
||||
"walkdir",
|
||||
"windows 0.53.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6939,7 +6787,6 @@ dependencies = [
|
||||
name = "outline"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"collections",
|
||||
"editor",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
@@ -7559,6 +7406,7 @@ dependencies = [
|
||||
"client",
|
||||
"clock",
|
||||
"collections",
|
||||
"copilot",
|
||||
"env_logger",
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
@@ -7942,7 +7790,6 @@ name = "recent_projects"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"dev_server_projects",
|
||||
"editor",
|
||||
"feature_flags",
|
||||
"fuzzy",
|
||||
@@ -7952,6 +7799,7 @@ dependencies = [
|
||||
"ordered-float 2.10.0",
|
||||
"picker",
|
||||
"project",
|
||||
"remote_projects",
|
||||
"rpc",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -8086,6 +7934,18 @@ dependencies = [
|
||||
"once_cell",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "remote_projects"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"gpui",
|
||||
"rpc",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rend"
|
||||
version = "0.4.0"
|
||||
@@ -8095,15 +7955,6 @@ dependencies = [
|
||||
"bytecheck",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "repair_json"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5ee191e184125fe72cb59b74160e25584e3908f2aaa84cbda1e161347102aa15"
|
||||
dependencies = [
|
||||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "reqwest"
|
||||
version = "0.11.20"
|
||||
@@ -8444,7 +8295,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"errno 0.3.8",
|
||||
"errno",
|
||||
"io-lifetimes 1.0.11",
|
||||
"libc",
|
||||
"linux-raw-sys 0.3.8",
|
||||
@@ -8458,7 +8309,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89"
|
||||
dependencies = [
|
||||
"bitflags 2.4.2",
|
||||
"errno 0.3.8",
|
||||
"errno",
|
||||
"itoa",
|
||||
"libc",
|
||||
"linux-raw-sys 0.4.12",
|
||||
@@ -8472,7 +8323,7 @@ version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a25c3aad9fc1424eb82c88087789a7d938e1829724f3e4043163baf0d13cfc12"
|
||||
dependencies = [
|
||||
"errno 0.3.8",
|
||||
"errno",
|
||||
"libc",
|
||||
"rustix 0.38.32",
|
||||
]
|
||||
@@ -8850,12 +8701,7 @@ dependencies = [
|
||||
"sha2 0.10.7",
|
||||
"smol",
|
||||
"tempfile",
|
||||
"theme",
|
||||
"tree-sitter",
|
||||
"ui",
|
||||
"unindent",
|
||||
"util",
|
||||
"workspace",
|
||||
"worktree",
|
||||
]
|
||||
|
||||
@@ -9742,43 +9588,6 @@ dependencies = [
|
||||
"rayon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "supermaven"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"collections",
|
||||
"editor",
|
||||
"env_logger",
|
||||
"futures 0.3.28",
|
||||
"gpui",
|
||||
"language",
|
||||
"log",
|
||||
"postage",
|
||||
"project",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smol",
|
||||
"supermaven_api",
|
||||
"theme",
|
||||
"ui",
|
||||
"util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "supermaven_api"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"futures 0.3.28",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"smol",
|
||||
"util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sval"
|
||||
version = "2.8.0"
|
||||
@@ -10012,7 +9821,6 @@ dependencies = [
|
||||
"futures 0.3.28",
|
||||
"gpui",
|
||||
"hex",
|
||||
"parking_lot",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json_lenient",
|
||||
@@ -10025,6 +9833,7 @@ dependencies = [
|
||||
name = "tasks_ui"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"editor",
|
||||
"file_icons",
|
||||
"fuzzy",
|
||||
@@ -10239,18 +10048,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.60"
|
||||
version = "1.0.48"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18"
|
||||
checksum = "9d6d7a740b8a666a7e828dd00da9c0dc290dff53154ea77ac109281de90589b7"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.60"
|
||||
version = "1.0.48"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524"
|
||||
checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -11396,9 +11205,9 @@ checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca"
|
||||
|
||||
[[package]]
|
||||
name = "walkdir"
|
||||
version = "2.5.0"
|
||||
version = "2.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
|
||||
checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee"
|
||||
dependencies = [
|
||||
"same-file",
|
||||
"winapi-util",
|
||||
@@ -11916,19 +11725,6 @@ dependencies = [
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-protocols-plasma"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "23803551115ff9ea9bce586860c5c5a971e360825a0309264102a9495a5ff479"
|
||||
dependencies = [
|
||||
"bitflags 2.4.2",
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-protocols",
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-protocols-wlr"
|
||||
version = "0.2.0"
|
||||
@@ -11996,12 +11792,11 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"copilot_ui",
|
||||
"db",
|
||||
"editor",
|
||||
"extensions_ui",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"inline_completion_button",
|
||||
"install_cli",
|
||||
"picker",
|
||||
"project",
|
||||
@@ -12608,7 +12403,6 @@ dependencies = [
|
||||
"collections",
|
||||
"db",
|
||||
"derive_more",
|
||||
"dev_server_projects",
|
||||
"env_logger",
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
@@ -12621,6 +12415,7 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"postage",
|
||||
"project",
|
||||
"remote_projects",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -12859,7 +12654,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.136.0"
|
||||
version = "0.134.0"
|
||||
dependencies = [
|
||||
"activity_indicator",
|
||||
"anyhow",
|
||||
@@ -12876,12 +12671,13 @@ dependencies = [
|
||||
"clap 4.4.4",
|
||||
"cli",
|
||||
"client",
|
||||
"clock",
|
||||
"collab_ui",
|
||||
"collections",
|
||||
"command_palette",
|
||||
"copilot",
|
||||
"copilot_ui",
|
||||
"db",
|
||||
"dev_server_projects",
|
||||
"diagnostics",
|
||||
"editor",
|
||||
"env_logger",
|
||||
@@ -12892,13 +12688,10 @@ dependencies = [
|
||||
"file_icons",
|
||||
"fs",
|
||||
"futures 0.3.28",
|
||||
"git",
|
||||
"git_hosting_providers",
|
||||
"go_to_line",
|
||||
"gpui",
|
||||
"headless",
|
||||
"image_viewer",
|
||||
"inline_completion_button",
|
||||
"install_cli",
|
||||
"isahc",
|
||||
"journal",
|
||||
@@ -12906,7 +12699,6 @@ dependencies = [
|
||||
"language_selector",
|
||||
"language_tools",
|
||||
"languages",
|
||||
"libc",
|
||||
"log",
|
||||
"markdown_preview",
|
||||
"menu",
|
||||
@@ -12923,6 +12715,7 @@ dependencies = [
|
||||
"quick_action_bar",
|
||||
"recent_projects",
|
||||
"release_channel",
|
||||
"remote_projects",
|
||||
"rope",
|
||||
"search",
|
||||
"serde",
|
||||
@@ -12930,7 +12723,6 @@ dependencies = [
|
||||
"settings",
|
||||
"simplelog",
|
||||
"smol",
|
||||
"supermaven",
|
||||
"tab_switcher",
|
||||
"task",
|
||||
"tasks_ui",
|
||||
@@ -12994,7 +12786,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed_elixir"
|
||||
version = "0.0.4"
|
||||
version = "0.0.1"
|
||||
dependencies = [
|
||||
"zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@@ -13128,7 +12920,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed_toml"
|
||||
version = "0.1.1"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@@ -13149,7 +12941,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed_zig"
|
||||
version = "0.1.2"
|
||||
version = "0.1.1"
|
||||
dependencies = [
|
||||
"zed_extension_api 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||

Cargo.toml (33 changes)
@@ -4,8 +4,8 @@ members = [
|
||||
"crates/anthropic",
|
||||
"crates/assets",
|
||||
"crates/assistant",
|
||||
"crates/assistant2",
|
||||
"crates/assistant_tooling",
|
||||
"crates/assistant2",
|
||||
"crates/audio",
|
||||
"crates/auto_update",
|
||||
"crates/breadcrumbs",
|
||||
@@ -20,6 +20,7 @@ members = [
|
||||
"crates/command_palette",
|
||||
"crates/command_palette_hooks",
|
||||
"crates/copilot",
|
||||
"crates/copilot_ui",
|
||||
"crates/db",
|
||||
"crates/diagnostics",
|
||||
"crates/editor",
|
||||
@@ -35,14 +36,12 @@ members = [
|
||||
"crates/fsevent",
|
||||
"crates/fuzzy",
|
||||
"crates/git",
|
||||
"crates/git_hosting_providers",
|
||||
"crates/go_to_line",
|
||||
"crates/google_ai",
|
||||
"crates/gpui",
|
||||
"crates/gpui_macros",
|
||||
"crates/headless",
|
||||
"crates/image_viewer",
|
||||
"crates/inline_completion_button",
|
||||
"crates/install_cli",
|
||||
"crates/journal",
|
||||
"crates/language",
|
||||
@@ -52,7 +51,6 @@ members = [
|
||||
"crates/live_kit_client",
|
||||
"crates/live_kit_server",
|
||||
"crates/lsp",
|
||||
"crates/markdown",
|
||||
"crates/markdown_preview",
|
||||
"crates/media",
|
||||
"crates/menu",
|
||||
@@ -71,7 +69,7 @@ members = [
|
||||
"crates/refineable",
|
||||
"crates/refineable/derive_refineable",
|
||||
"crates/release_channel",
|
||||
"crates/dev_server_projects",
|
||||
"crates/remote_projects",
|
||||
"crates/rich_text",
|
||||
"crates/rope",
|
||||
"crates/rpc",
|
||||
@@ -88,8 +86,6 @@ members = [
|
||||
"crates/storybook",
|
||||
"crates/sum_tree",
|
||||
"crates/tab_switcher",
|
||||
"crates/supermaven",
|
||||
"crates/supermaven_api",
|
||||
"crates/terminal",
|
||||
"crates/terminal_view",
|
||||
"crates/text",
|
||||
@@ -163,6 +159,7 @@ color = { path = "crates/color" }
|
||||
command_palette = { path = "crates/command_palette" }
|
||||
command_palette_hooks = { path = "crates/command_palette_hooks" }
|
||||
copilot = { path = "crates/copilot" }
|
||||
copilot_ui = { path = "crates/copilot_ui" }
|
||||
db = { path = "crates/db" }
|
||||
diagnostics = { path = "crates/diagnostics" }
|
||||
editor = { path = "crates/editor" }
|
||||
@@ -176,7 +173,6 @@ fs = { path = "crates/fs" }
|
||||
fsevent = { path = "crates/fsevent" }
|
||||
fuzzy = { path = "crates/fuzzy" }
|
||||
git = { path = "crates/git" }
|
||||
git_hosting_providers = { path = "crates/git_hosting_providers" }
|
||||
go_to_line = { path = "crates/go_to_line" }
|
||||
google_ai = { path = "crates/google_ai" }
|
||||
gpui = { path = "crates/gpui" }
|
||||
@@ -184,7 +180,6 @@ gpui_macros = { path = "crates/gpui_macros" }
|
||||
headless = { path = "crates/headless" }
|
||||
install_cli = { path = "crates/install_cli" }
|
||||
image_viewer = { path = "crates/image_viewer" }
|
||||
inline_completion_button = { path = "crates/inline_completion_button" }
|
||||
journal = { path = "crates/journal" }
|
||||
language = { path = "crates/language" }
|
||||
language_selector = { path = "crates/language_selector" }
|
||||
@@ -193,7 +188,6 @@ languages = { path = "crates/languages" }
|
||||
live_kit_client = { path = "crates/live_kit_client" }
|
||||
live_kit_server = { path = "crates/live_kit_server" }
|
||||
lsp = { path = "crates/lsp" }
|
||||
markdown = { path = "crates/markdown" }
|
||||
markdown_preview = { path = "crates/markdown_preview" }
|
||||
media = { path = "crates/media" }
|
||||
menu = { path = "crates/menu" }
|
||||
@@ -213,7 +207,7 @@ project_symbols = { path = "crates/project_symbols" }
|
||||
quick_action_bar = { path = "crates/quick_action_bar" }
|
||||
recent_projects = { path = "crates/recent_projects" }
|
||||
release_channel = { path = "crates/release_channel" }
|
||||
dev_server_projects = { path = "crates/dev_server_projects" }
|
||||
remote_projects = { path = "crates/remote_projects" }
|
||||
rich_text = { path = "crates/rich_text" }
|
||||
rope = { path = "crates/rope" }
|
||||
rpc = { path = "crates/rpc" }
|
||||
@@ -226,8 +220,6 @@ settings = { path = "crates/settings" }
|
||||
snippet = { path = "crates/snippet" }
|
||||
sqlez = { path = "crates/sqlez" }
|
||||
sqlez_macros = { path = "crates/sqlez_macros" }
|
||||
supermaven = { path = "crates/supermaven" }
|
||||
supermaven_api = { path = "crates/supermaven_api" }
|
||||
story = { path = "crates/story" }
|
||||
storybook = { path = "crates/storybook" }
|
||||
sum_tree = { path = "crates/sum_tree" }
|
||||
@@ -257,23 +249,20 @@ async-fs = "1.6"
|
||||
async-recursion = "1.0.0"
|
||||
async-tar = "0.4.2"
|
||||
async-trait = "0.1"
|
||||
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
|
||||
bitflags = "2.4.2"
|
||||
blade-graphics = { git = "https://github.com/kvark/blade", rev = "e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c" }
|
||||
blade-macros = { git = "https://github.com/kvark/blade", rev = "e35b2d41f221a48b75f7cf2e78a81e7ecb7a383c" }
|
||||
blade-graphics = { git = "https://github.com/kvark/blade", rev = "e82eec97691c3acdb43494484be60d661edfebf3" }
|
||||
blade-macros = { git = "https://github.com/kvark/blade", rev = "e82eec97691c3acdb43494484be60d661edfebf3" }
|
||||
cap-std = "3.0"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
clap = { version = "4.4", features = ["derive"] }
|
||||
clickhouse = { version = "0.11.6" }
|
||||
ctor = "0.2.6"
|
||||
signal-hook = "0.3.17"
|
||||
ctrlc = "3.4.4"
|
||||
core-foundation = { version = "0.9.3" }
|
||||
core-foundation-sys = "0.8.6"
|
||||
derive_more = "0.99.17"
|
||||
emojis = "0.6.1"
|
||||
env_logger = "0.9"
|
||||
exec = "0.3.1"
|
||||
fork = "0.1.23"
|
||||
futures = "0.3"
|
||||
futures-batch = "0.6.1"
|
||||
futures-lite = "1.13"
|
||||
@@ -292,12 +281,10 @@ isahc = { version = "1.7.2", default-features = false, features = [
|
||||
] }
|
||||
itertools = "0.11.0"
|
||||
lazy_static = "1.4.0"
|
||||
libc = "0.2"
|
||||
linkify = "0.10.0"
|
||||
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
|
||||
nanoid = "0.4"
|
||||
nix = "0.28"
|
||||
once_cell = "1.19.0"
|
||||
ordered-float = "2.1.1"
|
||||
palette = { version = "0.7.5", default-features = false, features = ["std"] }
|
||||
parking_lot = "0.12.1"
|
||||
@@ -311,7 +298,6 @@ pulldown-cmark = { version = "0.10.0", default-features = false }
|
||||
rand = "0.8.5"
|
||||
refineable = { path = "./crates/refineable" }
|
||||
regex = "1.5"
|
||||
repair_json = "0.1.0"
|
||||
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
|
||||
rust-embed = { version = "8.0", features = ["include-exclude"] }
|
||||
schemars = "0.8"
|
||||
@@ -391,8 +377,6 @@ version = "0.53.0"
|
||||
features = [
|
||||
"implement",
|
||||
"Foundation_Numerics",
|
||||
"System",
|
||||
"System_Threading",
|
||||
"Wdk_System_SystemServices",
|
||||
"Win32_Globalization",
|
||||
"Win32_Graphics_Direct2D",
|
||||
@@ -416,7 +400,6 @@ features = [
|
||||
"Win32_System_SystemServices",
|
||||
"Win32_System_Threading",
|
||||
"Win32_System_Time",
|
||||
"Win32_System_WinRT",
|
||||
"Win32_UI_Controls",
|
||||
"Win32_UI_HiDpi",
|
||||
"Win32_UI_Input_Ime",
|
||||
|
||||
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2

FROM rust:1.78-bookworm as builder
FROM rust:1.77-bookworm as builder
WORKDIR app
COPY . .

README.md (101 changes)
@@ -1,50 +1,51 @@
# Zed

[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)

Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).

## Installation

You can [download](https://zed.dev/download) Zed today for macOS (v10.15+).

Support for additional platforms is on our [roadmap](https://zed.dev/roadmap):

- Linux ([tracking issue](https://github.com/zed-industries/zed/issues/7015))
- Windows ([tracking issue](https://github.com/zed-industries/zed/issues/5394))
- Web ([tracking issue](https://github.com/zed-industries/zed/issues/5396))

For macOS users, you can also install Zed using [Homebrew](https://brew.sh/):

```sh
brew install --cask zed
```

Alternatively, to install the Preview release:

```sh
brew install --cask zed@preview
```

## Developing Zed

- [Building Zed for macOS](./docs/src/development/macos.md)
- [Building Zed for Linux](./docs/src/development/linux.md)
- [Building Zed for Windows](./docs/src/development/windows.md)
- [Running Collaboration Locally](./docs/src/development/local-collaboration.md)

## Contributing

See [CONTRIBUTING.md](./CONTRIBUTING.md) for ways you can contribute to Zed.

Also... we're hiring! Check out our [jobs](https://zed.dev/jobs) page for open roles.

## Licensing

License information for third party dependencies must be correctly provided for CI to pass.

We use [`cargo-about`](https://github.com/EmbarkStudios/cargo-about) to automatically comply with open source licenses. If CI is failing, check the following:

- Is it showing a `no license specified` error for a crate you've created? If so, add `publish = false` under `[package]` in your crate's Cargo.toml.
- Is the error `failed to satisfy license requirements` for a dependency? If so, first determine what license the project has and whether this system is sufficient to comply with this license's requirements. If you're unsure, ask a lawyer. Once you've verified that this system is acceptable add the license's SPDX identifier to the `accepted` array in `script/licenses/zed-licenses.toml`.
- Is `cargo-about` unable to find the license for a dependency? If so, add a clarification field at the end of `script/licenses/zed-licenses.toml`, as specified in the [cargo-about book](https://embarkstudios.github.io/cargo-about/cli/generate/config.html#crate-configuration).

# Zed

[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)

Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).

## Installation

You can [download](https://zed.dev/download) Zed today for macOS (v10.15+).

Support for additional platforms is on our [roadmap](https://zed.dev/roadmap):

- Linux ([tracking issue](https://github.com/zed-industries/zed/issues/7015))
- Windows ([tracking issue](https://github.com/zed-industries/zed/issues/5394))
- Web ([tracking issue](https://github.com/zed-industries/zed/issues/5396))

For macOS users, you can also install Zed using [Homebrew](https://brew.sh/):

```sh
brew install --cask zed
```

Alternatively, to install the Preview release:

```sh
brew tap homebrew/cask-versions
brew install zed-preview
```

## Developing Zed

- [Building Zed for macOS](./docs/src/developing_zed__building_zed_macos.md)
- [Building Zed for Linux](./docs/src/developing_zed__building_zed_linux.md)
- [Building Zed for Windows](./docs/src/developing_zed__building_zed_windows.md)
- [Running Collaboration Locally](./docs/src/developing_zed__local_collaboration.md)

## Contributing

See [CONTRIBUTING.md](./CONTRIBUTING.md) for ways you can contribute to Zed.

Also... we're hiring! Check out our [jobs](https://zed.dev/jobs) page for open roles.

## Licensing

License information for third party dependencies must be correctly provided for CI to pass.

We use [`cargo-about`](https://github.com/EmbarkStudios/cargo-about) to automatically comply with open source licenses. If CI is failing, check the following:

- Is it showing a `no license specified` error for a crate you've created? If so, add `publish = false` under `[package]` in your crate's Cargo.toml.
- Is the error `failed to satisfy license requirements` for a dependency? If so, first determine what license the project has and whether this system is sufficient to comply with this license's requirements. If you're unsure, ask a lawyer. Once you've verified that this system is acceptable add the license's SPDX identifier to the `accepted` array in `script/licenses/zed-licenses.toml`.
- Is `cargo-about` unable to find the license for a dependency? If so, add a clarification field at the end of `script/licenses/zed-licenses.toml`, as specified in the [cargo-about book](https://embarkstudios.github.io/cargo-about/cli/generate/config.html#crate-configuration).
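
The first two licensing fixes listed above can be sketched in TOML as follows. This is a minimal illustration only: the crate name is hypothetical, and the `accepted` list assumes `script/licenses/zed-licenses.toml` follows cargo-about's standard configuration shape.

```toml
# Cargo.toml of a new internal crate: mark it as not published so
# cargo-about stops reporting "no license specified" for it.
[package]
name = "my_internal_crate" # hypothetical crate name
version = "0.1.0"
publish = false

# script/licenses/zed-licenses.toml: accept an additional SPDX identifier,
# after verifying the license actually permits this use.
accepted = [
    "Apache-2.0",
    "MIT",
    "Zlib", # newly accepted identifier (example)
]
```

For the third case, where cargo-about cannot locate a dependency's license text, the same file can carry a per-crate clarification entry (e.g. a `[some-crate.clarify]` table), as described in the cargo-about book linked above.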
@@ -1 +0,0 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-code-xml"><path d="m18 16 4-4-4-4"/><path d="m6 8-4 4 4 4"/><path d="m14.5 4-5 16"/></svg>
|
||||
|
Before Width: | Height: | Size: 293 B |
@@ -1,3 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M11.5 15C13.433 15 15 13.433 15 11.5C15 9.567 13.433 8 11.5 8C9.567 8 8 9.567 8 11.5C8 13.433 9.567 15 11.5 15Z" fill="black"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 240 B |
@@ -1,3 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M13.4662 14.9152C13.5801 15.0291 13.7648 15.0291 13.8787 14.9152L14.9145 13.8793C15.0284 13.7654 15.0284 13.5807 14.9145 13.4667L12.9483 11.5004L14.9145 9.53392C15.0285 9.42004 15.0285 9.23533 14.9145 9.12137L13.8787 8.08547C13.7648 7.97154 13.5801 7.97154 13.4662 8.08547L11.5 10.0519L9.53376 8.08545C9.41988 7.97152 9.23517 7.97152 9.12124 8.08545L8.08543 9.12136C7.97152 9.23533 7.97152 9.42004 8.08543 9.53392L10.0517 11.5004L8.08545 13.4667C7.97155 13.5807 7.97155 13.7654 8.08545 13.8793L9.12126 14.9152C9.23517 15.0292 9.41988 15.0292 9.53376 14.9152L11.5 12.9489L13.4662 14.9152Z" fill="black"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 756 B |
@@ -1,13 +0,0 @@
|
||||
<svg width="8" height="8" viewBox="0 0 8 8" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0_1803_28)">
|
||||
<path d="M0.5 2C0.5 1.17157 1.17157 0.5 2 0.5V0.5C2.82843 0.5 3.5 1.17157 3.5 2V2C3.5 2.82843 2.82843 3.5 2 3.5V3.5C1.17157 3.5 0.5 2.82843 0.5 2V2Z" fill="black" fill-opacity="0.3"/>
|
||||
<path d="M7.5 6C7.5 6.82843 6.82843 7.5 6 7.5V7.5C5.17157 7.5 4.5 6.82843 4.5 6V6C4.5 5.17157 5.17157 4.5 6 4.5V4.5C6.82843 4.5 7.5 5.17157 7.5 6V6Z" fill="black" fill-opacity="0.6"/>
|
||||
<path d="M2 7.5C1.17157 7.5 0.5 6.82843 0.5 6V6C0.5 5.17157 1.17157 4.5 2 4.5V4.5C2.82843 4.5 3.5 5.17157 3.5 6V6C3.5 6.82843 2.82843 7.5 2 7.5V7.5Z" fill="black" fill-opacity="0.8"/>
|
||||
<path d="M6 0.5C6.82843 0.5 7.5 1.17157 7.5 2V2C7.5 2.82843 6.82843 3.5 6 3.5V3.5C5.17157 3.5 4.5 2.82843 4.5 2V2C4.5 1.17157 5.17157 0.5 6 0.5V0.5Z" fill="black"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_1803_28">
|
||||
<rect width="8" height="8" fill="white"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 956 B |
@@ -1,3 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3 4L13 12" stroke="black" stroke-width="2" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 181 B |
@@ -1,8 +0,0 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3.30859 13.0703C3.80693 13.0703 4.21094 12.6663 4.21094 12.168C4.21094 11.6696 3.80693 11.2656 3.30859 11.2656C2.81025 11.2656 2.40625 11.6696 2.40625 12.168C2.40625 12.6663 2.81025 13.0703 3.30859 13.0703Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.53516 8.03849L4.10799 12.6055L2.51562 11.7584L4.94279 7.19141L6.53516 8.03849Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.38281 2.62443L4.93916 7.19141L3.33594 6.34432L5.77959 1.77734L7.38281 2.62443Z" fill="black"/>
|
||||
<path d="M6.5625 3.08984C7.06084 3.08984 7.46484 2.68585 7.46484 2.1875C7.46484 1.68915 7.06084 1.28516 6.5625 1.28516C6.06416 1.28516 5.66016 1.68915 5.66016 2.1875C5.66016 2.68585 6.06416 3.08984 6.5625 3.08984Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M10.882 1.31204C11.2842 1.41224 11.5664 1.7732 11.5664 2.18737V12.168H9.76084V5.8056L8.12938 8.87176L6.53516 8.02471L9.86653 1.76385C10.0611 1.39816 10.4799 1.21184 10.882 1.31204Z" fill="black"/>
|
||||
<path d="M10.6641 13.0703C11.1624 13.0703 11.5664 12.6663 11.5664 12.168C11.5664 11.6696 11.1624 11.2656 10.6641 11.2656C10.1657 11.2656 9.76172 11.6696 9.76172 12.168C9.76172 12.6663 10.1657 13.0703 10.6641 13.0703Z" fill="black"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 1.3 KiB |
@@ -1,15 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g opacity="0.5">
|
||||
<path d="M3.78125 14.9375C4.35078 14.9375 4.8125 14.4758 4.8125 13.9062C4.8125 13.3367 4.35078 12.875 3.78125 12.875C3.21172 12.875 2.75 13.3367 2.75 13.9062C2.75 14.4758 3.21172 14.9375 3.78125 14.9375Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.46875 9.18684L4.69484 14.4062L2.875 13.4382L5.64891 8.21875L7.46875 9.18684Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.4375 2.99935L5.64475 8.21875L3.8125 7.25066L6.60525 2.03125L8.4375 2.99935Z" fill="white"/>
|
||||
<path d="M7.5 3.53125C8.06953 3.53125 8.53125 3.06954 8.53125 2.5C8.53125 1.93046 8.06953 1.46875 7.5 1.46875C6.93047 1.46875 6.46875 1.93046 6.46875 2.5C6.46875 3.06954 6.93047 3.53125 7.5 3.53125Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.4366 1.49947C12.8962 1.61399 13.2188 2.02651 13.2188 2.49985V13.9063H11.1552V6.63497L9.29072 10.1392L7.46875 9.17109L11.276 2.01583C11.4984 1.59789 11.977 1.38496 12.4366 1.49947Z" fill="white"/>
|
||||
<path d="M12.1875 14.9375C12.757 14.9375 13.2188 14.4758 13.2188 13.9062C13.2188 13.3367 12.757 12.875 12.1875 12.875C11.618 12.875 11.1562 13.3367 11.1562 13.9062C11.1562 14.4758 11.618 14.9375 12.1875 14.9375Z" fill="white"/>
|
||||
</g>
|
||||
<g>
|
||||
<path d="M0.906311 6.42261L1.75155 4.60999L15.3462 10.9493L14.5009 12.7619L0.906311 6.42261Z" fill="white"/>
|
||||
<circle cx="14.7841" cy="11.7906" r="1" transform="rotate(-65 14.7841 11.7906)" fill="white"/>
|
||||
<circle cx="1.32893" cy="5.51631" r="1" transform="rotate(-65 1.32893 5.51631)" fill="white"/>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 1.6 KiB |
@@ -1,11 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g opacity="0.5">
|
||||
<path d="M3.78125 14.9375C4.35078 14.9375 4.8125 14.4758 4.8125 13.9062C4.8125 13.3367 4.35078 12.875 3.78125 12.875C3.21172 12.875 2.75 13.3367 2.75 13.9062C2.75 14.4758 3.21172 14.9375 3.78125 14.9375Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.46875 9.18684L4.69484 14.4062L2.875 13.4382L5.64891 8.21875L7.46875 9.18684Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.4375 2.99935L5.64475 8.21875L3.8125 7.25066L6.60525 2.03125L8.4375 2.99935Z" fill="white"/>
|
||||
<path d="M7.5 3.53125C8.06953 3.53125 8.53125 3.06954 8.53125 2.5C8.53125 1.93046 8.06953 1.46875 7.5 1.46875C6.93047 1.46875 6.46875 1.93046 6.46875 2.5C6.46875 3.06954 6.93047 3.53125 7.5 3.53125Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.4366 1.49947C12.8962 1.61399 13.2188 2.02651 13.2188 2.49985V13.9063H11.1552V6.63497L9.29072 10.1392L7.46875 9.17109L11.276 2.01583C11.4984 1.59789 11.977 1.38496 12.4366 1.49947Z" fill="white"/>
|
||||
<path d="M12.1875 14.9375C12.757 14.9375 13.2188 14.4758 13.2188 13.9062C13.2188 13.3367 12.757 12.875 12.1875 12.875C11.618 12.875 11.1562 13.3367 11.1562 13.9062C11.1562 14.4758 11.618 14.9375 12.1875 14.9375Z" fill="white"/>
|
||||
</g>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M14.6847 15.9265C14.7823 16.0241 14.9406 16.0241 15.0382 15.9265L15.9259 15.0387C16.0235 14.9411 16.0235 14.7828 15.9259 14.6851L14.2408 12.9999L15.9259 11.3146C16.0236 11.217 16.0236 11.0587 15.9259 10.961L15.0382 10.0733C14.9406 9.97561 14.7823 9.97561 14.6847 10.0733L12.9996 11.7585L11.3145 10.0732C11.2169 9.97559 11.0586 9.97559 10.9609 10.0732L10.0732 10.961C9.97559 11.0587 9.97559 11.217 10.0732 11.3146L11.7584 12.9999L10.0732 14.6851C9.97562 14.7828 9.97562 14.9411 10.0732 15.0387L10.9609 15.9265C11.0586 16.0242 11.2169 16.0242 11.3145 15.9265L12.9996 14.2413L14.6847 15.9265Z" fill="white"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 1.9 KiB |
@@ -1,11 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g opacity="0.5">
|
||||
<path d="M3.78125 14.9375C4.35078 14.9375 4.8125 14.4758 4.8125 13.9062C4.8125 13.3367 4.35078 12.875 3.78125 12.875C3.21172 12.875 2.75 13.3367 2.75 13.9062C2.75 14.4758 3.21172 14.9375 3.78125 14.9375Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.46875 9.18684L4.69484 14.4062L2.875 13.4382L5.64891 8.21875L7.46875 9.18684Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.4375 2.99935L5.64475 8.21875L3.8125 7.25066L6.60525 2.03125L8.4375 2.99935Z" fill="white"/>
|
||||
<path d="M7.5 3.53125C8.06953 3.53125 8.53125 3.06954 8.53125 2.5C8.53125 1.93046 8.06953 1.46875 7.5 1.46875C6.93047 1.46875 6.46875 1.93046 6.46875 2.5C6.46875 3.06954 6.93047 3.53125 7.5 3.53125Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M12.4366 1.49947C12.8962 1.61399 13.2188 2.02651 13.2188 2.49985V13.9063H11.1552V6.63497L9.29072 10.1392L7.46875 9.17109L11.276 2.01583C11.4984 1.59789 11.977 1.38496 12.4366 1.49947Z" fill="white"/>
|
||||
<path d="M12.1875 14.9375C12.757 14.9375 13.2188 14.4758 13.2188 13.9062C13.2188 13.3367 12.757 12.875 12.1875 12.875C11.618 12.875 11.1562 13.3367 11.1562 13.9062C11.1562 14.4758 11.618 14.9375 12.1875 14.9375Z" fill="white"/>
|
||||
</g>
|
||||
<circle cx="13" cy="13" r="3" fill="white"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 1.3 KiB |
@@ -1,5 +0,0 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M7 1.75L5.88467 5.14092C5.82759 5.31446 5.73055 5.47218 5.60136 5.60136C5.47218 5.73055 5.31446 5.82759 5.14092 5.88467L1.75 7L5.14092 8.11533C5.31446 8.17241 5.47218 8.26945 5.60136 8.39864C5.73055 8.52782 5.82759 8.68554 5.88467 8.85908L7 12.25L8.11533 8.85908C8.17241 8.68554 8.26945 8.52782 8.39864 8.39864C8.52782 8.26945 8.68554 8.17241 8.85908 8.11533L12.25 7L8.85908 5.88467C8.68554 5.82759 8.52782 5.73055 8.39864 5.60136C8.26945 5.47218 8.17241 5.31446 8.11533 5.14092L7 1.75Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M2.91667 1.75V4.08333M1.75 2.91667H4.08333" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11.0833 9.91667V12.25M9.91667 11.0833H12.25" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 1.0 KiB |

@@ -138,8 +138,6 @@
"ctrl-alt-space": "editor::ShowCharacterPalette",
"ctrl-;": "editor::ToggleLineNumbers",
"ctrl-k ctrl-r": "editor::RevertSelectedHunks",
"ctrl-'": "editor::ToggleHunkDiff",
"ctrl-\"": "editor::ExpandAllHunkDiffs",
"ctrl-alt-g b": "editor::ToggleGitBlame"
}
},

@@ -159,8 +159,6 @@
"ctrl-cmd-space": "editor::ShowCharacterPalette",
"cmd-;": "editor::ToggleLineNumbers",
"cmd-alt-z": "editor::RevertSelectedHunks",
"cmd-'": "editor::ToggleHunkDiff",
"cmd-\"": "editor::ExpandAllHunkDiffs",
"cmd-alt-g b": "editor::ToggleGitBlame"
}
},

@@ -39,13 +39,13 @@
"cmd-shift-left": "editor::SelectToBeginningOfLine",
"cmd-shift-right": "editor::SelectToEndOfLine",
"alt-shift-left": [
"editor::SelectToPreviousWordStart",
"editor::SelectToBeginningOfLine",
{
"stop_at_soft_wraps": true
}
],
"alt-shift-right": [
"editor::SelectToNextWordEnd",
"editor::SelectToEndOfLine",
{
"stop_at_soft_wraps": true
}

@@ -117,9 +117,6 @@
}
}
],
"m": ["vim::PushOperator", "Mark"],
"'": ["vim::PushOperator", { "Jump": { "line": true } }],
"`": ["vim::PushOperator", { "Jump": { "line": false } }],
";": "vim::RepeatFind",
",": "vim::RepeatFindReversed",
"ctrl-o": "pane::GoBack",
@@ -131,7 +128,6 @@
"shift-v": "vim::ToggleVisualLine",
"ctrl-v": "vim::ToggleVisualBlock",
"ctrl-q": "vim::ToggleVisualBlock",
"shift-k": "editor::Hover",
"shift-r": "vim::ToggleReplace",
"0": "vim::StartOfLine", // When no number operator present, use start of line motion
"ctrl-f": "vim::PageDown",
@@ -240,9 +236,6 @@
],
"g ]": "editor::GoToDiagnostic",
"g [": "editor::GoToPrevDiagnostic",
"g i": ["workspace::SendKeystrokes", "` ^ i"],
"g ,": "vim::ChangeListNewer",
"g ;": "vim::ChangeListOlder",
"shift-h": "vim::WindowTop",
"shift-m": "vim::WindowMiddle",
"shift-l": "vim::WindowBottom",

@@ -12,8 +12,8 @@
"base_keymap": "VSCode",
// Features that can be globally enabled or disabled
"features": {
// Which inline completion provider to use.
"inline_completion_provider": "copilot"
// Show Copilot icon in status bar
"copilot": true
},
// The name of a font to use for rendering text in the editor
"buffer_font_family": "Zed Mono",
@@ -299,9 +299,7 @@
// The list of language servers to use (or disable) for all languages.
//
// This is typically customized on a per-language basis.
"language_servers": [
"..."
],
"language_servers": ["..."],
// When to automatically save edited buffers. This setting can
// take four values.
//
@@ -316,8 +314,6 @@
"autosave": "off",
// Settings related to the editor's tab bar.
"tab_bar": {
// Whether or not to show the tab bar in the editor
"show": true,
// Whether or not to show the navigation history buttons.
"show_nav_history_buttons": true
},
@@ -372,13 +368,11 @@
//
// 1. Do not soft wrap.
// "soft_wrap": "none",
// 2. Prefer a single line generally, unless an overly long line is encountered.
// "soft_wrap": "prefer_line",
// 3. Soft wrap lines that overflow the editor:
// 2. Soft wrap lines that overflow the editor:
// "soft_wrap": "editor_width",
// 4. Soft wrap lines at the preferred line length
// 3. Soft wrap lines at the preferred line length
// "soft_wrap": "preferred_line_length",
"soft_wrap": "prefer_line",
"soft_wrap": "none",
// The column at which to soft-wrap lines, for buffers where soft-wrap
// is enabled.
"preferred_line_length": 80,
@@ -434,9 +428,7 @@
"copilot": {
// The set of glob patterns for which copilot should be disabled
// in any matching file.
"disabled_globs": [
".env"
]
"disabled_globs": [".env"]
},
// Settings specific to journaling
"journal": {
@@ -547,12 +539,7 @@
// Default directories to search for virtual environments, relative
// to the current working directory. We recommend overriding this
// in your project's settings, rather than globally.
"directories": [
".env",
"env",
".venv",
"venv"
],
"directories": [".env", "env", ".venv", "venv"],
// Can also be 'csh', 'fish', and `nushell`
"activate_script": "default"
}
@@ -601,14 +588,6 @@
"C": {
"format_on_save": "off"
},
"Elixir": {
"language_servers": [
"elixir-ls",
"!next-ls",
"!lexical",
"..."
]
},
"Gleam": {
"tab_size": 2
},
@@ -617,20 +596,9 @@
"source.organizeImports": true
}
},
"HEEX": {
"language_servers": [
"elixir-ls",
"!next-ls",
"!lexical",
"..."
]
},
"Make": {
"hard_tabs": true
},
"Markdown": {
"format_on_save": "off"
},
"Prisma": {
"tab_size": 2
}
@@ -23,6 +23,7 @@ language.workspace = true
project.workspace = true
smallvec.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true

[dev-dependencies]
@@ -12,6 +12,7 @@ use project::{LanguageServerProgress, Project};
|
||||
use smallvec::SmallVec;
|
||||
use std::{cmp::Reverse, fmt::Write, sync::Arc};
|
||||
use ui::prelude::*;
|
||||
use util::ResultExt;
|
||||
use workspace::{item::ItemHandle, StatusItemView, Workspace};
|
||||
|
||||
actions!(activity_indicator, [ShowErrorMessage]);
|
||||
@@ -81,37 +82,27 @@ impl ActivityIndicator {
|
||||
}
|
||||
});
|
||||
|
||||
cx.subscribe(&this, move |_, _, event, cx| match event {
|
||||
cx.subscribe(&this, move |workspace, _, event, cx| match event {
|
||||
Event::ShowError { lsp_name, error } => {
|
||||
let create_buffer = project.update(cx, |project, cx| project.create_buffer(cx));
|
||||
let project = project.clone();
|
||||
let error = error.clone();
|
||||
let lsp_name = lsp_name.clone();
|
||||
cx.spawn(|workspace, mut cx| async move {
|
||||
let buffer = create_buffer.await?;
|
||||
buffer.update(&mut cx, |buffer, cx| {
|
||||
if let Some(buffer) = project
|
||||
.update(cx, |project, cx| project.create_buffer(error, None, cx))
|
||||
.log_err()
|
||||
{
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
[(
|
||||
0..0,
|
||||
format!("Language server error: {}\n\n{}", lsp_name, error),
|
||||
)],
|
||||
[(0..0, format!("Language server error: {}\n\n", lsp_name))],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
})?;
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(cx.new_view(|cx| {
|
||||
Editor::for_buffer(buffer, Some(project.clone()), cx)
|
||||
})),
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(
|
||||
cx.new_view(|cx| Editor::for_buffer(buffer, Some(project.clone()), cx)),
|
||||
),
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
@@ -281,14 +272,11 @@ impl ActivityIndicator {
|
||||
message: "Installing Zed update…".to_string(),
|
||||
on_click: None,
|
||||
},
|
||||
AutoUpdateStatus::Updated { binary_path } => Content {
|
||||
AutoUpdateStatus::Updated => Content {
|
||||
icon: None,
|
||||
message: "Click to restart and update Zed".to_string(),
|
||||
on_click: Some(Arc::new({
|
||||
let restart = workspace::Restart {
|
||||
binary_path: Some(binary_path.clone()),
|
||||
};
|
||||
move |_, cx| workspace::restart(&restart, cx)
|
||||
on_click: Some(Arc::new(|_, cx| {
|
||||
workspace::restart(&Default::default(), cx)
|
||||
})),
|
||||
},
|
||||
AutoUpdateStatus::Errored => Content {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{convert::TryFrom, sync::Arc};
|
||||
use std::convert::TryFrom;
|
||||
use util::http::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
|
||||
@@ -141,7 +141,7 @@ pub enum TextDelta {
|
||||
}
|
||||
|
||||
pub async fn stream_completion(
|
||||
client: Arc<dyn HttpClient>,
|
||||
client: &dyn HttpClient,
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
request: Request,
|
||||
|
||||
@@ -106,11 +106,6 @@ impl SavedConversationMetadata {
|
||||
.and_then(|name| name.to_str())
|
||||
.zip(metadata)
|
||||
{
|
||||
// This is used to filter out conversations saved by the new assistant.
|
||||
if !re.is_match(file_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let title = re.replace(file_name, "");
|
||||
conversations.push(Self {
|
||||
title: title.into_owned(),
|
||||
|
||||
@@ -19,20 +19,17 @@ stories = ["dep:story"]
|
||||
anyhow.workspace = true
|
||||
assistant_tooling.workspace = true
|
||||
client.workspace = true
|
||||
chrono.workspace = true
|
||||
collections.workspace = true
|
||||
editor.workspace = true
|
||||
feature_flags.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
nanoid.workspace = true
|
||||
open_ai.workspace = true
|
||||
picker.workspace = true
|
||||
project.workspace = true
|
||||
regex.workspace = true
|
||||
rich_text.workspace = true
|
||||
schemars.workspace = true
|
||||
semantic_index.workspace = true
|
||||
|
||||
@@ -1 +0,0 @@
> Give me a comprehensive list of all the elements defined in my project using the following query: `impl Element for {}, impl<T: 'static> Element for {}, impl IntoElement for {})`
@@ -1 +0,0 @@
> What are all the places we define a new gpui element in my project? (impl Element for {})
@@ -1,3 +0,0 @@
Use tools frequently, especially when referring to files and code. The Zed editor we're working in can show me files directly when you add annotations. Be concise in chat, bountiful in tool calling.

Teach me everything you can about how zed loads settings. Please annotate the code inline.
@@ -1 +0,0 @@
> Can you tell me what the assistant2 crate is for in my project? Tell me in 100 words or less.
378
crates/assistant2/examples/chat_with_functions.rs
Normal file
@@ -0,0 +1,378 @@
|
||||
//! This example creates a basic Chat UI with a function for rolling a die.
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
use assets::Assets;
|
||||
use assistant2::AssistantPanel;
|
||||
use assistant_tooling::{LanguageModelTool, ToolRegistry};
|
||||
use client::{Client, UserStore};
|
||||
use fs::Fs;
|
||||
use futures::StreamExt as _;
|
||||
use gpui::{actions, AnyElement, App, AppContext, KeyBinding, Model, Task, View, WindowOptions};
|
||||
use language::LanguageRegistry;
|
||||
use project::Project;
|
||||
use rand::Rng;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{KeymapFile, DEFAULT_KEYMAP_PATH};
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use theme::LoadThemes;
|
||||
use ui::{div, prelude::*, Render};
|
||||
use util::ResultExt as _;
|
||||
|
||||
actions!(example, [Quit]);
|
||||
|
||||
struct RollDiceTool {}
|
||||
|
||||
impl RollDiceTool {
|
||||
fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, JsonSchema, Clone)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
enum Die {
|
||||
D6 = 6,
|
||||
D20 = 20,
|
||||
}
|
||||
|
||||
impl Die {
|
||||
fn into_str(&self) -> &'static str {
|
||||
match self {
|
||||
Die::D6 => "d6",
|
||||
Die::D20 => "d20",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, JsonSchema, Clone)]
|
||||
struct DiceParams {
|
||||
/// The number of dice to roll.
|
||||
num_dice: u8,
|
||||
/// Which die to roll. Defaults to a d6 if not provided.
|
||||
die_type: Option<Die>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct DieRoll {
|
||||
die: Die,
|
||||
roll: u8,
|
||||
}
|
||||
|
||||
impl DieRoll {
|
||||
fn render(&self) -> AnyElement {
|
||||
match self.die {
|
||||
Die::D6 => {
|
||||
let face = match self.roll {
|
||||
6 => div().child("⚅"),
|
||||
5 => div().child("⚄"),
|
||||
4 => div().child("⚃"),
|
||||
3 => div().child("⚂"),
|
||||
2 => div().child("⚁"),
|
||||
1 => div().child("⚀"),
|
||||
_ => div().child("😅"),
|
||||
};
|
||||
face.text_3xl().into_any_element()
|
||||
}
|
||||
_ => div()
|
||||
.child(format!("{}", self.roll))
|
||||
.text_3xl()
|
||||
.into_any_element(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct DiceRoll {
|
||||
rolls: Vec<DieRoll>,
|
||||
}
|
||||
|
||||
pub struct DiceView {
|
||||
result: Result<DiceRoll>,
|
||||
}
|
||||
|
||||
impl Render for DiceView {
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let output = match &self.result {
|
||||
Ok(output) => output,
|
||||
Err(_) => return "Somehow dice failed 🎲".into_any_element(),
|
||||
};
|
||||
|
||||
h_flex()
|
||||
.children(
|
||||
output
|
||||
.rolls
|
||||
.iter()
|
||||
.map(|roll| div().p_2().child(roll.render())),
|
||||
)
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageModelTool for RollDiceTool {
|
||||
type Input = DiceParams;
|
||||
type Output = DiceRoll;
|
||||
type View = DiceView;
|
||||
|
||||
fn name(&self) -> String {
|
||||
"roll_dice".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Rolls N many dice and returns the results.".to_string()
|
||||
}
|
||||
|
||||
fn execute(
|
||||
&self,
|
||||
input: &Self::Input,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<gpui::Result<Self::Output>> {
|
||||
let rolls = (0..input.num_dice)
|
||||
.map(|_| {
|
||||
let die_type = input.die_type.as_ref().unwrap_or(&Die::D6).clone();
|
||||
|
||||
DieRoll {
|
||||
die: die_type.clone(),
|
||||
roll: rand::thread_rng().gen_range(1..=die_type as u8),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
return Task::ready(Ok(DiceRoll { rolls }));
|
||||
}
|
||||
|
||||
fn output_view(
|
||||
_tool_call_id: String,
|
||||
_input: Self::Input,
|
||||
result: Result<Self::Output>,
|
||||
cx: &mut WindowContext,
|
||||
) -> gpui::View<Self::View> {
|
||||
cx.new_view(|_cx| DiceView { result })
|
||||
}
|
||||
|
||||
fn format(_: &Self::Input, output: &Result<Self::Output>) -> String {
|
||||
let output = match output {
|
||||
Ok(output) => output,
|
||||
Err(_) => return "Somehow dice failed 🎲".to_string(),
|
||||
};
|
||||
|
||||
let mut result = String::new();
|
||||
for roll in &output.rolls {
|
||||
let die = &roll.die;
|
||||
result.push_str(&format!("{}: {}\n", die.into_str(), roll.roll));
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
struct FileBrowserTool {
|
||||
fs: Arc<dyn Fs>,
|
||||
root_dir: PathBuf,
|
||||
}
|
||||
|
||||
impl FileBrowserTool {
|
||||
fn new(fs: Arc<dyn Fs>, root_dir: PathBuf) -> Self {
|
||||
Self { fs, root_dir }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
struct FileBrowserParams {
|
||||
command: FileBrowserCommand,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
enum FileBrowserCommand {
|
||||
Ls { path: PathBuf },
|
||||
Cat { path: PathBuf },
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
enum FileBrowserOutput {
|
||||
Ls { entries: Vec<String> },
|
||||
Cat { content: String },
|
||||
}
|
||||
|
||||
pub struct FileBrowserView {
|
||||
result: Result<FileBrowserOutput>,
|
||||
}
|
||||
|
||||
impl Render for FileBrowserView {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let Ok(output) = self.result.as_ref() else {
|
||||
return h_flex().child("Failed to perform operation");
|
||||
};
|
||||
|
||||
match output {
|
||||
FileBrowserOutput::Ls { entries } => v_flex().children(
|
||||
entries
|
||||
.into_iter()
|
||||
.map(|entry| h_flex().text_ui(cx).child(entry.clone())),
|
||||
),
|
||||
FileBrowserOutput::Cat { content } => h_flex().child(content.clone()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageModelTool for FileBrowserTool {
|
||||
type Input = FileBrowserParams;
|
||||
type Output = FileBrowserOutput;
|
||||
type View = FileBrowserView;
|
||||
|
||||
fn name(&self) -> String {
|
||||
"file_browser".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"A tool for browsing the filesystem.".to_string()
|
||||
}
|
||||
|
||||
fn execute(
|
||||
&self,
|
||||
input: &Self::Input,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<gpui::Result<Self::Output>> {
|
||||
cx.spawn({
|
||||
let fs = self.fs.clone();
|
||||
let root_dir = self.root_dir.clone();
|
||||
let input = input.clone();
|
||||
|_cx| async move {
|
||||
match input.command {
|
||||
FileBrowserCommand::Ls { path } => {
|
||||
let path = root_dir.join(path);
|
||||
|
||||
let mut output = fs.read_dir(&path).await?;
|
||||
|
||||
let mut entries = Vec::new();
|
||||
while let Some(entry) = output.next().await {
|
||||
let entry = entry?;
|
||||
entries.push(entry.display().to_string());
|
||||
}
|
||||
|
||||
Ok(FileBrowserOutput::Ls { entries })
|
||||
}
|
||||
FileBrowserCommand::Cat { path } => {
|
||||
let path = root_dir.join(path);
|
||||
|
||||
let output = fs.load(&path).await?;
|
||||
|
||||
Ok(FileBrowserOutput::Cat { content: output })
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn output_view(
|
||||
_tool_call_id: String,
|
||||
_input: Self::Input,
|
||||
result: Result<Self::Output>,
|
||||
cx: &mut WindowContext,
|
||||
) -> gpui::View<Self::View> {
|
||||
cx.new_view(|_cx| FileBrowserView { result })
|
||||
}
|
||||
|
||||
fn format(input: &Self::Input, output: &Result<Self::Output>) -> String {
|
||||
let Ok(output) = output else {
|
||||
return "Failed to perform command: {input:?}".to_string();
|
||||
};
|
||||
|
||||
match output {
|
||||
FileBrowserOutput::Ls { entries } => entries.join("\n"),
|
||||
FileBrowserOutput::Cat { content } => content.to_owned(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
env_logger::init();
|
||||
App::new().with_assets(Assets).run(|cx| {
|
||||
cx.bind_keys(Some(KeyBinding::new("cmd-q", Quit, None)));
|
||||
cx.on_action(|_: &Quit, cx: &mut AppContext| {
|
||||
cx.quit();
|
||||
});
|
||||
|
||||
settings::init(cx);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
editor::init(cx);
|
||||
theme::init(LoadThemes::JustBase, cx);
|
||||
Assets.load_fonts(cx).unwrap();
|
||||
KeymapFile::load_asset(DEFAULT_KEYMAP_PATH, cx).unwrap();
|
||||
client::init_settings(cx);
|
||||
release_channel::init("0.130.0", cx);
|
||||
|
||||
let client = Client::production(cx);
|
||||
{
|
||||
let client = client.clone();
|
||||
cx.spawn(|cx| async move { client.authenticate_and_connect(false, &cx).await })
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
assistant2::init(client.clone(), cx);
|
||||
|
||||
let language_registry = Arc::new(LanguageRegistry::new(
|
||||
Task::ready(()),
|
||||
cx.background_executor().clone(),
|
||||
));
|
||||
|
||||
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
|
||||
let node_runtime = node_runtime::RealNodeRuntime::new(client.http_client());
|
||||
languages::init(language_registry.clone(), node_runtime, cx);
|
||||
|
||||
cx.spawn(|cx| async move {
|
||||
cx.update(|cx| {
|
||||
let fs = Arc::new(fs::RealFs::new(None));
|
||||
let cwd = std::env::current_dir().expect("Failed to get current working directory");
|
||||
|
||||
cx.open_window(WindowOptions::default(), |cx| {
|
||||
let mut tool_registry = ToolRegistry::new();
|
||||
tool_registry
|
||||
.register(RollDiceTool::new(), cx)
|
||||
.context("failed to register DummyTool")
|
||||
.log_err();
|
||||
|
||||
tool_registry
|
||||
.register(FileBrowserTool::new(fs, cwd), cx)
|
||||
.context("failed to register FileBrowserTool")
|
||||
.log_err();
|
||||
|
||||
let tool_registry = Arc::new(tool_registry);
|
||||
|
||||
println!("Tools registered");
|
||||
for definition in tool_registry.definitions() {
|
||||
println!("{}", definition);
|
||||
}
|
||||
|
||||
cx.new_view(|cx| Example::new(language_registry, tool_registry, user_store, cx))
|
||||
});
|
||||
cx.activate(true);
|
||||
})
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
})
|
||||
}
|
||||
|
||||
struct Example {
|
||||
assistant_panel: View<AssistantPanel>,
|
||||
}
|
||||
|
||||
impl Example {
|
||||
fn new(
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
tool_registry: Arc<ToolRegistry>,
|
||||
user_store: Model<UserStore>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
Self {
|
||||
assistant_panel: cx.new_view(|cx| {
|
||||
AssistantPanel::new(language_registry, tool_registry, user_store, None, cx)
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for Example {
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl ui::prelude::IntoElement {
|
||||
div().size_full().child(self.assistant_panel.clone())
|
||||
}
|
||||
}
|
||||
@@ -1,3 +0,0 @@
mod active_file;

pub use active_file::*;
@@ -1,144 +0,0 @@
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tooling::{AttachmentOutput, LanguageModelAttachment, ProjectContext};
|
||||
use editor::Editor;
|
||||
use gpui::{Render, Task, View, WeakModel, WeakView};
|
||||
use language::Buffer;
|
||||
use project::ProjectPath;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use ui::{prelude::*, ButtonLike, Tooltip, WindowContext};
|
||||
use util::maybe;
|
||||
use workspace::Workspace;
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct ActiveEditorAttachment {
|
||||
#[serde(skip)]
|
||||
buffer: Option<WeakModel<Buffer>>,
|
||||
path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
pub struct FileAttachmentView {
|
||||
project_path: Option<ProjectPath>,
|
||||
buffer: Option<WeakModel<Buffer>>,
|
||||
error: Option<anyhow::Error>,
|
||||
}
|
||||
|
||||
impl Render for FileAttachmentView {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
if let Some(error) = &self.error {
|
||||
return div().child(error.to_string()).into_any_element();
|
||||
}
|
||||
|
||||
let filename: SharedString = self
|
||||
.project_path
|
||||
.as_ref()
|
||||
.and_then(|p| p.path.file_name()?.to_str())
|
||||
.unwrap_or("Untitled")
|
||||
.to_string()
|
||||
.into();
|
||||
|
||||
ButtonLike::new("file-attachment")
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.rounded_md()
|
||||
.child(ui::Icon::new(IconName::File))
|
||||
.child(filename.clone()),
|
||||
)
|
||||
.tooltip(move |cx| Tooltip::with_meta("File Attached", None, filename.clone(), cx))
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
impl AttachmentOutput for FileAttachmentView {
|
||||
fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String {
|
||||
if let Some(path) = &self.project_path {
|
||||
project.add_file(path.clone());
|
||||
return format!("current file: {}", path.path.display());
|
||||
}
|
||||
|
||||
if let Some(buffer) = self.buffer.as_ref().and_then(|buffer| buffer.upgrade()) {
|
||||
return format!("current untitled buffer text:\n{}", buffer.read(cx).text());
|
||||
}
|
||||
|
||||
String::new()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ActiveEditorAttachmentTool {
|
||||
workspace: WeakView<Workspace>,
|
||||
}
|
||||
|
||||
impl ActiveEditorAttachmentTool {
|
||||
pub fn new(workspace: WeakView<Workspace>, _cx: &mut WindowContext) -> Self {
|
||||
Self { workspace }
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageModelAttachment for ActiveEditorAttachmentTool {
|
||||
type Output = ActiveEditorAttachment;
|
||||
type View = FileAttachmentView;
|
||||
|
||||
fn name(&self) -> Arc<str> {
|
||||
"active-editor-attachment".into()
|
||||
}
|
||||
|
||||
fn run(&self, cx: &mut WindowContext) -> Task<Result<ActiveEditorAttachment>> {
|
||||
Task::ready(maybe!({
|
||||
let active_buffer = self
|
||||
.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace
|
||||
.active_item(cx)
|
||||
.and_then(|item| Some(item.act_as::<Editor>(cx)?.read(cx).buffer().clone()))
|
||||
})?
|
||||
.ok_or_else(|| anyhow!("no active buffer"))?;
|
||||
|
||||
let buffer = active_buffer.read(cx);
|
||||
|
||||
if let Some(buffer) = buffer.as_singleton() {
|
||||
let path = project::File::from_dyn(buffer.read(cx).file())
|
||||
.and_then(|file| file.worktree.read(cx).absolutize(&file.path).ok());
|
||||
return Ok(ActiveEditorAttachment {
|
||||
buffer: Some(buffer.downgrade()),
|
||||
path,
|
||||
});
|
||||
} else {
|
||||
Err(anyhow!("no active buffer"))
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
fn view(
|
||||
&self,
|
||||
output: Result<ActiveEditorAttachment>,
|
||||
cx: &mut WindowContext,
|
||||
) -> View<Self::View> {
|
||||
let error;
|
||||
let project_path;
|
||||
let buffer;
|
||||
match output {
|
||||
Ok(output) => {
|
||||
error = None;
|
||||
let workspace = self.workspace.upgrade().unwrap();
|
||||
let project = workspace.read(cx).project();
|
||||
project_path = output
|
||||
.path
|
||||
.and_then(|path| project.read(cx).project_path_for_absolute_path(&path, cx));
|
||||
buffer = output.buffer;
|
||||
}
|
||||
Err(err) => {
|
||||
error = Some(err);
|
||||
buffer = None;
|
||||
project_path = None;
|
||||
}
|
||||
}
|
||||
cx.new_view(|_cx| FileAttachmentView {
|
||||
project_path,
|
||||
buffer,
|
||||
error,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -33,7 +33,7 @@ impl CompletionProvider {
|
||||
messages: Vec<CompletionMessage>,
|
||||
stop: Vec<String>,
|
||||
temperature: f32,
|
||||
tools: Vec<ToolFunctionDefinition>,
|
||||
tools: &[ToolFunctionDefinition],
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<proto::LanguageModelResponseMessage>>>>
|
||||
{
|
||||
self.0.complete(model, messages, stop, temperature, tools)
|
||||
@@ -51,7 +51,7 @@ pub trait CompletionProviderBackend: 'static {
|
||||
messages: Vec<CompletionMessage>,
|
||||
stop: Vec<String>,
|
||||
temperature: f32,
|
||||
tools: Vec<ToolFunctionDefinition>,
|
||||
tools: &[ToolFunctionDefinition],
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<proto::LanguageModelResponseMessage>>>>;
|
||||
}
|
||||
|
||||
@@ -80,7 +80,7 @@ impl CompletionProviderBackend for CloudCompletionProvider {
|
||||
messages: Vec<CompletionMessage>,
|
||||
stop: Vec<String>,
|
||||
temperature: f32,
|
||||
tools: Vec<ToolFunctionDefinition>,
|
||||
tools: &[ToolFunctionDefinition],
|
||||
) -> BoxFuture<'static, Result<BoxStream<'static, Result<proto::LanguageModelResponseMessage>>>>
|
||||
{
|
||||
let client = self.client.clone();
|
||||
|
||||
@@ -1,90 +0,0 @@
|
||||
use std::cmp::Reverse;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use assistant_tooling::{SavedToolFunctionCall, SavedUserAttachment};
|
||||
use fs::Fs;
|
||||
use futures::StreamExt;
|
||||
use gpui::SharedString;
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::paths::CONVERSATIONS_DIR;
|
||||
|
||||
use crate::MessageId;
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SavedConversation {
|
||||
/// The schema version of the conversation.
|
||||
pub version: String,
|
||||
/// The title of the conversation, generated by the Assistant.
|
||||
pub title: String,
|
||||
pub messages: Vec<SavedChatMessage>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub enum SavedChatMessage {
|
||||
User {
|
||||
id: MessageId,
|
||||
body: String,
|
||||
attachments: Vec<SavedUserAttachment>,
|
||||
},
|
||||
Assistant {
|
||||
id: MessageId,
|
||||
messages: Vec<SavedAssistantMessagePart>,
|
||||
error: Option<SharedString>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SavedAssistantMessagePart {
|
||||
pub body: SharedString,
|
||||
pub tool_calls: Vec<SavedToolFunctionCall>,
|
||||
}
|
||||
|
||||
pub struct SavedConversationMetadata {
|
||||
pub title: String,
|
||||
pub path: PathBuf,
|
||||
pub mtime: chrono::DateTime<chrono::Local>,
|
||||
}
|
||||
|
||||
impl SavedConversationMetadata {
|
||||
pub async fn list(fs: Arc<dyn Fs>) -> Result<Vec<Self>> {
|
||||
fs.create_dir(&CONVERSATIONS_DIR).await?;
|
||||
|
||||
let mut paths = fs.read_dir(&CONVERSATIONS_DIR).await?;
|
||||
let mut conversations = Vec::new();
|
||||
while let Some(path) = paths.next().await {
|
||||
let path = path?;
|
||||
if path.extension() != Some(OsStr::new("json")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let pattern = r" - \d+.zed.\d.\d.\d.json$";
|
||||
let re = Regex::new(pattern).unwrap();
|
||||
|
||||
let metadata = fs.metadata(&path).await?;
|
||||
if let Some((file_name, metadata)) = path
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.zip(metadata)
|
||||
{
|
||||
// This is used to filter out conversations saved by the old assistant.
|
||||
if !re.is_match(file_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let title = re.replace(file_name, "");
|
||||
conversations.push(Self {
|
||||
title: title.into_owned(),
|
||||
path,
|
||||
mtime: metadata.mtime.into(),
|
||||
});
|
||||
}
|
||||
}
|
||||
conversations.sort_unstable_by_key(|conversation| Reverse(conversation.mtime));
|
||||
|
||||
Ok(conversations)
|
||||
}
|
||||
}
|
||||
@@ -1,196 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
|
||||
use gpui::{AppContext, DismissEvent, EventEmitter, FocusHandle, FocusableView, View, WeakView};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::saved_conversation::SavedConversationMetadata;
|
||||
|
||||
pub struct SavedConversations {
|
||||
focus_handle: FocusHandle,
|
||||
picker: Option<View<Picker<SavedConversationPickerDelegate>>>,
|
||||
}
|
||||
|
||||
impl EventEmitter<DismissEvent> for SavedConversations {}
|
||||
|
||||
impl FocusableView for SavedConversations {
|
||||
fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
|
||||
if let Some(picker) = self.picker.as_ref() {
|
||||
picker.focus_handle(cx)
|
||||
} else {
|
||||
self.focus_handle.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SavedConversations {
|
||||
pub fn new(cx: &mut ViewContext<Self>) -> Self {
|
||||
Self {
|
||||
focus_handle: cx.focus_handle(),
|
||||
picker: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(
|
||||
&mut self,
|
||||
saved_conversations: Vec<SavedConversationMetadata>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let delegate =
|
||||
SavedConversationPickerDelegate::new(cx.view().downgrade(), saved_conversations);
|
||||
self.picker = Some(cx.new_view(|cx| Picker::uniform_list(delegate, cx).modal(false)));
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for SavedConversations {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
v_flex()
|
||||
.w_full()
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.children(self.picker.clone())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SavedConversationPickerDelegate {
|
||||
view: WeakView<SavedConversations>,
|
||||
saved_conversations: Vec<SavedConversationMetadata>,
|
||||
selected_index: usize,
|
||||
matches: Vec<StringMatch>,
|
||||
}
|
||||
|
||||
impl SavedConversationPickerDelegate {
|
||||
pub fn new(
|
||||
weak_view: WeakView<SavedConversations>,
|
||||
saved_conversations: Vec<SavedConversationMetadata>,
|
||||
) -> Self {
|
||||
let matches = saved_conversations
|
||||
.iter()
|
||||
.map(|conversation| StringMatch {
|
||||
candidate_id: 0,
|
||||
score: 0.0,
|
||||
positions: Default::default(),
|
||||
string: conversation.title.clone(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self {
|
||||
view: weak_view,
|
||||
saved_conversations,
|
||||
selected_index: 0,
|
||||
matches,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for SavedConversationPickerDelegate {
|
||||
type ListItem = ui::ListItem;
|
||||
|
||||
fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc<str> {
|
||||
"Select saved conversation...".into()
|
||||
}
|
||||
|
||||
fn match_count(&self) -> usize {
|
||||
self.matches.len()
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
self.selected_index
|
||||
}
|
||||
|
||||
fn set_selected_index(&mut self, ix: usize, _cx: &mut ViewContext<Picker<Self>>) {
|
||||
self.selected_index = ix;
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
query: String,
|
||||
cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> gpui::Task<()> {
|
||||
let background_executor = cx.background_executor().clone();
|
||||
let candidates = self
|
||||
.saved_conversations
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(id, conversation)| {
|
||||
let text = conversation.title.clone();
|
||||
|
||||
StringMatchCandidate {
|
||||
id,
|
||||
char_bag: text.as_str().into(),
|
||||
string: text,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
let matches = if query.is_empty() {
|
||||
candidates
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(index, candidate)| StringMatch {
|
||||
candidate_id: index,
|
||||
string: candidate.string,
|
||||
positions: Vec::new(),
|
||||
score: 0.0,
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
match_strings(
|
||||
&candidates,
|
||||
&query,
|
||||
false,
|
||||
100,
|
||||
&Default::default(),
|
||||
background_executor,
|
||||
)
|
||||
.await
|
||||
};
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.delegate.matches = matches;
|
||||
this.delegate.selected_index = this
|
||||
.delegate
|
||||
.selected_index
|
||||
.min(this.delegate.matches.len().saturating_sub(1));
|
||||
})
|
||||
.log_err();
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _secondary: bool, cx: &mut ViewContext<Picker<Self>>) {
|
||||
if self.matches.is_empty() {
|
||||
self.dismissed(cx);
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO: Implement selecting a saved conversation.
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, cx: &mut ui::prelude::ViewContext<Picker<Self>>) {
|
||||
self.view
|
||||
.update(cx, |_, cx| cx.emit(DismissEvent))
|
||||
.log_err();
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
selected: bool,
|
||||
_cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let conversation_match = &self.matches[ix];
|
||||
let _conversation = &self.saved_conversations[conversation_match.candidate_id];
|
||||
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.selected(selected)
|
||||
.child(HighlightedLabel::new(
|
||||
conversation_match.string.clone(),
|
||||
conversation_match.positions.clone(),
|
||||
)),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,5 @@
mod annotate_code;
mod create_buffer;
mod project_index;

pub use annotate_code::*;
pub use create_buffer::*;
pub use project_index::*;
@@ -1,310 +0,0 @@
|
||||
use anyhow::Result;
|
||||
use assistant_tooling::{LanguageModelTool, ProjectContext, ToolOutput};
|
||||
use editor::{
|
||||
display_map::{BlockContext, BlockDisposition, BlockProperties, BlockStyle},
|
||||
Editor, MultiBuffer,
|
||||
};
|
||||
use futures::{channel::mpsc::UnboundedSender, StreamExt as _};
|
||||
use gpui::{prelude::*, AnyElement, AsyncWindowContext, Model, Task, View, WeakView};
|
||||
use language::ToPoint;
|
||||
use project::{search::SearchQuery, Project, ProjectPath};
|
||||
use schemars::JsonSchema;
|
||||
use serde::Deserialize;
|
||||
use std::path::Path;
|
||||
use ui::prelude::*;
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub struct AnnotationTool {
|
||||
workspace: WeakView<Workspace>,
|
||||
project: Model<Project>,
|
||||
}
|
||||
|
||||
impl AnnotationTool {
|
||||
pub fn new(workspace: WeakView<Workspace>, project: Model<Project>) -> Self {
|
||||
Self { workspace, project }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Deserialize, JsonSchema, Clone)]
|
||||
pub struct AnnotationInput {
|
||||
/// Name for this set of annotations
|
||||
#[serde(default = "default_title")]
|
||||
title: String,
|
||||
/// Excerpts from the file to show to the user.
|
||||
excerpts: Vec<Excerpt>,
|
||||
}
|
||||
|
||||
fn default_title() -> String {
|
||||
"Untitled".to_string()
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, JsonSchema, Clone)]
|
||||
struct Excerpt {
|
||||
/// Path to the file
|
||||
path: String,
|
||||
/// A short, distinctive string that appears in the file, used to define a location in the file.
|
||||
text_passage: String,
|
||||
/// Text to display above the code excerpt. All explanation of code should be included here.
|
||||
annotation: String,
|
||||
}
|
||||
|
||||
impl LanguageModelTool for AnnotationTool {
|
||||
type View = AnnotationResultView;
|
||||
|
||||
fn name(&self) -> String {
|
||||
"show_code_file_excerpts".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"
|
||||
Show and explain code from the current project
|
||||
Opens a buffer in a separate pane/tab, to the side of the conversation.
|
||||
The annotations are shown in the editor as block decorations.
|
||||
Many related excerpts can be shown at once.
|
||||
"
|
||||
.to_string()
|
||||
}
|
||||
|
||||
fn view(&self, cx: &mut WindowContext) -> View<Self::View> {
|
||||
cx.new_view(|cx| {
|
||||
let (tx, mut rx) = futures::channel::mpsc::unbounded();
|
||||
cx.spawn(|view, mut cx| async move {
|
||||
while let Some(excerpt) = rx.next().await {
|
||||
AnnotationResultView::add_excerpt(view.clone(), excerpt, &mut cx).await?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
|
||||
AnnotationResultView {
|
||||
project: self.project.clone(),
|
||||
workspace: self.workspace.clone(),
|
||||
tx,
|
||||
pending_excerpt: None,
|
||||
added_editor_to_workspace: false,
|
||||
editor: None,
|
||||
error: None,
|
||||
rendered_excerpt_count: 0,
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AnnotationResultView {
|
||||
workspace: WeakView<Workspace>,
|
||||
project: Model<Project>,
|
||||
pending_excerpt: Option<Excerpt>,
|
||||
added_editor_to_workspace: bool,
|
||||
editor: Option<View<Editor>>,
|
||||
tx: UnboundedSender<Excerpt>,
|
||||
error: Option<anyhow::Error>,
|
||||
rendered_excerpt_count: usize,
|
||||
}
|
||||
|
||||
impl AnnotationResultView {
|
||||
async fn add_excerpt(
|
||||
this: WeakView<Self>,
|
||||
excerpt: Excerpt,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) -> Result<()> {
|
||||
let project = this.update(cx, |this, _cx| this.project.clone())?;
|
||||
|
||||
let worktree_id = project.update(cx, |project, cx| {
|
||||
let worktree = project.worktrees().next()?;
|
||||
let worktree_id = worktree.read(cx).id();
|
||||
Some(worktree_id)
|
||||
})?;
|
||||
|
||||
let worktree_id = if let Some(worktree_id) = worktree_id {
|
||||
worktree_id
|
||||
} else {
|
||||
return Err(anyhow::anyhow!("No worktree found"));
|
||||
};
|
||||
|
||||
let buffer_task = project.update(cx, |project, cx| {
|
||||
project.open_buffer(
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: Path::new(&excerpt.path).into(),
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})?;
|
||||
|
||||
let buffer = match buffer_task.await {
|
||||
Ok(buffer) => buffer,
|
||||
Err(error) => {
|
||||
return this.update(cx, |this, cx| {
|
||||
this.error = Some(error);
|
||||
cx.notify();
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
let snapshot = buffer.update(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
let query = SearchQuery::text(&excerpt.text_passage, false, false, false, vec![], vec![])?;
|
||||
let matches = query.search(&snapshot, None).await;
|
||||
let Some(first_match) = matches.first() else {
|
||||
log::warn!(
|
||||
"text {:?} does not appear in '{}'",
|
||||
excerpt.text_passage,
|
||||
excerpt.path
|
||||
);
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
let mut start = first_match.start.to_point(&snapshot);
|
||||
start.column = 0;
|
||||
|
||||
if let Some(editor) = &this.editor {
|
||||
editor.update(cx, |editor, cx| {
|
||||
let ranges = editor.buffer().update(cx, |multibuffer, cx| {
|
||||
multibuffer.push_excerpts_with_context_lines(
|
||||
buffer.clone(),
|
||||
vec![start..start],
|
||||
5,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let annotation = SharedString::from(excerpt.annotation);
|
||||
editor.insert_blocks(
|
||||
[BlockProperties {
|
||||
position: ranges[0].start,
|
||||
height: annotation.split('\n').count() as u8 + 1,
|
||||
style: BlockStyle::Fixed,
|
||||
render: Box::new(move |cx| Self::render_note_block(&annotation, cx)),
|
||||
disposition: BlockDisposition::Above,
|
||||
}],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
if !this.added_editor_to_workspace {
|
||||
this.added_editor_to_workspace = true;
|
||||
this.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn render_note_block(explanation: &SharedString, cx: &mut BlockContext) -> AnyElement {
|
||||
let anchor_x = cx.anchor_x;
|
||||
let gutter_width = cx.gutter_dimensions.width;
|
||||
|
||||
h_flex()
|
||||
.w_full()
|
||||
.py_2()
|
||||
.border_y_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_center()
|
||||
.w(gutter_width)
|
||||
.child(Icon::new(IconName::Ai).color(Color::Hint)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.ml(anchor_x - gutter_width)
|
||||
.child(explanation.clone()),
|
||||
)
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for AnnotationResultView {
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
if let Some(error) = &self.error {
|
||||
ui::Label::new(error.to_string()).into_any_element()
|
||||
} else {
|
||||
ui::Label::new(SharedString::from(format!(
|
||||
"Opened a buffer with {} excerpts",
|
||||
self.rendered_excerpt_count
|
||||
)))
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToolOutput for AnnotationResultView {
|
||||
type Input = AnnotationInput;
|
||||
type SerializedState = Option<String>;
|
||||
|
||||
fn generate(&self, _: &mut ProjectContext, _: &mut ViewContext<Self>) -> String {
|
||||
if let Some(error) = &self.error {
|
||||
format!("Failed to create buffer: {error:?}")
|
||||
} else {
|
||||
format!(
|
||||
"opened {} excerpts in a buffer",
|
||||
self.rendered_excerpt_count
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn set_input(&mut self, mut input: Self::Input, cx: &mut ViewContext<Self>) {
|
||||
let editor = if let Some(editor) = &self.editor {
|
||||
editor.clone()
|
||||
} else {
|
||||
let multibuffer = cx.new_model(|_cx| {
|
||||
MultiBuffer::new(0, language::Capability::ReadWrite).with_title(String::new())
|
||||
});
|
||||
let editor = cx.new_view(|cx| {
|
||||
Editor::for_multibuffer(multibuffer.clone(), Some(self.project.clone()), cx)
|
||||
});
|
||||
|
||||
self.editor = Some(editor.clone());
|
||||
editor
|
||||
};
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.buffer().update(cx, |multibuffer, cx| {
|
||||
if multibuffer.title(cx) != input.title {
|
||||
multibuffer.set_title(input.title.clone(), cx);
|
||||
}
|
||||
});
|
||||
|
||||
self.pending_excerpt = input.excerpts.pop();
|
||||
for excerpt in input.excerpts.iter().skip(self.rendered_excerpt_count) {
|
||||
self.tx.unbounded_send(excerpt.clone()).ok();
|
||||
}
|
||||
self.rendered_excerpt_count = input.excerpts.len();
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn execute(&mut self, _cx: &mut ViewContext<Self>) -> Task<Result<()>> {
|
||||
if let Some(excerpt) = self.pending_excerpt.take() {
|
||||
self.rendered_excerpt_count += 1;
|
||||
self.tx.unbounded_send(excerpt.clone()).ok();
|
||||
}
|
||||
|
||||
self.tx.close_channel();
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
|
||||
fn serialize(&self, _cx: &mut ViewContext<Self>) -> Self::SerializedState {
|
||||
self.error.as_ref().map(|error| error.to_string())
|
||||
}
|
||||
|
||||
fn deserialize(
|
||||
&mut self,
|
||||
output: Self::SerializedState,
|
||||
_cx: &mut ViewContext<Self>,
|
||||
) -> Result<()> {
|
||||
if let Some(error_message) = output {
|
||||
self.error = Some(anyhow::anyhow!("{}", error_message));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tooling::{LanguageModelTool, ProjectContext, ToolOutput};
|
||||
use anyhow::Result;
|
||||
use assistant_tooling::LanguageModelTool;
|
||||
use editor::Editor;
|
||||
use gpui::{prelude::*, Model, Task, View, WeakView};
|
||||
use project::Project;
|
||||
@@ -20,7 +20,7 @@ impl CreateBufferTool {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, JsonSchema)]
|
||||
#[derive(Debug, Deserialize, JsonSchema)]
|
||||
pub struct CreateBufferInput {
|
||||
/// The contents of the buffer.
|
||||
text: String,
|
||||
@@ -31,70 +31,28 @@ pub struct CreateBufferInput {
|
||||
language: String,
|
||||
}
|
||||
|
||||
pub struct CreateBufferOutput {}
|
||||
|
||||
impl LanguageModelTool for CreateBufferTool {
|
||||
type Input = CreateBufferInput;
|
||||
type Output = CreateBufferOutput;
|
||||
type View = CreateBufferView;
|
||||
|
||||
fn name(&self) -> String {
|
||||
"create_new_source_file".to_string()
|
||||
"create_buffer".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Create a new file in the current codebase. Only use this when generating new code, NOT when showing existing code from the project.".to_string()
|
||||
"Create a new buffer in the current codebase".to_string()
|
||||
}
|
||||
|
||||
fn view(&self, cx: &mut WindowContext) -> View<Self::View> {
|
||||
cx.new_view(|_cx| CreateBufferView {
|
||||
workspace: self.workspace.clone(),
|
||||
project: self.project.clone(),
|
||||
input: None,
|
||||
error: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CreateBufferView {
|
||||
workspace: WeakView<Workspace>,
|
||||
project: Model<Project>,
|
||||
input: Option<CreateBufferInput>,
|
||||
error: Option<anyhow::Error>,
|
||||
}
|
||||
|
||||
impl Render for CreateBufferView {
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
div().child("Opening a buffer")
|
||||
}
|
||||
}
|
||||
|
||||
impl ToolOutput for CreateBufferView {
|
||||
type Input = CreateBufferInput;
|
||||
|
||||
type SerializedState = ();
|
||||
|
||||
fn generate(&self, _project: &mut ProjectContext, _cx: &mut ViewContext<Self>) -> String {
|
||||
let Some(input) = self.input.as_ref() else {
|
||||
return "No input".to_string();
|
||||
};
|
||||
|
||||
match &self.error {
|
||||
None => format!("Created a new {} buffer", input.language),
|
||||
Some(err) => format!("Failed to create buffer: {err:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn set_input(&mut self, input: Self::Input, _cx: &mut ViewContext<Self>) {
|
||||
self.input = Some(input);
|
||||
}
|
||||
|
||||
fn execute(&mut self, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
|
||||
fn execute(&self, input: &Self::Input, cx: &mut WindowContext) -> Task<Result<Self::Output>> {
|
||||
cx.spawn({
|
||||
let workspace = self.workspace.clone();
|
||||
let project = self.project.clone();
|
||||
let input = self.input.clone();
|
||||
|_this, mut cx| async move {
|
||||
let input = input.ok_or_else(|| anyhow!("no input"))?;
|
||||
|
||||
let text = input.text.clone();
|
||||
let language_name = input.language.clone();
|
||||
let text = input.text.clone();
|
||||
let language_name = input.language.clone();
|
||||
|mut cx| async move {
|
||||
let language = cx
|
||||
.update(|cx| {
|
||||
project
|
||||
@@ -104,14 +62,11 @@ impl ToolOutput for CreateBufferView {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let buffer = cx
|
||||
.update(|cx| project.update(cx, |project, cx| project.create_buffer(cx)))?
|
||||
.await?;
|
||||
|
||||
buffer.update(&mut cx, |buffer, cx| {
|
||||
buffer.edit([(0..0, text)], None, cx);
|
||||
buffer.set_language(Some(language), cx)
|
||||
})?;
|
||||
let buffer = cx.update(|cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
project.create_buffer(&text, Some(language), cx)
|
||||
})
|
||||
})??;
|
||||
|
||||
workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
@@ -125,20 +80,32 @@ impl ToolOutput for CreateBufferView {
|
||||
})
|
||||
.log_err();
|
||||
|
||||
Ok(())
|
||||
Ok(CreateBufferOutput {})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn serialize(&self, _cx: &mut ViewContext<Self>) -> Self::SerializedState {
|
||||
()
|
||||
fn format(input: &Self::Input, output: &Result<Self::Output>) -> String {
|
||||
match output {
|
||||
Ok(_) => format!("Created a new {} buffer", input.language),
|
||||
Err(err) => format!("Failed to create buffer: {err:?}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize(
|
||||
&mut self,
|
||||
_output: Self::SerializedState,
|
||||
_cx: &mut ViewContext<Self>,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
fn output_view(
|
||||
_tool_call_id: String,
|
||||
_input: Self::Input,
|
||||
_output: Result<Self::Output>,
|
||||
cx: &mut WindowContext,
|
||||
) -> View<Self::View> {
|
||||
cx.new_view(|_cx| CreateBufferView {})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CreateBufferView {}
|
||||
|
||||
impl Render for CreateBufferView {
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
div().child("Opening a buffer")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,40 +1,32 @@
|
||||
use anyhow::Result;
|
||||
use assistant_tooling::{LanguageModelTool, ToolOutput};
|
||||
use collections::BTreeMap;
|
||||
use gpui::{prelude::*, Model, Task};
|
||||
use project::ProjectPath;
|
||||
use assistant_tooling::LanguageModelTool;
|
||||
use gpui::{prelude::*, AnyView, Model, Task};
|
||||
use project::Fs;
|
||||
use schemars::JsonSchema;
|
||||
use semantic_index::{ProjectIndex, Status};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{fmt::Write as _, ops::Range, path::Path, sync::Arc};
|
||||
use ui::{prelude::*, CollapsibleContainer, Color, Icon, IconName, Label, WindowContext};
|
||||
use serde::Deserialize;
|
||||
use std::sync::Arc;
|
||||
use ui::{
|
||||
div, prelude::*, CollapsibleContainer, Color, Icon, IconName, Label, SharedString,
|
||||
WindowContext,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
|
||||
const DEFAULT_SEARCH_LIMIT: usize = 20;
|
||||
|
||||
pub struct ProjectIndexTool {
|
||||
project_index: Model<ProjectIndex>,
|
||||
#[derive(Clone)]
|
||||
pub struct CodebaseExcerpt {
|
||||
path: SharedString,
|
||||
text: SharedString,
|
||||
score: f32,
|
||||
element_id: ElementId,
|
||||
expanded: bool,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
enum ProjectIndexToolState {
|
||||
#[default]
|
||||
CollectingQuery,
|
||||
Searching,
|
||||
Error(anyhow::Error),
|
||||
Finished {
|
||||
excerpts: BTreeMap<ProjectPath, Vec<Range<usize>>>,
|
||||
index_status: Status,
|
||||
},
|
||||
}
|
||||
// Note: Comments on a `LanguageModelTool::Input` become descriptions on the generated JSON schema as shown to the language model.
|
||||
// Any changes or deletions to the `CodebaseQuery` comments will change model behavior.
|
||||
|
||||
pub struct ProjectIndexView {
|
||||
project_index: Model<ProjectIndex>,
|
||||
input: CodebaseQuery,
|
||||
expanded_header: bool,
|
||||
state: ProjectIndexToolState,
|
||||
}
|
||||
|
||||
#[derive(Default, Deserialize, JsonSchema)]
|
||||
#[derive(Deserialize, JsonSchema)]
|
||||
pub struct CodebaseQuery {
|
||||
/// Semantic search query
|
||||
query: String,
|
||||
@@ -42,22 +34,23 @@ pub struct CodebaseQuery {
|
||||
limit: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SerializedState {
|
||||
index_status: Status,
|
||||
error_message: Option<String>,
|
||||
worktrees: BTreeMap<Arc<Path>, WorktreeIndexOutput>,
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize, Deserialize)]
|
||||
struct WorktreeIndexOutput {
|
||||
excerpts: BTreeMap<Arc<Path>, Vec<Range<usize>>>,
|
||||
pub struct ProjectIndexView {
|
||||
input: CodebaseQuery,
|
||||
output: Result<ProjectIndexOutput>,
|
||||
}
|
||||
|
||||
impl ProjectIndexView {
|
||||
fn toggle_header(&mut self, cx: &mut ViewContext<Self>) {
|
||||
self.expanded_header = !self.expanded_header;
|
||||
cx.notify();
|
||||
fn toggle_expanded(&mut self, element_id: ElementId, cx: &mut ViewContext<Self>) {
|
||||
if let Ok(output) = &mut self.output {
|
||||
if let Some(excerpt) = output
|
||||
.excerpts
|
||||
.iter_mut()
|
||||
.find(|excerpt| excerpt.element_id == element_id)
|
||||
{
|
||||
excerpt.expanded = !excerpt.expanded;
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -65,215 +58,76 @@ impl Render for ProjectIndexView {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let query = self.input.query.clone();
|
||||
|
||||
let (header_text, content) = match &self.state {
|
||||
ProjectIndexToolState::Error(error) => {
|
||||
return format!("failed to search: {error:?}").into_any_element()
|
||||
}
|
||||
ProjectIndexToolState::CollectingQuery | ProjectIndexToolState::Searching => {
|
||||
("Searching...".to_string(), div())
|
||||
}
|
||||
ProjectIndexToolState::Finished { excerpts, .. } => {
|
||||
let file_count = excerpts.len();
|
||||
let result = &self.output;
|
||||
|
||||
let header_text = format!(
|
||||
"Read {} {}",
|
||||
file_count,
|
||||
if file_count == 1 { "file" } else { "files" }
|
||||
);
|
||||
|
||||
let el = v_flex().gap_2().children(excerpts.keys().map(|path| {
|
||||
h_flex().gap_2().child(Icon::new(IconName::File)).child(
|
||||
Label::new(path.path.to_string_lossy().to_string()).color(Color::Muted),
|
||||
)
|
||||
}));
|
||||
|
||||
(header_text, el)
|
||||
let output = match result {
|
||||
Err(err) => {
|
||||
return div().child(Label::new(format!("Error: {}", err)).color(Color::Error));
|
||||
}
|
||||
Ok(output) => output,
|
||||
};
|
||||
|
||||
let header = h_flex()
|
||||
div()
|
||||
.v_flex()
|
||||
.gap_2()
|
||||
.child(Icon::new(IconName::File))
|
||||
.child(header_text);
|
||||
|
||||
v_flex()
|
||||
.gap_3()
|
||||
.child(
|
||||
CollapsibleContainer::new("collapsible-container", self.expanded_header)
|
||||
.start_slot(header)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.toggle_header(cx);
|
||||
}))
|
||||
div()
|
||||
.p_2()
|
||||
.rounded_md()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_3()
|
||||
.p_3()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Icon::new(IconName::MagnifyingGlass))
|
||||
.child(Label::new(format!("`{}`", query)).color(Color::Muted)),
|
||||
)
|
||||
.child(content),
|
||||
h_flex()
|
||||
.child(Label::new("Query: ").color(Color::Modified))
|
||||
.child(Label::new(query).color(Color::Muted)),
|
||||
),
|
||||
)
|
||||
.into_any_element()
|
||||
.children(output.excerpts.iter().map(|excerpt| {
|
||||
let element_id = excerpt.element_id.clone();
|
||||
let expanded = excerpt.expanded;
|
||||
|
||||
CollapsibleContainer::new(element_id.clone(), expanded)
|
||||
.start_slot(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(Icon::new(IconName::File).color(Color::Muted))
|
||||
.child(Label::new(excerpt.path.clone()).color(Color::Muted)),
|
||||
)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.toggle_expanded(element_id.clone(), cx);
|
||||
}))
|
||||
.child(
|
||||
div()
|
||||
.p_2()
|
||||
.rounded_md()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(excerpt.text.clone()),
|
||||
)
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToolOutput for ProjectIndexView {
|
||||
type Input = CodebaseQuery;
|
||||
type SerializedState = SerializedState;
|
||||
pub struct ProjectIndexTool {
|
||||
project_index: Model<ProjectIndex>,
|
||||
fs: Arc<dyn Fs>,
|
||||
}
|
||||
|
||||
fn generate(
|
||||
&self,
|
||||
context: &mut assistant_tooling::ProjectContext,
|
||||
_: &mut ViewContext<Self>,
|
||||
) -> String {
|
||||
match &self.state {
|
||||
ProjectIndexToolState::CollectingQuery => String::new(),
|
||||
ProjectIndexToolState::Searching => String::new(),
|
||||
ProjectIndexToolState::Error(error) => format!("failed to search: {error:?}"),
|
||||
ProjectIndexToolState::Finished {
|
||||
excerpts,
|
||||
index_status,
|
||||
} => {
|
||||
let mut body = "found results in the following paths:\n".to_string();
|
||||
|
||||
for (project_path, ranges) in excerpts {
|
||||
context.add_excerpts(project_path.clone(), ranges);
|
||||
writeln!(&mut body, "* {}", &project_path.path.display()).unwrap();
|
||||
}
|
||||
|
||||
if *index_status != Status::Idle {
|
||||
body.push_str("Still indexing. Results may be incomplete.\n");
|
||||
}
|
||||
|
||||
body
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext<Self>) {
|
||||
self.input = input;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn execute(&mut self, cx: &mut ViewContext<Self>) -> Task<Result<()>> {
|
||||
self.state = ProjectIndexToolState::Searching;
|
||||
cx.notify();
|
||||
|
||||
let project_index = self.project_index.read(cx);
|
||||
let index_status = project_index.status();
|
||||
let search = project_index.search(
|
||||
self.input.query.clone(),
|
||||
self.input.limit.unwrap_or(DEFAULT_SEARCH_LIMIT),
|
||||
cx,
|
||||
);
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let search_result = search.await;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
match search_result {
|
||||
Ok(search_results) => {
|
||||
let mut excerpts = BTreeMap::<ProjectPath, Vec<Range<usize>>>::new();
|
||||
for search_result in search_results {
|
||||
let project_path = ProjectPath {
|
||||
worktree_id: search_result.worktree.read(cx).id(),
|
||||
path: search_result.path,
|
||||
};
|
||||
excerpts
|
||||
.entry(project_path)
|
||||
.or_default()
|
||||
.push(search_result.range);
|
||||
}
|
||||
this.state = ProjectIndexToolState::Finished {
|
||||
excerpts,
|
||||
index_status,
|
||||
};
|
||||
}
|
||||
Err(error) => {
|
||||
this.state = ProjectIndexToolState::Error(error);
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn serialize(&self, cx: &mut ViewContext<Self>) -> Self::SerializedState {
|
||||
let mut serialized = SerializedState {
|
||||
error_message: None,
|
||||
index_status: Status::Idle,
|
||||
worktrees: Default::default(),
|
||||
};
|
||||
match &self.state {
|
||||
ProjectIndexToolState::Error(err) => serialized.error_message = Some(err.to_string()),
|
||||
ProjectIndexToolState::Finished {
|
||||
excerpts,
|
||||
index_status,
|
||||
} => {
|
||||
serialized.index_status = *index_status;
|
||||
if let Some(project) = self.project_index.read(cx).project().upgrade() {
|
||||
let project = project.read(cx);
|
||||
for (project_path, excerpts) in excerpts {
|
||||
if let Some(worktree) =
|
||||
project.worktree_for_id(project_path.worktree_id, cx)
|
||||
{
|
||||
let worktree_path = worktree.read(cx).abs_path();
|
||||
serialized
|
||||
.worktrees
|
||||
.entry(worktree_path)
|
||||
.or_default()
|
||||
.excerpts
|
||||
.insert(project_path.path.clone(), excerpts.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
serialized
|
||||
}
|
||||
|
||||
fn deserialize(
|
||||
&mut self,
|
||||
serialized: Self::SerializedState,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Result<()> {
|
||||
if !serialized.worktrees.is_empty() {
|
||||
let mut excerpts = BTreeMap::<ProjectPath, Vec<Range<usize>>>::new();
|
||||
if let Some(project) = self.project_index.read(cx).project().upgrade() {
|
||||
let project = project.read(cx);
|
||||
for (worktree_path, worktree_state) in serialized.worktrees {
|
||||
if let Some(worktree) = project
|
||||
.worktrees()
|
||||
.find(|worktree| worktree.read(cx).abs_path() == worktree_path)
|
||||
{
|
||||
let worktree_id = worktree.read(cx).id();
|
||||
for (path, serialized_excerpts) in worktree_state.excerpts {
|
||||
excerpts.insert(ProjectPath { worktree_id, path }, serialized_excerpts);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
self.state = ProjectIndexToolState::Finished {
|
||||
excerpts,
|
||||
index_status: serialized.index_status,
|
||||
};
|
||||
}
|
||||
cx.notify();
|
||||
Ok(())
|
||||
}
|
||||
pub struct ProjectIndexOutput {
    excerpts: Vec<CodebaseExcerpt>,
    status: Status,
}

impl ProjectIndexTool {
    pub fn new(project_index: Model<ProjectIndex>) -> Self {
        Self { project_index }
    pub fn new(project_index: Model<ProjectIndex>, fs: Arc<dyn Fs>) -> Self {
        // Listen for project index status and update the ProjectIndexTool directly

        // TODO: setup a better description based on the user's current codebase.
        Self { project_index, fs }
    }
}

impl LanguageModelTool for ProjectIndexTool {
    type Input = CodebaseQuery;
    type Output = ProjectIndexOutput;
    type View = ProjectIndexView;

    fn name(&self) -> String {
@@ -281,15 +135,133 @@ impl LanguageModelTool for ProjectIndexTool {
    }

    fn description(&self) -> String {
        "Semantic search against the user's current codebase, returning excerpts related to the query by computing a dot product against embeddings of code chunks in the code base and an embedding of the query.".to_string()
        "Semantic search against the user's current codebase, returning excerpts related to the query by computing a dot product against embeddings of chunks and an embedding of the query".to_string()
    }

    fn view(&self, cx: &mut WindowContext) -> gpui::View<Self::View> {
        cx.new_view(|_| ProjectIndexView {
            state: ProjectIndexToolState::CollectingQuery,
            input: Default::default(),
            expanded_header: false,
            project_index: self.project_index.clone(),
    fn execute(&self, query: &Self::Input, cx: &mut WindowContext) -> Task<Result<Self::Output>> {
        let project_index = self.project_index.read(cx);

        let status = project_index.status();
        let results = project_index.search(
            query.query.as_str(),
            query.limit.unwrap_or(DEFAULT_SEARCH_LIMIT),
            cx,
        );

        let fs = self.fs.clone();

        cx.spawn(|cx| async move {
            let results = results.await;

            let excerpts = results.into_iter().map(|result| {
                let abs_path = result
                    .worktree
                    .read_with(&cx, |worktree, _| worktree.abs_path().join(&result.path));
                let fs = fs.clone();

                async move {
                    let path = result.path.clone();
                    let text = fs.load(&abs_path?).await?;

                    let mut start = result.range.start;
                    let mut end = result.range.end.min(text.len());
                    while !text.is_char_boundary(start) {
                        start += 1;
                    }
                    while !text.is_char_boundary(end) {
                        end -= 1;
                    }

                    anyhow::Ok(CodebaseExcerpt {
                        element_id: ElementId::Name(nanoid::nanoid!().into()),
                        expanded: false,
                        path: path.to_string_lossy().to_string().into(),
                        text: SharedString::from(text[start..end].to_string()),
                        score: result.score,
                    })
                }
            });

            let excerpts = futures::future::join_all(excerpts)
                .await
                .into_iter()
                .filter_map(|result| result.log_err())
                .collect();
            anyhow::Ok(ProjectIndexOutput { excerpts, status })
        })
    }

    fn output_view(
        _tool_call_id: String,
        input: Self::Input,
        output: Result<Self::Output>,
        cx: &mut WindowContext,
    ) -> gpui::View<Self::View> {
        cx.new_view(|_cx| ProjectIndexView { input, output })
    }

    fn status_view(&self, cx: &mut WindowContext) -> Option<AnyView> {
        Some(
            cx.new_view(|cx| ProjectIndexStatusView::new(self.project_index.clone(), cx))
                .into(),
        )
    }

    fn format(_input: &Self::Input, output: &Result<Self::Output>) -> String {
        match &output {
            Ok(output) => {
                let mut body = "Semantic search results:\n".to_string();

                if output.status != Status::Idle {
                    body.push_str("Still indexing. Results may be incomplete.\n");
                }

                if output.excerpts.is_empty() {
                    body.push_str("No results found");
                    return body;
                }

                for excerpt in &output.excerpts {
                    body.push_str("Excerpt from ");
                    body.push_str(excerpt.path.as_ref());
                    body.push_str(", score ");
                    body.push_str(&excerpt.score.to_string());
                    body.push_str(":\n");
                    body.push_str("~~~\n");
                    body.push_str(excerpt.text.as_ref());
                    body.push_str("~~~\n");
                }
                body
            }
            Err(err) => format!("Error: {}", err),
        }
    }
}

struct ProjectIndexStatusView {
    project_index: Model<ProjectIndex>,
}

impl ProjectIndexStatusView {
    pub fn new(project_index: Model<ProjectIndex>, cx: &mut ViewContext<Self>) -> Self {
        cx.subscribe(&project_index, |_this, _, _status: &Status, cx| {
            cx.notify();
        })
        .detach();
        Self { project_index }
    }
}

impl Render for ProjectIndexStatusView {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let status = self.project_index.read(cx).status();

        h_flex().gap_2().map(|element| match status {
            Status::Idle => element.child(Label::new("Project index ready")),
            Status::Loading => element.child(Label::new("Project index loading...")),
            Status::Scanning { remaining_count } => element.child(Label::new(format!(
                "Project index scanning: {remaining_count} remaining..."
            ))),
        })
    }
}

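Editor's note: the new execute method above trims each excerpt's byte range so the slice never lands inside a multi-byte UTF-8 character. A minimal standalone sketch of that clamping step, for reference only; the helper name is illustrative and not part of the diff:

    // Clamp a byte range to valid char boundaries of `text`, mirroring the
    // while-loops in ProjectIndexTool::execute above (with an extra bounds guard).
    fn clamp_to_char_boundaries(text: &str, mut start: usize, mut end: usize) -> (usize, usize) {
        end = end.min(text.len());
        while start < text.len() && !text.is_char_boundary(start) {
            start += 1;
        }
        while end > 0 && !text.is_char_boundary(end) {
            end -= 1;
        }
        (start, end)
    }
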
@@ -1,17 +1,11 @@
mod active_file_button;
mod chat_message;
mod chat_notice;
mod composer;
mod project_index_button;

#[cfg(feature = "stories")]
mod stories;

pub use active_file_button::*;
pub use chat_message::*;
pub use chat_notice::*;
pub use composer::*;
pub use project_index_button::*;

#[cfg(feature = "stories")]
pub use stories::*;

@@ -1,134 +0,0 @@
|
||||
use crate::attachments::ActiveEditorAttachmentTool;
|
||||
use assistant_tooling::AttachmentRegistry;
|
||||
use editor::Editor;
|
||||
use gpui::{prelude::*, Subscription, View};
|
||||
use std::sync::Arc;
|
||||
use ui::{prelude::*, ButtonLike, Color, Icon, IconName, Tooltip};
|
||||
use workspace::Workspace;
|
||||
|
||||
#[derive(Clone)]
|
||||
enum Status {
|
||||
ActiveFile(String),
|
||||
#[allow(dead_code)]
|
||||
NoFile,
|
||||
}
|
||||
|
||||
pub struct ActiveFileButton {
|
||||
attachment_registry: Arc<AttachmentRegistry>,
|
||||
status: Status,
|
||||
#[allow(dead_code)]
|
||||
workspace_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl ActiveFileButton {
|
||||
pub fn new(
|
||||
attachment_registry: Arc<AttachmentRegistry>,
|
||||
workspace: View<Workspace>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let workspace_subscription = cx.subscribe(&workspace, Self::handle_workspace_event);
|
||||
|
||||
cx.defer(move |this, cx| this.update_active_buffer(workspace.clone(), cx));
|
||||
|
||||
Self {
|
||||
attachment_registry,
|
||||
status: Status::NoFile,
|
||||
workspace_subscription,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_enabled(&mut self, enabled: bool) {
|
||||
self.attachment_registry
|
||||
.set_attachment_tool_enabled::<ActiveEditorAttachmentTool>(enabled);
|
||||
}
|
||||
|
||||
pub fn update_active_buffer(&mut self, workspace: View<Workspace>, cx: &mut ViewContext<Self>) {
|
||||
let active_buffer = workspace
|
||||
.read(cx)
|
||||
.active_item(cx)
|
||||
.and_then(|item| Some(item.act_as::<Editor>(cx)?.read(cx).buffer().clone()));
|
||||
|
||||
if let Some(buffer) = active_buffer {
|
||||
let buffer = buffer.read(cx);
|
||||
|
||||
if let Some(singleton) = buffer.as_singleton() {
|
||||
let singleton = singleton.read(cx);
|
||||
|
||||
let filename: String = singleton
|
||||
.file()
|
||||
.map(|file| file.path().to_string_lossy())
|
||||
.unwrap_or("Untitled".into())
|
||||
.into();
|
||||
|
||||
self.status = Status::ActiveFile(filename);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_workspace_event(
|
||||
&mut self,
|
||||
workspace: View<Workspace>,
|
||||
event: &workspace::Event,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let workspace::Event::ActiveItemChanged = event {
|
||||
self.update_active_buffer(workspace, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ActiveFileButton {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let is_enabled = self
|
||||
.attachment_registry
|
||||
.is_attachment_tool_enabled::<ActiveEditorAttachmentTool>();
|
||||
|
||||
let icon = if is_enabled {
|
||||
Icon::new(IconName::File)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Default)
|
||||
} else {
|
||||
Icon::new(IconName::File)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Disabled)
|
||||
};
|
||||
|
||||
let indicator = None;
|
||||
|
||||
let status = self.status.clone();
|
||||
|
||||
ButtonLike::new("active-file-button")
|
||||
.child(
|
||||
ui::IconWithIndicator::new(icon, indicator)
|
||||
.indicator_border_color(Some(gpui::transparent_black())),
|
||||
)
|
||||
.tooltip({
|
||||
move |cx| {
|
||||
let status = status.clone();
|
||||
let (tooltip, meta) = match (is_enabled, status) {
|
||||
(false, _) => (
|
||||
"Active file disabled".to_string(),
|
||||
Some("Click to enable".to_string()),
|
||||
),
|
||||
(true, Status::ActiveFile(filename)) => (
|
||||
format!("Active file {filename} enabled"),
|
||||
Some("Click to disable".to_string()),
|
||||
),
|
||||
(true, Status::NoFile) => {
|
||||
("No file active for conversation".to_string(), None)
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(meta) = meta {
|
||||
Tooltip::with_meta(tooltip, None, meta, cx)
|
||||
} else {
|
||||
Tooltip::text(tooltip, cx)
|
||||
}
|
||||
}
|
||||
})
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.set_enabled(!is_enabled);
|
||||
cx.notify();
|
||||
}))
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use client::User;
|
||||
use gpui::{hsla, AnyElement, ClickEvent};
|
||||
use ui::{prelude::*, Avatar, Tooltip};
|
||||
use gpui::{AnyElement, ClickEvent};
|
||||
use ui::{prelude::*, Avatar};
|
||||
|
||||
use crate::MessageId;
|
||||
|
||||
@@ -15,8 +15,7 @@ pub enum UserOrAssistant {
|
||||
pub struct ChatMessage {
|
||||
id: MessageId,
|
||||
player: UserOrAssistant,
|
||||
messages: Vec<AnyElement>,
|
||||
selected: bool,
|
||||
message: Option<AnyElement>,
|
||||
collapsed: bool,
|
||||
on_collapse_handle_click: Box<dyn Fn(&ClickEvent, &mut WindowContext) + 'static>,
|
||||
}
|
||||
@@ -25,44 +24,83 @@ impl ChatMessage {
|
||||
pub fn new(
|
||||
id: MessageId,
|
||||
player: UserOrAssistant,
|
||||
messages: Vec<AnyElement>,
|
||||
message: Option<AnyElement>,
|
||||
collapsed: bool,
|
||||
on_collapse_handle_click: Box<dyn Fn(&ClickEvent, &mut WindowContext) + 'static>,
|
||||
) -> Self {
|
||||
Self {
|
||||
id,
|
||||
player,
|
||||
messages,
|
||||
selected: false,
|
||||
message,
|
||||
collapsed,
|
||||
on_collapse_handle_click,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Selectable for ChatMessage {
|
||||
fn selected(mut self, selected: bool) -> Self {
|
||||
self.selected = selected;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl RenderOnce for ChatMessage {
|
||||
fn render(self, cx: &mut WindowContext) -> impl IntoElement {
|
||||
let message_group = SharedString::from(format!("{}_group", self.id.0));
|
||||
|
||||
let collapse_handle_id = SharedString::from(format!("{}_collapse_handle", self.id.0));
|
||||
let collapse_handle = h_flex()
|
||||
.id(collapse_handle_id.clone())
|
||||
.group(collapse_handle_id.clone())
|
||||
.flex_none()
|
||||
.justify_center()
|
||||
.w_1()
|
||||
.mx_2()
|
||||
.h_full()
|
||||
.on_click(self.on_collapse_handle_click)
|
||||
.child(
|
||||
div()
|
||||
.w_px()
|
||||
.h_full()
|
||||
.rounded_lg()
|
||||
.overflow_hidden()
|
||||
.bg(cx.theme().colors().element_background)
|
||||
.group_hover(collapse_handle_id, |this| {
|
||||
this.bg(cx.theme().colors().element_hover)
|
||||
}),
|
||||
);
|
||||
|
||||
let content_padding = Spacing::Small.rems(cx);
|
||||
let content_padding = rems(1.);
|
||||
// Clamp the message height to exactly 1.5 lines when collapsed.
|
||||
let collapsed_height = content_padding.to_pixels(cx.rem_size()) + cx.line_height() * 1.5;
|
||||
|
||||
let background_color = if let UserOrAssistant::User(_) = &self.player {
|
||||
Some(cx.theme().colors().surface_background)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let content = self.message.map(|message| {
|
||||
div()
|
||||
.overflow_hidden()
|
||||
.w_full()
|
||||
.p(content_padding)
|
||||
.rounded_lg()
|
||||
.when(self.collapsed, |this| this.h(collapsed_height))
|
||||
.bg(cx.theme().colors().surface_background)
|
||||
.child(message)
|
||||
});
|
||||
|
||||
v_flex()
|
||||
.gap_1()
|
||||
.child(ChatMessageHeader::new(self.player))
|
||||
.child(h_flex().gap_3().child(collapse_handle).children(content))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(IntoElement)]
|
||||
struct ChatMessageHeader {
|
||||
player: UserOrAssistant,
|
||||
contexts: Vec<()>,
|
||||
}
|
||||
|
||||
impl ChatMessageHeader {
|
||||
fn new(player: UserOrAssistant) -> Self {
|
||||
Self {
|
||||
player,
|
||||
contexts: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RenderOnce for ChatMessageHeader {
|
||||
fn render(self, _cx: &mut WindowContext) -> impl IntoElement {
|
||||
let (username, avatar_uri) = match self.player {
|
||||
UserOrAssistant::Assistant => (
|
||||
"Assistant".into(),
|
||||
@@ -74,67 +112,23 @@ impl RenderOnce for ChatMessage {
|
||||
UserOrAssistant::User(None) => ("You".into(), None),
|
||||
};
|
||||
|
||||
v_flex()
|
||||
.group(message_group.clone())
|
||||
.gap(Spacing::XSmall.rems(cx))
|
||||
.p(Spacing::XSmall.rems(cx))
|
||||
.when(self.selected, |element| {
|
||||
element.bg(hsla(0.6, 0.67, 0.46, 0.12))
|
||||
})
|
||||
.rounded_lg()
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.px(content_padding)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.map(|this| {
|
||||
let avatar_size = rems_from_px(20.);
|
||||
if let Some(avatar_uri) = avatar_uri {
|
||||
this.child(Avatar::new(avatar_uri).size(avatar_size))
|
||||
} else {
|
||||
this.child(div().size(avatar_size))
|
||||
}
|
||||
})
|
||||
.child(Label::new(username).color(Color::Muted)),
|
||||
)
|
||||
.child(
|
||||
h_flex().visible_on_hover(message_group).child(
|
||||
// temp icons
|
||||
IconButton::new(
|
||||
collapse_handle_id.clone(),
|
||||
if self.collapsed {
|
||||
IconName::ArrowUp
|
||||
} else {
|
||||
IconName::ArrowDown
|
||||
},
|
||||
)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.on_click(self.on_collapse_handle_click)
|
||||
.tooltip(|cx| Tooltip::text("Collapse Message", cx)),
|
||||
),
|
||||
),
|
||||
.gap_3()
|
||||
.map(|this| {
|
||||
let avatar_size = rems(20.0 / 16.0);
|
||||
if let Some(avatar_uri) = avatar_uri {
|
||||
this.child(Avatar::new(avatar_uri).size(avatar_size))
|
||||
} else {
|
||||
this.child(div().size(avatar_size))
|
||||
}
|
||||
})
|
||||
.child(Label::new(username).color(Color::Default)),
|
||||
)
|
||||
.when(self.messages.len() > 0, |el| {
|
||||
el.child(
|
||||
h_flex().child(
|
||||
v_flex()
|
||||
.relative()
|
||||
.overflow_hidden()
|
||||
.w_full()
|
||||
.p(content_padding)
|
||||
.gap_3()
|
||||
.text_ui(cx)
|
||||
.rounded_lg()
|
||||
.when_some(background_color, |this, background_color| {
|
||||
this.bg(background_color)
|
||||
})
|
||||
.when(self.collapsed, |this| this.h(collapsed_height))
|
||||
.children(self.messages),
|
||||
),
|
||||
)
|
||||
})
|
||||
.child(div().when(!self.contexts.is_empty(), |this| {
|
||||
this.child(Label::new(self.contexts.len().to_string()).color(Color::Muted))
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,71 +0,0 @@
|
||||
use ui::{prelude::*, Avatar, IconButtonShape};
|
||||
|
||||
#[derive(IntoElement)]
|
||||
pub struct ChatNotice {
|
||||
message: SharedString,
|
||||
meta: Option<SharedString>,
|
||||
}
|
||||
|
||||
impl ChatNotice {
|
||||
pub fn new(message: impl Into<SharedString>) -> Self {
|
||||
Self {
|
||||
message: message.into(),
|
||||
meta: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn meta(mut self, meta: impl Into<SharedString>) -> Self {
|
||||
self.meta = Some(meta.into());
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl RenderOnce for ChatNotice {
|
||||
fn render(self, _cx: &mut WindowContext) -> impl IntoElement {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.items_start()
|
||||
.mt_4()
|
||||
.gap_3()
|
||||
.child(
|
||||
// TODO: Replace with question mark.
|
||||
Avatar::new("https://zed.dev/assistant_avatar.png").size(rems_from_px(20.)),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.size_full()
|
||||
.gap_1()
|
||||
.pr_4()
|
||||
.overflow_hidden()
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.overflow_hidden()
|
||||
.child(
|
||||
h_flex()
|
||||
.flex_none()
|
||||
.overflow_hidden()
|
||||
.child(Label::new(self.message)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.flex_shrink_0()
|
||||
.gap_1()
|
||||
.child(Button::new("allow", "Allow"))
|
||||
.child(
|
||||
IconButton::new("deny", IconName::Close)
|
||||
.shape(IconButtonShape::Square)
|
||||
.icon_color(Color::Muted)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::XSmall),
|
||||
),
|
||||
),
|
||||
)
|
||||
.children(
|
||||
self.meta.map(|meta| {
|
||||
Label::new(meta).size(LabelSize::Small).color(Color::Muted)
|
||||
}),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,119 +1,154 @@
|
||||
use crate::{
|
||||
ui::{ActiveFileButton, ProjectIndexButton},
|
||||
AssistantChat, CompletionProvider,
|
||||
};
|
||||
use assistant_tooling::ToolRegistry;
|
||||
use client::User;
|
||||
use editor::{Editor, EditorElement, EditorStyle};
|
||||
use gpui::{AnyElement, FontStyle, FontWeight, TextStyle, View, WeakView, WhiteSpace};
|
||||
use settings::Settings;
|
||||
use std::sync::Arc;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{popover_menu, prelude::*, ButtonLike, ContextMenu, Divider, TextSize, Tooltip};
|
||||
use ui::{popover_menu, prelude::*, Avatar, ButtonLike, ContextMenu, Tooltip};
|
||||
|
||||
use crate::{AssistantChat, CompletionProvider, Submit, SubmitMode};
|
||||
|
||||
#[derive(IntoElement)]
|
||||
pub struct Composer {
|
||||
editor: View<Editor>,
|
||||
project_index_button: View<ProjectIndexButton>,
|
||||
active_file_button: Option<View<ActiveFileButton>>,
|
||||
player: Option<Arc<User>>,
|
||||
can_submit: bool,
|
||||
tool_registry: Arc<ToolRegistry>,
|
||||
model_selector: AnyElement,
|
||||
}
|
||||
|
||||
impl Composer {
|
||||
pub fn new(
|
||||
editor: View<Editor>,
|
||||
project_index_button: View<ProjectIndexButton>,
|
||||
active_file_button: Option<View<ActiveFileButton>>,
|
||||
player: Option<Arc<User>>,
|
||||
can_submit: bool,
|
||||
tool_registry: Arc<ToolRegistry>,
|
||||
model_selector: AnyElement,
|
||||
) -> Self {
|
||||
Self {
|
||||
editor,
|
||||
project_index_button,
|
||||
active_file_button,
|
||||
player,
|
||||
can_submit,
|
||||
tool_registry,
|
||||
model_selector,
|
||||
}
|
||||
}
|
||||
|
||||
fn render_tools(&mut self, _cx: &mut WindowContext) -> impl IntoElement {
|
||||
h_flex().child(self.project_index_button.clone())
|
||||
}
|
||||
|
||||
fn render_attachment_tools(&mut self, _cx: &mut WindowContext) -> impl IntoElement {
|
||||
h_flex().children(
|
||||
self.active_file_button
|
||||
.clone()
|
||||
.map(|view| view.into_any_element()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl RenderOnce for Composer {
|
||||
fn render(mut self, cx: &mut WindowContext) -> impl IntoElement {
|
||||
let font_size = TextSize::Default.rems(cx);
|
||||
let line_height = font_size.to_pixels(cx.rem_size()) * 1.3;
|
||||
let mut editor_border = cx.theme().colors().text;
|
||||
editor_border.fade_out(0.90);
|
||||
fn render(self, cx: &mut WindowContext) -> impl IntoElement {
|
||||
let mut player_avatar = div().size(rems(20.0 / 16.0)).into_any_element();
|
||||
if let Some(player) = self.player.clone() {
|
||||
player_avatar = Avatar::new(player.avatar_uri.clone())
|
||||
.size(rems(20.0 / 16.0))
|
||||
.into_any_element();
|
||||
}
|
||||
|
||||
// Remove the extra 1px added by the border
|
||||
let padding = Spacing::XLarge.rems(cx) - rems_from_px(1.);
|
||||
let font_size = rems(0.875);
|
||||
let line_height = font_size.to_pixels(cx.rem_size()) * 1.3;
|
||||
|
||||
h_flex()
|
||||
.p(Spacing::Small.rems(cx))
|
||||
.w_full()
|
||||
.items_start()
|
||||
.mt_4()
|
||||
.gap_3()
|
||||
.child(player_avatar)
|
||||
.child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
.rounded_lg()
|
||||
.p(padding)
|
||||
.border_1()
|
||||
.border_color(editor_border)
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.size_full()
|
||||
.gap_1()
|
||||
.pr_4()
|
||||
.child(
|
||||
v_flex()
|
||||
.justify_between()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.child({
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
let text_style = TextStyle {
|
||||
color: cx.theme().colors().editor_foreground,
|
||||
font_family: settings.buffer_font.family.clone(),
|
||||
font_features: settings.buffer_font.features.clone(),
|
||||
font_size: font_size.into(),
|
||||
font_weight: FontWeight::NORMAL,
|
||||
font_style: FontStyle::Normal,
|
||||
line_height: line_height.into(),
|
||||
background_color: None,
|
||||
underline: None,
|
||||
strikethrough: None,
|
||||
white_space: WhiteSpace::Normal,
|
||||
};
|
||||
|
||||
EditorElement::new(
|
||||
&self.editor,
|
||||
EditorStyle {
|
||||
background: cx.theme().colors().editor_background,
|
||||
local_player: cx.theme().players().local(),
|
||||
text: text_style,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
})
|
||||
.p_4()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.rounded_lg()
|
||||
.child(
|
||||
h_flex()
|
||||
.flex_none()
|
||||
.gap_2()
|
||||
v_flex()
|
||||
.justify_between()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.min_h(line_height * 4 + px(74.0))
|
||||
.child({
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
let text_style = TextStyle {
|
||||
color: cx.theme().colors().editor_foreground,
|
||||
font_family: settings.buffer_font.family.clone(),
|
||||
font_features: settings.buffer_font.features.clone(),
|
||||
font_size: font_size.into(),
|
||||
font_weight: FontWeight::NORMAL,
|
||||
font_style: FontStyle::Normal,
|
||||
line_height: line_height.into(),
|
||||
background_color: None,
|
||||
underline: None,
|
||||
strikethrough: None,
|
||||
white_space: WhiteSpace::Normal,
|
||||
};
|
||||
|
||||
EditorElement::new(
|
||||
&self.editor,
|
||||
EditorStyle {
|
||||
background: cx.theme().colors().editor_background,
|
||||
local_player: cx.theme().players().local(),
|
||||
text: text_style,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
})
|
||||
.child(
|
||||
h_flex().gap_1().child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(self.render_tools(cx))
|
||||
.child(Divider::vertical())
|
||||
.child(self.render_attachment_tools(cx)),
|
||||
),
|
||||
)
|
||||
.child(h_flex().gap_1().child(self.model_selector)),
|
||||
h_flex()
|
||||
.flex_none()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.w_full()
|
||||
.child(
|
||||
h_flex().gap_1().child(
|
||||
// IconButton/button
|
||||
// Toggle - if enabled, .selected(true).selected_style(IconButtonStyle::Filled)
|
||||
//
|
||||
// match status
|
||||
// Tooltip::with_meta("some label explaining project index + status", "click to enable")
|
||||
IconButton::new(
|
||||
"add-context",
|
||||
IconName::FileDoc,
|
||||
)
|
||||
.icon_color(Color::Muted),
|
||||
), // .child(
|
||||
// IconButton::new(
|
||||
// "add-context",
|
||||
// IconName::Plus,
|
||||
// )
|
||||
// .icon_color(Color::Muted),
|
||||
// ),
|
||||
)
|
||||
.child(
|
||||
Button::new("send-button", "Send")
|
||||
.style(ButtonStyle::Filled)
|
||||
.disabled(!self.can_submit)
|
||||
.on_click(|_, cx| {
|
||||
cx.dispatch_action(Box::new(Submit(
|
||||
SubmitMode::Codebase,
|
||||
)))
|
||||
})
|
||||
.tooltip(|cx| {
|
||||
Tooltip::for_action(
|
||||
"Submit message",
|
||||
&Submit(SubmitMode::Codebase),
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(self.model_selector)
|
||||
.children(self.tool_registry.status_views().iter().cloned()),
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -149,7 +184,7 @@ impl RenderOnce for ModelSelector {
|
||||
let assistant_chat = self.assistant_chat.clone();
|
||||
move |cx| {
|
||||
_ = assistant_chat.update(cx, |assistant_chat, cx| {
|
||||
assistant_chat.model.clone_from(&model);
|
||||
assistant_chat.model = model.clone();
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
@@ -171,18 +206,10 @@ impl RenderOnce for ModelSelector {
|
||||
.overflow_x_hidden()
|
||||
.flex_grow()
|
||||
.whitespace_nowrap()
|
||||
.child(
|
||||
Label::new(self.model)
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
.child(Label::new(self.model)),
|
||||
)
|
||||
.child(
|
||||
div().child(
|
||||
Icon::new(IconName::ChevronDown)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::XSmall),
|
||||
),
|
||||
div().child(Icon::new(IconName::ChevronDown).color(Color::Muted)),
|
||||
),
|
||||
)
|
||||
.style(ButtonStyle::Subtle)
|
||||
|
||||
@@ -1,112 +0,0 @@
|
||||
use assistant_tooling::ToolRegistry;
|
||||
use gpui::{percentage, prelude::*, Animation, AnimationExt, Model, Transformation};
|
||||
use semantic_index::{ProjectIndex, Status};
|
||||
use std::{sync::Arc, time::Duration};
|
||||
use ui::{prelude::*, ButtonLike, Color, Icon, IconName, Indicator, Tooltip};
|
||||
|
||||
use crate::tools::ProjectIndexTool;
|
||||
|
||||
pub struct ProjectIndexButton {
|
||||
project_index: Model<ProjectIndex>,
|
||||
tool_registry: Arc<ToolRegistry>,
|
||||
}
|
||||
|
||||
impl ProjectIndexButton {
|
||||
pub fn new(
|
||||
project_index: Model<ProjectIndex>,
|
||||
tool_registry: Arc<ToolRegistry>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
cx.subscribe(&project_index, |_this, _, _status: &Status, cx| {
|
||||
cx.notify();
|
||||
})
|
||||
.detach();
|
||||
Self {
|
||||
project_index,
|
||||
tool_registry,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_enabled(&mut self, enabled: bool) {
|
||||
self.tool_registry
|
||||
.set_tool_enabled::<ProjectIndexTool>(enabled);
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ProjectIndexButton {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let status = self.project_index.read(cx).status();
|
||||
let is_enabled = self.tool_registry.is_tool_enabled::<ProjectIndexTool>();
|
||||
|
||||
let icon = if is_enabled {
|
||||
match status {
|
||||
Status::Idle => Icon::new(IconName::Code)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Default),
|
||||
Status::Loading => Icon::new(IconName::Code)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
Status::Scanning { .. } => Icon::new(IconName::Code)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
}
|
||||
} else {
|
||||
Icon::new(IconName::Code)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Disabled)
|
||||
};
|
||||
|
||||
let indicator = if is_enabled {
|
||||
match status {
|
||||
Status::Idle => Some(Indicator::dot().color(Color::Success)),
|
||||
Status::Scanning { .. } => Some(Indicator::dot().color(Color::Warning)),
|
||||
Status::Loading => Some(Indicator::icon(
|
||||
Icon::new(IconName::Spinner)
|
||||
.color(Color::Accent)
|
||||
.with_animation(
|
||||
"arrow-circle",
|
||||
Animation::new(Duration::from_secs(2)).repeat(),
|
||||
|icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
|
||||
),
|
||||
)),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
ButtonLike::new("project-index")
|
||||
.child(
|
||||
ui::IconWithIndicator::new(icon, indicator)
|
||||
.indicator_border_color(Some(gpui::transparent_black())),
|
||||
)
|
||||
.tooltip({
|
||||
move |cx| {
|
||||
let (tooltip, meta) = match (is_enabled, status) {
|
||||
(false, _) => (
|
||||
"Project index disabled".to_string(),
|
||||
Some("Click to enable".to_string()),
|
||||
),
|
||||
(_, Status::Idle) => (
|
||||
"Project index ready".to_string(),
|
||||
Some("Click to disable".to_string()),
|
||||
),
|
||||
(_, Status::Loading) => ("Project index loading...".to_string(), None),
|
||||
(_, Status::Scanning { remaining_count }) => (
|
||||
"Project index scanning...".to_string(),
|
||||
Some(format!("{} remaining...", remaining_count)),
|
||||
),
|
||||
};
|
||||
|
||||
if let Some(meta) = meta {
|
||||
Tooltip::with_meta(tooltip, None, meta, cx)
|
||||
} else {
|
||||
Tooltip::text(tooltip, cx)
|
||||
}
|
||||
}
|
||||
})
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.set_enabled(!is_enabled);
|
||||
cx.notify();
|
||||
}))
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,3 @@
|
||||
mod chat_message;
|
||||
mod chat_notice;
|
||||
|
||||
pub use chat_message::*;
|
||||
pub use chat_notice::*;
|
||||
|
||||
@@ -28,7 +28,7 @@ impl Render for ChatMessageStory {
|
||||
ChatMessage::new(
|
||||
MessageId(0),
|
||||
UserOrAssistant::User(Some(user_1.clone())),
|
||||
vec![div().child("What can I do here?").into_any_element()],
|
||||
Some(div().child("What can I do here?").into_any_element()),
|
||||
false,
|
||||
Box::new(|_, _| {}),
|
||||
),
|
||||
@@ -38,7 +38,7 @@ impl Render for ChatMessageStory {
|
||||
ChatMessage::new(
|
||||
MessageId(0),
|
||||
UserOrAssistant::User(Some(user_1.clone())),
|
||||
vec![div().child("What can I do here?").into_any_element()],
|
||||
Some(div().child("What can I do here?").into_any_element()),
|
||||
true,
|
||||
Box::new(|_, _| {}),
|
||||
),
|
||||
@@ -51,7 +51,7 @@ impl Render for ChatMessageStory {
|
||||
ChatMessage::new(
|
||||
MessageId(0),
|
||||
UserOrAssistant::Assistant,
|
||||
vec![div().child("You can talk to me!").into_any_element()],
|
||||
Some(div().child("You can talk to me!").into_any_element()),
|
||||
false,
|
||||
Box::new(|_, _| {}),
|
||||
),
|
||||
@@ -61,7 +61,7 @@ impl Render for ChatMessageStory {
|
||||
ChatMessage::new(
|
||||
MessageId(0),
|
||||
UserOrAssistant::Assistant,
|
||||
vec![div().child(MULTI_LINE_MESSAGE).into_any_element()],
|
||||
Some(div().child(MULTI_LINE_MESSAGE).into_any_element()),
|
||||
true,
|
||||
Box::new(|_, _| {}),
|
||||
),
|
||||
@@ -75,21 +75,21 @@ impl Render for ChatMessageStory {
|
||||
.child(ChatMessage::new(
|
||||
MessageId(0),
|
||||
UserOrAssistant::User(Some(user_1.clone())),
|
||||
vec![div().child("What is Rust??").into_any_element()],
|
||||
Some(div().child("What is Rust??").into_any_element()),
|
||||
false,
|
||||
Box::new(|_, _| {}),
|
||||
))
|
||||
.child(ChatMessage::new(
|
||||
MessageId(0),
|
||||
UserOrAssistant::Assistant,
|
||||
vec![div().child("Rust is a multi-paradigm programming language focused on performance and safety").into_any_element()],
|
||||
Some(div().child("Rust is a multi-paradigm programming language focused on performance and safety").into_any_element()),
|
||||
false,
|
||||
Box::new(|_, _| {}),
|
||||
))
|
||||
.child(ChatMessage::new(
|
||||
MessageId(0),
|
||||
UserOrAssistant::User(Some(user_1)),
|
||||
vec![div().child("Sounds pretty cool!").into_any_element()],
|
||||
Some(div().child("Sounds pretty cool!").into_any_element()),
|
||||
false,
|
||||
Box::new(|_, _| {}),
|
||||
)),
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
use story::{StoryContainer, StoryItem, StorySection};
|
||||
use ui::prelude::*;
|
||||
|
||||
use crate::ui::ChatNotice;
|
||||
|
||||
pub struct ChatNoticeStory;
|
||||
|
||||
impl Render for ChatNoticeStory {
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
StoryContainer::new(
|
||||
"ChatNotice Story",
|
||||
"crates/assistant2/src/ui/stories/chat_notice.rs",
|
||||
)
|
||||
.child(
|
||||
StorySection::new().child(StoryItem::new(
|
||||
"Project index request",
|
||||
ChatNotice::new("Allow assistant to index your project?")
|
||||
.meta("Enabling will allow responses more relevant to this project."),
|
||||
)),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -13,21 +13,10 @@ path = "src/assistant_tooling.rs"

[dependencies]
anyhow.workspace = true
collections.workspace = true
futures.workspace = true
gpui.workspace = true
log.workspace = true
project.workspace = true
repair_json.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
sum_tree.workspace = true
ui.workspace = true
util.workspace = true

[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
unindent.workspace = true

@@ -1,13 +1,5 @@
mod attachment_registry;
mod project_context;
mod tool_registry;
pub mod registry;
pub mod tool;

pub use attachment_registry::{
    AttachmentOutput, AttachmentRegistry, LanguageModelAttachment, SavedUserAttachment,
    UserAttachment,
};
pub use project_context::ProjectContext;
pub use tool_registry::{
    LanguageModelTool, SavedToolFunctionCall, ToolFunctionCall, ToolFunctionDefinition, ToolOutput,
    ToolRegistry,
};
pub use crate::registry::ToolRegistry;
pub use crate::tool::{LanguageModelTool, ToolFunctionCall, ToolFunctionDefinition};
|
||||
@@ -1,234 +0,0 @@
|
||||
use crate::ProjectContext;
|
||||
use anyhow::{anyhow, Result};
|
||||
use collections::HashMap;
|
||||
use futures::future::join_all;
|
||||
use gpui::{AnyView, Render, Task, View, WindowContext};
|
||||
use serde::{de::DeserializeOwned, Deserialize, Serialize};
|
||||
use serde_json::value::RawValue;
|
||||
use std::{
|
||||
any::TypeId,
|
||||
sync::{
|
||||
atomic::{AtomicBool, Ordering::SeqCst},
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub struct AttachmentRegistry {
|
||||
registered_attachments: HashMap<TypeId, RegisteredAttachment>,
|
||||
}
|
||||
|
||||
pub trait AttachmentOutput {
|
||||
fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String;
|
||||
}
|
||||
|
||||
pub trait LanguageModelAttachment {
|
||||
type Output: DeserializeOwned + Serialize + 'static;
|
||||
type View: Render + AttachmentOutput;
|
||||
|
||||
fn name(&self) -> Arc<str>;
|
||||
fn run(&self, cx: &mut WindowContext) -> Task<Result<Self::Output>>;
|
||||
fn view(&self, output: Result<Self::Output>, cx: &mut WindowContext) -> View<Self::View>;
|
||||
}
|
||||
|
||||
/// A collected attachment from running an attachment tool
|
||||
pub struct UserAttachment {
|
||||
pub view: AnyView,
|
||||
name: Arc<str>,
|
||||
serialized_output: Result<Box<RawValue>, String>,
|
||||
generate_fn: fn(AnyView, &mut ProjectContext, cx: &mut WindowContext) -> String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SavedUserAttachment {
|
||||
name: Arc<str>,
|
||||
serialized_output: Result<Box<RawValue>, String>,
|
||||
}
|
||||
|
||||
/// Internal representation of an attachment tool to allow us to treat them dynamically
|
||||
struct RegisteredAttachment {
|
||||
name: Arc<str>,
|
||||
enabled: AtomicBool,
|
||||
call: Box<dyn Fn(&mut WindowContext) -> Task<Result<UserAttachment>>>,
|
||||
deserialize: Box<dyn Fn(&SavedUserAttachment, &mut WindowContext) -> Result<UserAttachment>>,
|
||||
}
|
||||
|
||||
impl AttachmentRegistry {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
registered_attachments: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register<A: LanguageModelAttachment + 'static>(&mut self, attachment: A) {
|
||||
let attachment = Arc::new(attachment);
|
||||
|
||||
let call = Box::new({
|
||||
let attachment = attachment.clone();
|
||||
move |cx: &mut WindowContext| {
|
||||
let result = attachment.run(cx);
|
||||
let attachment = attachment.clone();
|
||||
cx.spawn(move |mut cx| async move {
|
||||
let result: Result<A::Output> = result.await;
|
||||
let serialized_output =
|
||||
result
|
||||
.as_ref()
|
||||
.map_err(ToString::to_string)
|
||||
.and_then(|output| {
|
||||
Ok(RawValue::from_string(
|
||||
serde_json::to_string(output).map_err(|e| e.to_string())?,
|
||||
)
|
||||
.unwrap())
|
||||
});
|
||||
|
||||
let view = cx.update(|cx| attachment.view(result, cx))?;
|
||||
|
||||
Ok(UserAttachment {
|
||||
name: attachment.name(),
|
||||
view: view.into(),
|
||||
generate_fn: generate::<A>,
|
||||
serialized_output,
|
||||
})
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
let deserialize = Box::new({
|
||||
let attachment = attachment.clone();
|
||||
move |saved_attachment: &SavedUserAttachment, cx: &mut WindowContext| {
|
||||
let serialized_output = saved_attachment.serialized_output.clone();
|
||||
let output = match &serialized_output {
|
||||
Ok(serialized_output) => {
|
||||
Ok(serde_json::from_str::<A::Output>(serialized_output.get())?)
|
||||
}
|
||||
Err(error) => Err(anyhow!("{error}")),
|
||||
};
|
||||
let view = attachment.view(output, cx).into();
|
||||
|
||||
Ok(UserAttachment {
|
||||
name: saved_attachment.name.clone(),
|
||||
view,
|
||||
serialized_output,
|
||||
generate_fn: generate::<A>,
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
self.registered_attachments.insert(
|
||||
TypeId::of::<A>(),
|
||||
RegisteredAttachment {
|
||||
name: attachment.name(),
|
||||
call,
|
||||
deserialize,
|
||||
enabled: AtomicBool::new(true),
|
||||
},
|
||||
);
|
||||
return;
|
||||
|
||||
fn generate<T: LanguageModelAttachment>(
|
||||
view: AnyView,
|
||||
project: &mut ProjectContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> String {
|
||||
view.downcast::<T::View>()
|
||||
.unwrap()
|
||||
.update(cx, |view, cx| T::View::generate(view, project, cx))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_attachment_tool_enabled<A: LanguageModelAttachment + 'static>(
|
||||
&self,
|
||||
is_enabled: bool,
|
||||
) {
|
||||
if let Some(attachment) = self.registered_attachments.get(&TypeId::of::<A>()) {
|
||||
attachment.enabled.store(is_enabled, SeqCst);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_attachment_tool_enabled<A: LanguageModelAttachment + 'static>(&self) -> bool {
|
||||
if let Some(attachment) = self.registered_attachments.get(&TypeId::of::<A>()) {
|
||||
attachment.enabled.load(SeqCst)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn call<A: LanguageModelAttachment + 'static>(
|
||||
&self,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<UserAttachment>> {
|
||||
let Some(attachment) = self.registered_attachments.get(&TypeId::of::<A>()) else {
|
||||
return Task::ready(Err(anyhow!("no attachment tool")));
|
||||
};
|
||||
|
||||
(attachment.call)(cx)
|
||||
}
|
||||
|
||||
pub fn call_all_attachment_tools(
|
||||
self: Arc<Self>,
|
||||
cx: &mut WindowContext<'_>,
|
||||
) -> Task<Result<Vec<UserAttachment>>> {
|
||||
let this = self.clone();
|
||||
cx.spawn(|mut cx| async move {
|
||||
let attachment_tasks = cx.update(|cx| {
|
||||
let mut tasks = Vec::new();
|
||||
for attachment in this
|
||||
.registered_attachments
|
||||
.values()
|
||||
.filter(|attachment| attachment.enabled.load(SeqCst))
|
||||
{
|
||||
tasks.push((attachment.call)(cx))
|
||||
}
|
||||
|
||||
tasks
|
||||
})?;
|
||||
|
||||
let attachments = join_all(attachment_tasks.into_iter()).await;
|
||||
|
||||
Ok(attachments
|
||||
.into_iter()
|
||||
.filter_map(|attachment| attachment.log_err())
|
||||
.collect())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn serialize_user_attachment(
|
||||
&self,
|
||||
user_attachment: &UserAttachment,
|
||||
) -> SavedUserAttachment {
|
||||
SavedUserAttachment {
|
||||
name: user_attachment.name.clone(),
|
||||
serialized_output: user_attachment.serialized_output.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn deserialize_user_attachment(
|
||||
&self,
|
||||
saved_user_attachment: SavedUserAttachment,
|
||||
cx: &mut WindowContext,
|
||||
) -> Result<UserAttachment> {
|
||||
if let Some(registered_attachment) = self
|
||||
.registered_attachments
|
||||
.values()
|
||||
.find(|attachment| attachment.name == saved_user_attachment.name)
|
||||
{
|
||||
(registered_attachment.deserialize)(&saved_user_attachment, cx)
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"no attachment tool for name {}",
|
||||
saved_user_attachment.name
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl UserAttachment {
|
||||
pub fn generate(&self, output: &mut ProjectContext, cx: &mut WindowContext) -> Option<String> {
|
||||
let result = (self.generate_fn)(self.view.clone(), output, cx);
|
||||
if result.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(result)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,296 +0,0 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use gpui::{AppContext, Model, Task, WeakModel};
|
||||
use project::{Fs, Project, ProjectPath, Worktree};
|
||||
use std::{cmp::Ordering, fmt::Write as _, ops::Range, sync::Arc};
|
||||
use sum_tree::TreeMap;
|
||||
|
||||
pub struct ProjectContext {
|
||||
files: TreeMap<ProjectPath, PathState>,
|
||||
project: WeakModel<Project>,
|
||||
fs: Arc<dyn Fs>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum PathState {
|
||||
PathOnly,
|
||||
EntireFile,
|
||||
Excerpts { ranges: Vec<Range<usize>> },
|
||||
}
|
||||
|
||||
impl ProjectContext {
|
||||
pub fn new(project: WeakModel<Project>, fs: Arc<dyn Fs>) -> Self {
|
||||
Self {
|
||||
files: TreeMap::default(),
|
||||
fs,
|
||||
project,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_path(&mut self, project_path: ProjectPath) {
|
||||
if self.files.get(&project_path).is_none() {
|
||||
self.files.insert(project_path, PathState::PathOnly);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_excerpts(&mut self, project_path: ProjectPath, new_ranges: &[Range<usize>]) {
|
||||
let previous_state = self
|
||||
.files
|
||||
.get(&project_path)
|
||||
.unwrap_or(&PathState::PathOnly);
|
||||
|
||||
let mut ranges = match previous_state {
|
||||
PathState::EntireFile => return,
|
||||
PathState::PathOnly => Vec::new(),
|
||||
PathState::Excerpts { ranges } => ranges.to_vec(),
|
||||
};
|
||||
|
||||
for new_range in new_ranges {
|
||||
let ix = ranges.binary_search_by(|probe| {
|
||||
if probe.end < new_range.start {
|
||||
Ordering::Less
|
||||
} else if probe.start > new_range.end {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
});
|
||||
|
||||
match ix {
|
||||
Ok(mut ix) => {
|
||||
let existing = &mut ranges[ix];
|
||||
existing.start = existing.start.min(new_range.start);
|
||||
existing.end = existing.end.max(new_range.end);
|
||||
while ix + 1 < ranges.len() && ranges[ix + 1].start <= ranges[ix].end {
|
||||
ranges[ix].end = ranges[ix].end.max(ranges[ix + 1].end);
|
||||
ranges.remove(ix + 1);
|
||||
}
|
||||
while ix > 0 && ranges[ix - 1].end >= ranges[ix].start {
|
||||
ranges[ix].start = ranges[ix].start.min(ranges[ix - 1].start);
|
||||
ranges.remove(ix - 1);
|
||||
ix -= 1;
|
||||
}
|
||||
}
|
||||
Err(ix) => {
|
||||
ranges.insert(ix, new_range.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.files
|
||||
.insert(project_path, PathState::Excerpts { ranges });
|
||||
}
|
||||
|
||||
pub fn add_file(&mut self, project_path: ProjectPath) {
|
||||
self.files.insert(project_path, PathState::EntireFile);
|
||||
}
|
||||
|
||||
pub fn generate_system_message(&self, cx: &mut AppContext) -> Task<Result<String>> {
|
||||
let project = self
|
||||
.project
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("project dropped"));
|
||||
let files = self.files.clone();
|
||||
let fs = self.fs.clone();
|
||||
cx.spawn(|cx| async move {
|
||||
let project = project?;
|
||||
let mut result = "project structure:\n".to_string();
|
||||
|
||||
let mut last_worktree: Option<Model<Worktree>> = None;
|
||||
for (project_path, path_state) in files.iter() {
|
||||
if let Some(worktree) = &last_worktree {
|
||||
if worktree.read_with(&cx, |tree, _| tree.id())? != project_path.worktree_id {
|
||||
last_worktree = None;
|
||||
}
|
||||
}
|
||||
|
||||
let worktree;
|
||||
if let Some(last_worktree) = &last_worktree {
|
||||
worktree = last_worktree.clone();
|
||||
} else if let Some(tree) = project.read_with(&cx, |project, cx| {
|
||||
project.worktree_for_id(project_path.worktree_id, cx)
|
||||
})? {
|
||||
worktree = tree;
|
||||
last_worktree = Some(worktree.clone());
|
||||
let worktree_name =
|
||||
worktree.read_with(&cx, |tree, _cx| tree.root_name().to_string())?;
|
||||
writeln!(&mut result, "# {}", worktree_name).unwrap();
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
|
||||
let worktree_abs_path = worktree.read_with(&cx, |tree, _cx| tree.abs_path())?;
|
||||
let path = &project_path.path;
|
||||
writeln!(&mut result, "## {}", path.display()).unwrap();
|
||||
|
||||
match path_state {
|
||||
PathState::PathOnly => {}
|
||||
PathState::EntireFile => {
|
||||
let text = fs.load(&worktree_abs_path.join(&path)).await?;
|
||||
writeln!(&mut result, "~~~\n{text}\n~~~").unwrap();
|
||||
}
|
||||
PathState::Excerpts { ranges } => {
|
||||
let text = fs.load(&worktree_abs_path.join(&path)).await?;
|
||||
|
||||
writeln!(&mut result, "~~~").unwrap();
|
||||
|
||||
// Assumption: ranges are in order, not overlapping
|
||||
let mut prev_range_end = 0;
|
||||
for range in ranges {
|
||||
if range.start > prev_range_end {
|
||||
writeln!(&mut result, "...").unwrap();
|
||||
prev_range_end = range.end;
|
||||
}
|
||||
|
||||
let mut start = range.start;
|
||||
let mut end = range.end.min(text.len());
|
||||
while !text.is_char_boundary(start) {
|
||||
start += 1;
|
||||
}
|
||||
while !text.is_char_boundary(end) {
|
||||
end -= 1;
|
||||
}
|
||||
result.push_str(&text[start..end]);
|
||||
if !result.ends_with('\n') {
|
||||
result.push('\n');
|
||||
}
|
||||
}
|
||||
|
||||
if prev_range_end < text.len() {
|
||||
writeln!(&mut result, "...").unwrap();
|
||||
}
|
||||
|
||||
writeln!(&mut result, "~~~").unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use super::*;
|
||||
use gpui::TestAppContext;
|
||||
use project::FakeFs;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
|
||||
use unindent::Unindent as _;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_system_message_generation(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let file_3_contents = r#"
|
||||
fn test1() {}
|
||||
fn test2() {}
|
||||
fn test3() {}
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/code",
|
||||
json!({
|
||||
"root1": {
|
||||
"lib": {
|
||||
"file1.rs": "mod example;",
|
||||
"file2.rs": "",
|
||||
},
|
||||
"test": {
|
||||
"file3.rs": file_3_contents,
|
||||
}
|
||||
},
|
||||
"root2": {
|
||||
"src": {
|
||||
"main.rs": ""
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(
|
||||
fs.clone(),
|
||||
["/code/root1".as_ref(), "/code/root2".as_ref()],
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let worktree_ids = project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.worktrees()
|
||||
.map(|worktree| worktree.read(cx).id())
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
let mut ax = ProjectContext::new(project.downgrade(), fs);
|
||||
|
||||
ax.add_file(ProjectPath {
|
||||
worktree_id: worktree_ids[0],
|
||||
path: Path::new("lib/file1.rs").into(),
|
||||
});
|
||||
|
||||
let message = cx
|
||||
.update(|cx| ax.generate_system_message(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
r#"
|
||||
project structure:
|
||||
# root1
|
||||
## lib/file1.rs
|
||||
~~~
|
||||
mod example;
|
||||
~~~
|
||||
"#
|
||||
.unindent(),
|
||||
message
|
||||
);
|
||||
|
||||
ax.add_excerpts(
|
||||
ProjectPath {
|
||||
worktree_id: worktree_ids[0],
|
||||
path: Path::new("test/file3.rs").into(),
|
||||
},
|
||||
&[
|
||||
file_3_contents.find("fn test2").unwrap()
|
||||
..file_3_contents.find("fn test3").unwrap(),
|
||||
],
|
||||
);
|
||||
|
||||
let message = cx
|
||||
.update(|cx| ax.generate_system_message(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
r#"
|
||||
project structure:
|
||||
# root1
|
||||
## lib/file1.rs
|
||||
~~~
|
||||
mod example;
|
||||
~~~
|
||||
## test/file3.rs
|
||||
~~~
|
||||
...
|
||||
fn test2() {}
|
||||
...
|
||||
~~~
|
||||
"#
|
||||
.unindent(),
|
||||
message
|
||||
);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
259
crates/assistant_tooling/src/registry.rs
Normal file
@@ -0,0 +1,259 @@
use anyhow::{anyhow, Result};
use gpui::{AnyView, Task, WindowContext};
use std::collections::HashMap;

use crate::tool::{
    LanguageModelTool, ToolFunctionCall, ToolFunctionCallResult, ToolFunctionDefinition,
};

pub struct ToolRegistry {
    tools: HashMap<
        String,
        Box<dyn Fn(&ToolFunctionCall, &mut WindowContext) -> Task<Result<ToolFunctionCall>>>,
    >,
    definitions: Vec<ToolFunctionDefinition>,
    status_views: Vec<AnyView>,
}

impl ToolRegistry {
    pub fn new() -> Self {
        Self {
            tools: HashMap::new(),
            definitions: Vec::new(),
            status_views: Vec::new(),
        }
    }

    pub fn definitions(&self) -> &[ToolFunctionDefinition] {
        &self.definitions
    }

    pub fn register<T: 'static + LanguageModelTool>(
        &mut self,
        tool: T,
        cx: &mut WindowContext,
    ) -> Result<()> {
        self.definitions.push(tool.definition());

        if let Some(tool_view) = tool.status_view(cx) {
            self.status_views.push(tool_view);
        }

        let name = tool.name();
        let previous = self.tools.insert(
            name.clone(),
            // registry.call(tool_call, cx)
            Box::new(
                move |tool_call: &ToolFunctionCall, cx: &mut WindowContext| {
                    let name = tool_call.name.clone();
                    let arguments = tool_call.arguments.clone();
                    let id = tool_call.id.clone();

                    let Ok(input) = serde_json::from_str::<T::Input>(arguments.as_str()) else {
                        return Task::ready(Ok(ToolFunctionCall {
                            id,
                            name: name.clone(),
                            arguments,
                            result: Some(ToolFunctionCallResult::ParsingFailed),
                        }));
                    };

                    let result = tool.execute(&input, cx);

                    cx.spawn(move |mut cx| async move {
                        let result: Result<T::Output> = result.await;
                        let for_model = T::format(&input, &result);
                        let view = cx.update(|cx| T::output_view(id.clone(), input, result, cx))?;

                        Ok(ToolFunctionCall {
                            id,
                            name: name.clone(),
                            arguments,
                            result: Some(ToolFunctionCallResult::Finished {
                                view: view.into(),
                                for_model,
                            }),
                        })
                    })
                },
            ),
        );

        if previous.is_some() {
            return Err(anyhow!("already registered a tool with name {}", name));
        }

        Ok(())
    }

    /// Task yields an error if the window for the given WindowContext is closed before the task completes.
    pub fn call(
        &self,
        tool_call: &ToolFunctionCall,
        cx: &mut WindowContext,
    ) -> Task<Result<ToolFunctionCall>> {
        let name = tool_call.name.clone();
        let arguments = tool_call.arguments.clone();
        let id = tool_call.id.clone();

        let tool = match self.tools.get(&name) {
            Some(tool) => tool,
            None => {
                let name = name.clone();
                return Task::ready(Ok(ToolFunctionCall {
                    id,
                    name: name.clone(),
                    arguments,
                    result: Some(ToolFunctionCallResult::NoSuchTool),
                }));
            }
        };

        tool(tool_call, cx)
    }

    pub fn status_views(&self) -> &[AnyView] {
        &self.status_views
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use gpui::{div, prelude::*, Render, TestAppContext};
    use gpui::{EmptyView, View};
    use schemars::schema_for;
    use schemars::JsonSchema;
    use serde::{Deserialize, Serialize};
    use serde_json::json;

    #[derive(Deserialize, Serialize, JsonSchema)]
    struct WeatherQuery {
        location: String,
        unit: String,
    }

    struct WeatherTool {
        current_weather: WeatherResult,
    }

    #[derive(Clone, Serialize, Deserialize, PartialEq, Debug)]
    struct WeatherResult {
        location: String,
        temperature: f64,
        unit: String,
    }

    struct WeatherView {
        result: WeatherResult,
    }

    impl Render for WeatherView {
        fn render(&mut self, _cx: &mut gpui::ViewContext<Self>) -> impl IntoElement {
            div().child(format!("temperature: {}", self.result.temperature))
        }
    }

    impl LanguageModelTool for WeatherTool {
        type Input = WeatherQuery;
        type Output = WeatherResult;
        type View = WeatherView;

        fn name(&self) -> String {
            "get_current_weather".to_string()
        }

        fn description(&self) -> String {
            "Fetches the current weather for a given location.".to_string()
        }

        fn execute(
            &self,
            input: &Self::Input,
            _cx: &mut WindowContext,
        ) -> Task<Result<Self::Output>> {
            let _location = input.location.clone();
            let _unit = input.unit.clone();

            let weather = self.current_weather.clone();

            Task::ready(Ok(weather))
        }

        fn output_view(
            _tool_call_id: String,
            _input: Self::Input,
            result: Result<Self::Output>,
            cx: &mut WindowContext,
        ) -> View<Self::View> {
            cx.new_view(|_cx| {
                let result = result.unwrap();
                WeatherView { result }
            })
        }

        fn format(_: &Self::Input, output: &Result<Self::Output>) -> String {
            serde_json::to_string(&output.as_ref().unwrap()).unwrap()
        }
    }

    #[gpui::test]
    async fn test_openai_weather_example(cx: &mut TestAppContext) {
        cx.background_executor.run_until_parked();
        let (_, cx) = cx.add_window_view(|_cx| EmptyView);

        let tool = WeatherTool {
            current_weather: WeatherResult {
                location: "San Francisco".to_string(),
                temperature: 21.0,
                unit: "Celsius".to_string(),
            },
        };

        let tools = vec![tool.definition()];
        assert_eq!(tools.len(), 1);

        let expected = ToolFunctionDefinition {
            name: "get_current_weather".to_string(),
            description: "Fetches the current weather for a given location.".to_string(),
            parameters: schema_for!(WeatherQuery),
        };

        assert_eq!(tools[0].name, expected.name);
        assert_eq!(tools[0].description, expected.description);

        let expected_schema = serde_json::to_value(&tools[0].parameters).unwrap();

        assert_eq!(
            expected_schema,
            json!({
                "$schema": "http://json-schema.org/draft-07/schema#",
                "title": "WeatherQuery",
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string"
                    },
                    "unit": {
                        "type": "string"
                    }
                },
                "required": ["location", "unit"]
            })
        );

        let args = json!({
            "location": "San Francisco",
            "unit": "Celsius"
        });

        let query: WeatherQuery = serde_json::from_value(args).unwrap();

        let result = cx.update(|cx| tool.execute(&query, cx)).await;

        assert!(result.is_ok());
        let result = result.unwrap();

        assert_eq!(result, tool.current_weather);
    }
}
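Editor's note: the registry is the single place callers go for both the function definitions handed to the model and for dispatching a tool call the model sends back. A hedged sketch of that flow against the API above; it assumes the imports already present in registry.rs plus the WeatherTool type from the test module, and the function name is illustrative:

    // Illustrative only: register one tool, expose its definitions, and dispatch
    // a call coming back from the model through the same registry.
    fn dispatch_example(
        tool: WeatherTool,
        tool_call: &ToolFunctionCall,
        cx: &mut WindowContext,
    ) -> Result<Task<Result<ToolFunctionCall>>> {
        let mut registry = ToolRegistry::new();
        registry.register(tool, cx)?;

        // These definitions are what get serialized into the chat completion request.
        let _definitions: &[ToolFunctionDefinition] = registry.definitions();

        // The returned task resolves to the same call with `result` filled in
        // (Finished, ParsingFailed, or NoSuchTool).
        Ok(registry.call(tool_call, cx))
    }
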
111
crates/assistant_tooling/src/tool.rs
Normal file
@@ -0,0 +1,111 @@
use anyhow::Result;
use gpui::{AnyElement, AnyView, IntoElement as _, Render, Task, View, WindowContext};
use schemars::{schema::RootSchema, schema_for, JsonSchema};
use serde::Deserialize;
use std::fmt::Display;

#[derive(Default, Deserialize)]
pub struct ToolFunctionCall {
    pub id: String,
    pub name: String,
    pub arguments: String,
    #[serde(skip)]
    pub result: Option<ToolFunctionCallResult>,
}

pub enum ToolFunctionCallResult {
    NoSuchTool,
    ParsingFailed,
    Finished { for_model: String, view: AnyView },
}

impl ToolFunctionCallResult {
    pub fn format(&self, name: &String) -> String {
        match self {
            ToolFunctionCallResult::NoSuchTool => format!("No tool for {name}"),
            ToolFunctionCallResult::ParsingFailed => {
                format!("Unable to parse arguments for {name}")
            }
            ToolFunctionCallResult::Finished { for_model, .. } => for_model.clone(),
        }
    }

    pub fn into_any_element(&self, name: &String) -> AnyElement {
        match self {
            ToolFunctionCallResult::NoSuchTool => {
                format!("Language Model attempted to call {name}").into_any_element()
            }
            ToolFunctionCallResult::ParsingFailed => {
                format!("Language Model called {name} with bad arguments").into_any_element()
            }
            ToolFunctionCallResult::Finished { view, .. } => view.clone().into_any_element(),
        }
    }
}

#[derive(Clone)]
pub struct ToolFunctionDefinition {
    pub name: String,
    pub description: String,
    pub parameters: RootSchema,
}

impl Display for ToolFunctionDefinition {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let schema = serde_json::to_string(&self.parameters).ok();
        let schema = schema.unwrap_or("None".to_string());
        write!(f, "Name: {}:\n", self.name)?;
        write!(f, "Description: {}\n", self.description)?;
        write!(f, "Parameters: {}", schema)
    }
}

pub trait LanguageModelTool {
    /// The input type that will be passed in to `execute` when the tool is called
    /// by the language model.
    type Input: for<'de> Deserialize<'de> + JsonSchema;

    /// The output returned by executing the tool.
    type Output: 'static;

    type View: Render;

    /// Returns the name of the tool.
    ///
    /// This name is exposed to the language model to allow the model to pick
    /// which tools to use. As this name is used to identify the tool within a
    /// tool registry, it should be unique.
    fn name(&self) -> String;

    /// Returns the description of the tool.
    ///
    /// This can be used to _prompt_ the model as to what the tool does.
    fn description(&self) -> String;

    /// Returns the OpenAI Function definition for the tool, for direct use with OpenAI's API.
    fn definition(&self) -> ToolFunctionDefinition {
        let root_schema = schema_for!(Self::Input);

        ToolFunctionDefinition {
            name: self.name(),
            description: self.description(),
            parameters: root_schema,
        }
    }

    /// Executes the tool with the given input.
    fn execute(&self, input: &Self::Input, cx: &mut WindowContext) -> Task<Result<Self::Output>>;

    fn format(input: &Self::Input, output: &Result<Self::Output>) -> String;

    fn output_view(
        tool_call_id: String,
        input: Self::Input,
        output: Result<Self::Output>,
        cx: &mut WindowContext,
    ) -> View<Self::View>;

    fn status_view(&self, _cx: &mut WindowContext) -> Option<AnyView> {
        None
    }
}
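
As an aside (not part of the diff): once a call finishes, the for_model string carried by ToolFunctionCallResult::Finished is what would be sent back to the model as the tool's response. A small sketch with a hypothetical message type:

// ToolMessage is hypothetical; ToolFunctionCall and ToolFunctionCallResult come from tool.rs above.
struct ToolMessage {
    tool_call_id: String,
    content: String,
}

fn tool_message(call: &ToolFunctionCall) -> Option<ToolMessage> {
    let result = call.result.as_ref()?;
    Some(ToolMessage {
        tool_call_id: call.id.clone(),
        content: result.format(&call.name),
    })
}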
@@ -1,526 +0,0 @@
use crate::ProjectContext;
use anyhow::{anyhow, Result};
use gpui::{AnyElement, AnyView, IntoElement, Render, Task, View, WindowContext};
use repair_json::repair;
use schemars::{schema::RootSchema, schema_for, JsonSchema};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_json::value::RawValue;
use std::{
    any::TypeId,
    collections::HashMap,
    fmt::Display,
    mem,
    sync::atomic::{AtomicBool, Ordering::SeqCst},
};
use ui::ViewContext;

pub struct ToolRegistry {
    registered_tools: HashMap<String, RegisteredTool>,
}

#[derive(Default)]
pub struct ToolFunctionCall {
    pub id: String,
    pub name: String,
    pub arguments: String,
    state: ToolFunctionCallState,
}

#[derive(Default)]
enum ToolFunctionCallState {
    #[default]
    Initializing,
    NoSuchTool,
    KnownTool(Box<dyn ToolView>),
    ExecutedTool(Box<dyn ToolView>),
}

trait ToolView {
    fn view(&self) -> AnyView;
    fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String;
    fn try_set_input(&self, input: &str, cx: &mut WindowContext);
    fn execute(&self, cx: &mut WindowContext) -> Task<Result<()>>;
    fn serialize_output(&self, cx: &mut WindowContext) -> Result<Box<RawValue>>;
    fn deserialize_output(&self, raw_value: &RawValue, cx: &mut WindowContext) -> Result<()>;
}

#[derive(Default, Serialize, Deserialize)]
pub struct SavedToolFunctionCall {
    id: String,
    name: String,
    arguments: String,
    state: SavedToolFunctionCallState,
}

#[derive(Default, Serialize, Deserialize)]
enum SavedToolFunctionCallState {
    #[default]
    Initializing,
    NoSuchTool,
    KnownTool,
    ExecutedTool(Box<RawValue>),
}

#[derive(Clone, Debug, PartialEq)]
pub struct ToolFunctionDefinition {
    pub name: String,
    pub description: String,
    pub parameters: RootSchema,
}

pub trait LanguageModelTool {
    type View: ToolOutput;

    /// Returns the name of the tool.
    ///
    /// This name is exposed to the language model to allow the model to pick
    /// which tools to use. As this name is used to identify the tool within a
    /// tool registry, it should be unique.
    fn name(&self) -> String;

    /// Returns the description of the tool.
    ///
    /// This can be used to _prompt_ the model as to what the tool does.
    fn description(&self) -> String;

    /// Returns the OpenAI Function definition for the tool, for direct use with OpenAI's API.
    fn definition(&self) -> ToolFunctionDefinition {
        let root_schema = schema_for!(<Self::View as ToolOutput>::Input);

        ToolFunctionDefinition {
            name: self.name(),
            description: self.description(),
            parameters: root_schema,
        }
    }

    /// A view of the output of running the tool, for displaying to the user.
    fn view(&self, cx: &mut WindowContext) -> View<Self::View>;
}

pub trait ToolOutput: Render {
    /// The input type that will be passed in to `execute` when the tool is called
    /// by the language model.
    type Input: DeserializeOwned + JsonSchema;

    /// The output returned by executing the tool.
    type SerializedState: DeserializeOwned + Serialize;

    fn generate(&self, project: &mut ProjectContext, cx: &mut ViewContext<Self>) -> String;
    fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext<Self>);
    fn execute(&mut self, cx: &mut ViewContext<Self>) -> Task<Result<()>>;

    fn serialize(&self, cx: &mut ViewContext<Self>) -> Self::SerializedState;
    fn deserialize(
        &mut self,
        output: Self::SerializedState,
        cx: &mut ViewContext<Self>,
    ) -> Result<()>;
}

struct RegisteredTool {
    enabled: AtomicBool,
    type_id: TypeId,
    build_view: Box<dyn Fn(&mut WindowContext) -> Box<dyn ToolView>>,
    definition: ToolFunctionDefinition,
}

impl ToolRegistry {
    pub fn new() -> Self {
        Self {
            registered_tools: HashMap::new(),
        }
    }

    pub fn set_tool_enabled<T: 'static + LanguageModelTool>(&self, is_enabled: bool) {
        for tool in self.registered_tools.values() {
            if tool.type_id == TypeId::of::<T>() {
                tool.enabled.store(is_enabled, SeqCst);
                return;
            }
        }
    }

    pub fn is_tool_enabled<T: 'static + LanguageModelTool>(&self) -> bool {
        for tool in self.registered_tools.values() {
            if tool.type_id == TypeId::of::<T>() {
                return tool.enabled.load(SeqCst);
            }
        }
        false
    }

    pub fn definitions(&self) -> Vec<ToolFunctionDefinition> {
        self.registered_tools
            .values()
            .filter(|tool| tool.enabled.load(SeqCst))
            .map(|tool| tool.definition.clone())
            .collect()
    }

    pub fn update_tool_call(
        &self,
        call: &mut ToolFunctionCall,
        name: Option<&str>,
        arguments: Option<&str>,
        cx: &mut WindowContext,
    ) {
        if let Some(name) = name {
            call.name.push_str(name);
        }
        if let Some(arguments) = arguments {
            if call.arguments.is_empty() {
                if let Some(tool) = self.registered_tools.get(&call.name) {
                    let view = (tool.build_view)(cx);
                    call.state = ToolFunctionCallState::KnownTool(view);
                } else {
                    call.state = ToolFunctionCallState::NoSuchTool;
                }
            }
            call.arguments.push_str(arguments);

            if let ToolFunctionCallState::KnownTool(view) = &call.state {
                if let Ok(repaired_arguments) = repair(call.arguments.clone()) {
                    view.try_set_input(&repaired_arguments, cx)
                }
            }
        }
    }

    pub fn execute_tool_call(
        &self,
        tool_call: &mut ToolFunctionCall,
        cx: &mut WindowContext,
    ) -> Option<Task<Result<()>>> {
        if let ToolFunctionCallState::KnownTool(view) = mem::take(&mut tool_call.state) {
            let task = view.execute(cx);
            tool_call.state = ToolFunctionCallState::ExecutedTool(view);
            Some(task)
        } else {
            None
        }
    }

    pub fn render_tool_call(
        &self,
        tool_call: &ToolFunctionCall,
        _cx: &mut WindowContext,
    ) -> Option<AnyElement> {
        match &tool_call.state {
            ToolFunctionCallState::NoSuchTool => {
                Some(ui::Label::new("No such tool").into_any_element())
            }
            ToolFunctionCallState::Initializing => None,
            ToolFunctionCallState::KnownTool(view) | ToolFunctionCallState::ExecutedTool(view) => {
                Some(view.view().into_any_element())
            }
        }
    }

    pub fn content_for_tool_call(
        &self,
        tool_call: &ToolFunctionCall,
        project_context: &mut ProjectContext,
        cx: &mut WindowContext,
    ) -> String {
        match &tool_call.state {
            ToolFunctionCallState::Initializing => String::new(),
            ToolFunctionCallState::NoSuchTool => {
                format!("No such tool: {}", tool_call.name)
            }
            ToolFunctionCallState::KnownTool(view) | ToolFunctionCallState::ExecutedTool(view) => {
                view.generate(project_context, cx)
            }
        }
    }

    pub fn serialize_tool_call(
        &self,
        call: &ToolFunctionCall,
        cx: &mut WindowContext,
    ) -> Result<SavedToolFunctionCall> {
        Ok(SavedToolFunctionCall {
            id: call.id.clone(),
            name: call.name.clone(),
            arguments: call.arguments.clone(),
            state: match &call.state {
                ToolFunctionCallState::Initializing => SavedToolFunctionCallState::Initializing,
                ToolFunctionCallState::NoSuchTool => SavedToolFunctionCallState::NoSuchTool,
                ToolFunctionCallState::KnownTool(_) => SavedToolFunctionCallState::KnownTool,
                ToolFunctionCallState::ExecutedTool(view) => {
                    SavedToolFunctionCallState::ExecutedTool(view.serialize_output(cx)?)
                }
            },
        })
    }

    pub fn deserialize_tool_call(
        &self,
        call: &SavedToolFunctionCall,
        cx: &mut WindowContext,
    ) -> Result<ToolFunctionCall> {
        let Some(tool) = self.registered_tools.get(&call.name) else {
            return Err(anyhow!("no such tool {}", call.name));
        };

        Ok(ToolFunctionCall {
            id: call.id.clone(),
            name: call.name.clone(),
            arguments: call.arguments.clone(),
            state: match &call.state {
                SavedToolFunctionCallState::Initializing => ToolFunctionCallState::Initializing,
                SavedToolFunctionCallState::NoSuchTool => ToolFunctionCallState::NoSuchTool,
                SavedToolFunctionCallState::KnownTool => {
                    log::error!("Deserialized tool that had not executed");
                    let view = (tool.build_view)(cx);
                    view.try_set_input(&call.arguments, cx);
                    ToolFunctionCallState::KnownTool(view)
                }
                SavedToolFunctionCallState::ExecutedTool(output) => {
                    let view = (tool.build_view)(cx);
                    view.try_set_input(&call.arguments, cx);
                    view.deserialize_output(output, cx)?;
                    ToolFunctionCallState::ExecutedTool(view)
                }
            },
        })
    }

    pub fn register<T: 'static + LanguageModelTool>(&mut self, tool: T) -> Result<()> {
        let name = tool.name();
        let registered_tool = RegisteredTool {
            type_id: TypeId::of::<T>(),
            definition: tool.definition(),
            enabled: AtomicBool::new(true),
            build_view: Box::new(move |cx: &mut WindowContext| Box::new(tool.view(cx))),
        };

        let previous = self.registered_tools.insert(name.clone(), registered_tool);
        if previous.is_some() {
            return Err(anyhow!("already registered a tool with name {}", name));
        }

        return Ok(());
    }
}

impl<T: ToolOutput> ToolView for View<T> {
    fn view(&self) -> AnyView {
        self.clone().into()
    }

    fn generate(&self, project: &mut ProjectContext, cx: &mut WindowContext) -> String {
        self.update(cx, |view, cx| view.generate(project, cx))
    }

    fn try_set_input(&self, input: &str, cx: &mut WindowContext) {
        if let Ok(input) = serde_json::from_str::<T::Input>(input) {
            self.update(cx, |view, cx| {
                view.set_input(input, cx);
                cx.notify();
            });
        }
    }

    fn execute(&self, cx: &mut WindowContext) -> Task<Result<()>> {
        self.update(cx, |view, cx| view.execute(cx))
    }

    fn serialize_output(&self, cx: &mut WindowContext) -> Result<Box<RawValue>> {
        let output = self.update(cx, |view, cx| view.serialize(cx));
        Ok(RawValue::from_string(serde_json::to_string(&output)?)?)
    }

    fn deserialize_output(&self, output: &RawValue, cx: &mut WindowContext) -> Result<()> {
        let state = serde_json::from_str::<T::SerializedState>(output.get())?;
        self.update(cx, |view, cx| view.deserialize(state, cx))?;
        Ok(())
    }
}

impl Display for ToolFunctionDefinition {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let schema = serde_json::to_string(&self.parameters).ok();
        let schema = schema.unwrap_or("None".to_string());
        write!(f, "Name: {}:\n", self.name)?;
        write!(f, "Description: {}\n", self.description)?;
        write!(f, "Parameters: {}", schema)
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use gpui::{div, prelude::*, Render, TestAppContext};
    use gpui::{EmptyView, View};
    use schemars::JsonSchema;
    use serde::{Deserialize, Serialize};
    use serde_json::json;

    #[derive(Deserialize, Serialize, JsonSchema)]
    struct WeatherQuery {
        location: String,
        unit: String,
    }

    #[derive(Clone, Serialize, Deserialize, PartialEq, Debug)]
    struct WeatherResult {
        location: String,
        temperature: f64,
        unit: String,
    }

    struct WeatherView {
        input: Option<WeatherQuery>,
        result: Option<WeatherResult>,

        // Fake API call
        current_weather: WeatherResult,
    }

    #[derive(Clone, Serialize)]
    struct WeatherTool {
        current_weather: WeatherResult,
    }

    impl WeatherView {
        fn new(current_weather: WeatherResult) -> Self {
            Self {
                input: None,
                result: None,
                current_weather,
            }
        }
    }

    impl Render for WeatherView {
        fn render(&mut self, _cx: &mut gpui::ViewContext<Self>) -> impl IntoElement {
            match self.result {
                Some(ref result) => div()
                    .child(format!("temperature: {}", result.temperature))
                    .into_any_element(),
                None => div().child("Calculating weather...").into_any_element(),
            }
        }
    }

    impl ToolOutput for WeatherView {
        type Input = WeatherQuery;

        type SerializedState = WeatherResult;

        fn generate(&self, _output: &mut ProjectContext, _cx: &mut ViewContext<Self>) -> String {
            serde_json::to_string(&self.result).unwrap()
        }

        fn set_input(&mut self, input: Self::Input, cx: &mut ViewContext<Self>) {
            self.input = Some(input);
            cx.notify();
        }

        fn execute(&mut self, _cx: &mut ViewContext<Self>) -> Task<Result<()>> {
            let input = self.input.as_ref().unwrap();

            let _location = input.location.clone();
            let _unit = input.unit.clone();

            let weather = self.current_weather.clone();

            self.result = Some(weather);

            Task::ready(Ok(()))
        }

        fn serialize(&self, _cx: &mut ViewContext<Self>) -> Self::SerializedState {
            self.current_weather.clone()
        }

        fn deserialize(
            &mut self,
            output: Self::SerializedState,
            _cx: &mut ViewContext<Self>,
        ) -> Result<()> {
            self.current_weather = output;
            Ok(())
        }
    }

    impl LanguageModelTool for WeatherTool {
        type View = WeatherView;

        fn name(&self) -> String {
            "get_current_weather".to_string()
        }

        fn description(&self) -> String {
            "Fetches the current weather for a given location.".to_string()
        }

        fn view(&self, cx: &mut WindowContext) -> View<Self::View> {
            cx.new_view(|_cx| WeatherView::new(self.current_weather.clone()))
        }
    }

    #[gpui::test]
    async fn test_openai_weather_example(cx: &mut TestAppContext) {
        let (_, cx) = cx.add_window_view(|_cx| EmptyView);

        let mut registry = ToolRegistry::new();
        registry
            .register(WeatherTool {
                current_weather: WeatherResult {
                    location: "San Francisco".to_string(),
                    temperature: 21.0,
                    unit: "Celsius".to_string(),
                },
            })
            .unwrap();

        let definitions = registry.definitions();
        assert_eq!(
            definitions,
            [ToolFunctionDefinition {
                name: "get_current_weather".to_string(),
                description: "Fetches the current weather for a given location.".to_string(),
                parameters: serde_json::from_value(json!({
                    "$schema": "http://json-schema.org/draft-07/schema#",
                    "title": "WeatherQuery",
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string"
                        },
                        "unit": {
                            "type": "string"
                        }
                    },
                    "required": ["location", "unit"]
                }))
                .unwrap(),
            }]
        );

        let mut call = ToolFunctionCall {
            id: "the-id".to_string(),
            name: "get_cur".to_string(),
            ..Default::default()
        };

        let task = cx.update(|cx| {
            registry.update_tool_call(
                &mut call,
                Some("rent_weather"),
                Some(r#"{"location": "San Francisco","#),
                cx,
            );
            registry.update_tool_call(&mut call, None, Some(r#" "unit": "Celsius"}"#), cx);
            registry.execute_tool_call(&mut call, cx).unwrap()
        });
        task.await.unwrap();

        match &call.state {
            ToolFunctionCallState::ExecutedTool(_view) => {}
            _ => panic!(),
        }
    }
}
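
The registry removed above parsed tool arguments while they were still streaming, running each partial buffer through repair_json so incomplete JSON could be closed off and re-tried on every chunk. A standalone sketch of that idea, assuming the repair_json crate; the StreamingArgs type is illustrative only and not part of this change.

use repair_json::repair;

// Accumulates streamed argument fragments and re-parses after each chunk.
#[derive(Default)]
struct StreamingArgs {
    buffer: String,
}

impl StreamingArgs {
    fn push<T: serde::de::DeserializeOwned>(&mut self, chunk: &str) -> Option<T> {
        self.buffer.push_str(chunk);
        // repair() closes unterminated strings/objects so a partial buffer can still parse.
        let repaired = repair(self.buffer.clone()).ok()?;
        serde_json::from_str(&repaired).ok()
    }
}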
@@ -15,7 +15,7 @@ use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPrevi
use schemars::JsonSchema;
use serde::Deserialize;
use serde_derive::Serialize;
use smol::{fs, io::AsyncReadExt};
use smol::io::AsyncReadExt;

use settings::{Settings, SettingsSources, SettingsStore};
use smol::{fs::File, process::Command};
@@ -24,7 +24,6 @@ use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
use std::{
    env::consts::{ARCH, OS},
    ffi::OsString,
    path::PathBuf,
    sync::Arc,
    time::Duration,
};
@@ -56,22 +55,16 @@ struct UpdateRequestBody {
    telemetry: bool,
}

#[derive(Clone, PartialEq, Eq)]
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum AutoUpdateStatus {
    Idle,
    Checking,
    Downloading,
    Installing,
    Updated { binary_path: PathBuf },
    Updated,
    Errored,
}

impl AutoUpdateStatus {
    pub fn is_updated(&self) -> bool {
        matches!(self, Self::Updated { .. })
    }
}

pub struct AutoUpdater {
    status: AutoUpdateStatus,
    current_version: SemanticVersion,
@@ -228,9 +221,9 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Wo
    workspace
        .update(&mut cx, |workspace, cx| {
            let project = workspace.project().clone();
            let buffer = project.update(cx, |project, cx| {
                project.create_local_buffer("", markdown, cx)
            });
            let buffer = project
                .update(cx, |project, cx| project.create_buffer("", markdown, cx))
                .expect("creating buffers on a local workspace always succeeds");
            buffer.update(cx, |buffer, cx| {
                buffer.edit([(0..0, body.release_notes)], None, cx)
            });
@@ -312,7 +305,7 @@ impl AutoUpdater {
    }

    pub fn poll(&mut self, cx: &mut ModelContext<Self>) {
        if self.pending_poll.is_some() || self.status.is_updated() {
        if self.pending_poll.is_some() || self.status == AutoUpdateStatus::Updated {
            return;
        }

@@ -334,7 +327,7 @@ impl AutoUpdater {
    }

    pub fn status(&self) -> AutoUpdateStatus {
        self.status.clone()
        self.status
    }

    pub fn dismiss_error(&mut self, cx: &mut ModelContext<Self>) {
@@ -347,15 +340,9 @@ impl AutoUpdater {
            (this.http_client.clone(), this.current_version)
        })?;

        let asset = match OS {
            "linux" => format!("zed-linux-{}.tar.gz", ARCH),
            "macos" => "Zed.dmg".into(),
            _ => return Err(anyhow!("auto-update not supported for OS {:?}", OS)),
        };

        let mut url_string = client.build_url(&format!(
            "/api/releases/latest?asset={}&os={}&arch={}",
            asset, OS, ARCH
            "/api/releases/latest?asset=Zed.dmg&os={}&arch={}",
            OS, ARCH
        ));
        cx.update(|cx| {
            if let Some(param) = ReleaseChannel::try_global(cx)
@@ -374,7 +361,6 @@ impl AutoUpdater {
            .read_to_end(&mut body)
            .await
            .context("error reading release")?;

        let release: JsonRelease =
            serde_json::from_slice(body.as_slice()).context("error deserializing release")?;

@@ -403,31 +389,88 @@ impl AutoUpdater {
        let temp_dir = tempfile::Builder::new()
            .prefix("zed-auto-update")
            .tempdir()?;
        let downloaded_asset = download_release(&temp_dir, release, &asset, client, &cx).await?;
        let dmg_path = temp_dir.path().join("Zed.dmg");
        let mount_path = temp_dir.path().join("Zed");
        let running_app_path = ZED_APP_PATH
            .clone()
            .map_or_else(|| cx.update(|cx| cx.app_path())?, Ok)?;
        let running_app_filename = running_app_path
            .file_name()
            .ok_or_else(|| anyhow!("invalid running app path"))?;
        let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();
        mounted_app_path.push("/");

        let mut dmg_file = File::create(&dmg_path).await?;

        let (installation_id, release_channel, telemetry) = cx.update(|cx| {
            let installation_id = Client::global(cx).telemetry().installation_id();
            let release_channel = ReleaseChannel::try_global(cx)
                .map(|release_channel| release_channel.display_name());
            let telemetry = TelemetrySettings::get_global(cx).metrics;

            (installation_id, release_channel, telemetry)
        })?;

        let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
            installation_id,
            release_channel,
            telemetry,
        })?);

        let mut response = client.get(&release.url, request_body, true).await?;
        smol::io::copy(response.body_mut(), &mut dmg_file).await?;
        log::info!("downloaded update. path:{:?}", dmg_path);

        this.update(&mut cx, |this, cx| {
            this.status = AutoUpdateStatus::Installing;
            cx.notify();
        })?;

        // We store the path of our current binary, before we install, since installation might
        // delete it. Once deleted, it's hard to get the path to our binary on Linux.
        // So we cache it here, which allows us to then restart later on.
        let binary_path = cx.update(|cx| cx.app_path())??;
        let output = Command::new("hdiutil")
            .args(&["attach", "-nobrowse"])
            .arg(&dmg_path)
            .arg("-mountroot")
            .arg(&temp_dir.path())
            .output()
            .await?;
        if !output.status.success() {
            Err(anyhow!(
                "failed to mount: {:?}",
                String::from_utf8_lossy(&output.stderr)
            ))?;
        }

        match OS {
            "macos" => install_release_macos(&temp_dir, downloaded_asset, &cx).await,
            "linux" => install_release_linux(&temp_dir, downloaded_asset, &cx).await,
            _ => Err(anyhow!("not supported: {:?}", OS)),
        }?;
        let output = Command::new("rsync")
            .args(&["-av", "--delete"])
            .arg(&mounted_app_path)
            .arg(&running_app_path)
            .output()
            .await?;
        if !output.status.success() {
            Err(anyhow!(
                "failed to copy app: {:?}",
                String::from_utf8_lossy(&output.stderr)
            ))?;
        }

        let output = Command::new("hdiutil")
            .args(&["detach"])
            .arg(&mount_path)
            .output()
            .await?;
        if !output.status.success() {
            Err(anyhow!(
                "failed to unmount: {:?}",
                String::from_utf8_lossy(&output.stderr)
            ))?;
        }

        this.update(&mut cx, |this, cx| {
            this.set_should_show_update_notification(true, cx)
                .detach_and_log_err(cx);
            this.status = AutoUpdateStatus::Updated { binary_path };
            this.status = AutoUpdateStatus::Updated;
            cx.notify();
        })?;

        Ok(())
    }

@@ -461,150 +504,3 @@ impl AutoUpdater {
        })
    }
}

async fn download_release(
    temp_dir: &tempfile::TempDir,
    release: JsonRelease,
    target_filename: &str,
    client: Arc<HttpClientWithUrl>,
    cx: &AsyncAppContext,
) -> Result<PathBuf> {
    let target_path = temp_dir.path().join(target_filename);
    let mut target_file = File::create(&target_path).await?;

    let (installation_id, release_channel, telemetry) = cx.update(|cx| {
        let installation_id = Client::global(cx).telemetry().installation_id();
        let release_channel =
            ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
        let telemetry = TelemetrySettings::get_global(cx).metrics;

        (installation_id, release_channel, telemetry)
    })?;

    let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
        installation_id,
        release_channel,
        telemetry,
    })?);

    let mut response = client.get(&release.url, request_body, true).await?;
    smol::io::copy(response.body_mut(), &mut target_file).await?;
    log::info!("downloaded update. path:{:?}", target_path);

    Ok(target_path)
}

async fn install_release_linux(
    temp_dir: &tempfile::TempDir,
    downloaded_tar_gz: PathBuf,
    cx: &AsyncAppContext,
) -> Result<()> {
    let channel = cx.update(|cx| ReleaseChannel::global(cx).dev_name())?;
    let home_dir = PathBuf::from(std::env::var("HOME").context("no HOME env var set")?);

    let extracted = temp_dir.path().join("zed");
    fs::create_dir_all(&extracted)
        .await
        .context("failed to create directory into which to extract update")?;

    let output = Command::new("tar")
        .arg("-xzf")
        .arg(&downloaded_tar_gz)
        .arg("-C")
        .arg(&extracted)
        .output()
        .await?;

    anyhow::ensure!(
        output.status.success(),
        "failed to extract {:?} to {:?}: {:?}",
        downloaded_tar_gz,
        extracted,
        String::from_utf8_lossy(&output.stderr)
    );

    let suffix = if channel != "stable" {
        format!("-{}", channel)
    } else {
        String::default()
    };
    let app_folder_name = format!("zed{}.app", suffix);

    let from = extracted.join(&app_folder_name);
    let to = home_dir.join(".local");

    let output = Command::new("rsync")
        .args(&["-av", "--delete"])
        .arg(&from)
        .arg(&to)
        .output()
        .await?;

    anyhow::ensure!(
        output.status.success(),
        "failed to copy Zed update from {:?} to {:?}: {:?}",
        from,
        to,
        String::from_utf8_lossy(&output.stderr)
    );

    Ok(())
}

async fn install_release_macos(
    temp_dir: &tempfile::TempDir,
    downloaded_dmg: PathBuf,
    cx: &AsyncAppContext,
) -> Result<()> {
    let running_app_path = ZED_APP_PATH
        .clone()
        .map_or_else(|| cx.update(|cx| cx.app_path())?, Ok)?;
    let running_app_filename = running_app_path
        .file_name()
        .ok_or_else(|| anyhow!("invalid running app path"))?;

    let mount_path = temp_dir.path().join("Zed");
    let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();

    mounted_app_path.push("/");
    let output = Command::new("hdiutil")
        .args(&["attach", "-nobrowse"])
        .arg(&downloaded_dmg)
        .arg("-mountroot")
        .arg(&temp_dir.path())
        .output()
        .await?;

    anyhow::ensure!(
        output.status.success(),
        "failed to mount: {:?}",
        String::from_utf8_lossy(&output.stderr)
    );

    let output = Command::new("rsync")
        .args(&["-av", "--delete"])
        .arg(&mounted_app_path)
        .arg(&running_app_path)
        .output()
        .await?;

    anyhow::ensure!(
        output.status.success(),
        "failed to copy app: {:?}",
        String::from_utf8_lossy(&output.stderr)
    );

    let output = Command::new("hdiutil")
        .args(&["detach"])
        .arg(&mount_path)
        .output()
        .await?;

    anyhow::ensure!(
        output.status.success(),
        "failed to unmount: {:?}",
        String::from_utf8_lossy(&output.stderr)
    );

    Ok(())
}

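On the side of this diff where AutoUpdateStatus is Copy and Updated carries no binary_path, callers can match the status by value. A small illustrative sketch; the label strings are assumptions, not taken from this change.

fn status_label(status: AutoUpdateStatus) -> &'static str {
    match status {
        AutoUpdateStatus::Idle => "Up to date",
        AutoUpdateStatus::Checking => "Checking for updates...",
        AutoUpdateStatus::Downloading => "Downloading update...",
        AutoUpdateStatus::Installing => "Installing update...",
        AutoUpdateStatus::Updated => "Restart to finish updating",
        AutoUpdateStatus::Errored => "Auto update failed",
    }
}
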
@@ -1203,12 +1203,11 @@ impl Room {
        project: Model<Project>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<u64>> {
        let request = if let Some(dev_server_project_id) = project.read(cx).dev_server_project_id()
        {
        let request = if let Some(remote_project_id) = project.read(cx).remote_project_id() {
            self.client.request(proto::ShareProject {
                room_id: self.id(),
                worktrees: vec![],
                dev_server_project_id: Some(dev_server_project_id.0),
                remote_project_id: Some(remote_project_id.0),
            })
        } else {
            if let Some(project_id) = project.read(cx).remote_id() {
@@ -1218,7 +1217,7 @@ impl Room {
            self.client.request(proto::ShareProject {
                room_id: self.id(),
                worktrees: project.read(cx).worktree_metadata_protos(cx),
                dev_server_project_id: None,
                remote_project_id: None,
            })
        };


@@ -19,17 +19,11 @@ path = "src/main.rs"
[dependencies]
anyhow.workspace = true
clap.workspace = true
libc.workspace = true
ipc-channel = "0.18"
once_cell.workspace = true
release_channel.workspace = true
serde.workspace = true
util.workspace = true

[target.'cfg(target_os = "linux")'.dependencies]
exec.workspace = true
fork.workspace = true

[target.'cfg(target_os = "macos")'.dependencies]
core-foundation.workspace = true
core-services = "0.2"

@@ -13,7 +13,6 @@ pub enum CliRequest {
        paths: Vec<String>,
        wait: bool,
        open_new_workspace: Option<bool>,
        dev_server_token: Option<String>,
    },
}


@@ -1,21 +1,17 @@
#![cfg_attr(any(target_os = "linux", target_os = "windows"), allow(dead_code))]

use anyhow::{Context, Result};
use anyhow::{anyhow, Context, Result};
use clap::Parser;
use cli::{ipc::IpcOneShotServer, CliRequest, CliResponse, IpcHandshake};
use cli::{CliRequest, CliResponse};
use serde::Deserialize;
use std::{
    env, fs,
    env,
    ffi::OsStr,
    fs,
    path::{Path, PathBuf},
};
use util::paths::PathLikeWithPosition;

struct Detect;

trait InstalledApp {
    fn zed_version_string(&self) -> String;
    fn launch(&self, ipc_url: String) -> anyhow::Result<()>;
}

#[derive(Parser, Debug)]
#[command(name = "zed", disable_version_flag = true)]
struct Args {
@@ -37,9 +33,9 @@ struct Args {
    /// Print Zed's version and the app path.
    #[arg(short, long)]
    version: bool,
    /// Custom path to Zed.app or the zed binary
    #[arg(long)]
    zed: Option<PathBuf>,
    /// Custom Zed.app path
    #[arg(short, long)]
    bundle_path: Option<PathBuf>,
    /// Run zed in dev-server mode
    #[arg(long)]
    dev_server_token: Option<String>,
@@ -53,6 +49,12 @@ fn parse_path_with_position(
    })
}

#[derive(Debug, Deserialize)]
struct InfoPlist {
    #[serde(rename = "CFBundleShortVersionString")]
    bundle_short_version_string: String,
}

fn main() -> Result<()> {
    // Intercept version designators
    #[cfg(target_os = "macos")]
@@ -66,10 +68,14 @@ fn main() -> Result<()> {
    }
    let args = Args::parse();

    let app = Detect::detect(args.zed.as_deref()).context("Bundle detection")?;
    let bundle = Bundle::detect(args.bundle_path.as_deref()).context("Bundle detection")?;

    if let Some(dev_server_token) = args.dev_server_token {
        return bundle.spawn(vec!["--dev-server-token".into(), dev_server_token]);
    }

    if args.version {
        println!("{}", app.zed_version_string());
        println!("{}", bundle.zed_version_string());
        return Ok(());
    }

@@ -95,14 +101,7 @@ fn main() -> Result<()> {
        paths.push(canonicalized.to_string(|path| path.display().to_string()))
    }

    let (server, server_name) =
        IpcOneShotServer::<IpcHandshake>::new().context("Handshake before Zed spawn")?;
    let url = format!("zed-cli://{server_name}");

    app.launch(url)?;
    let (_, handshake) = server.accept().context("Handshake after Zed spawn")?;
    let (tx, rx) = (handshake.requests, handshake.responses);

    let (tx, rx) = bundle.launch()?;
    let open_new_workspace = if args.new {
        Some(true)
    } else if args.add {
@@ -115,7 +114,6 @@ fn main() -> Result<()> {
        paths,
        wait: args.wait,
        open_new_workspace,
        dev_server_token: args.dev_server_token,
    })?;

    while let Ok(response) = rx.recv() {
@@ -130,125 +128,60 @@ fn main() -> Result<()> {
    Ok(())
}

enum Bundle {
    App {
        app_bundle: PathBuf,
        plist: InfoPlist,
    },
    LocalPath {
        executable: PathBuf,
        plist: InfoPlist,
    },
}

fn locate_bundle() -> Result<PathBuf> {
    let cli_path = std::env::current_exe()?.canonicalize()?;
    let mut app_path = cli_path.clone();
    while app_path.extension() != Some(OsStr::new("app")) {
        if !app_path.pop() {
            return Err(anyhow!("cannot find app bundle containing {:?}", cli_path));
        }
    }
    Ok(app_path)
}

#[cfg(target_os = "linux")]
mod linux {
    use std::{
        env,
        ffi::OsString,
        io,
        os::{
            linux::net::SocketAddrExt,
            unix::net::{SocketAddr, UnixDatagram},
        },
        path::{Path, PathBuf},
        process, thread,
        time::Duration,
    };
    use std::path::Path;

    use anyhow::anyhow;
    use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
    use fork::Fork;
    use once_cell::sync::Lazy;
    use cli::{CliRequest, CliResponse};
    use ipc_channel::ipc::{IpcReceiver, IpcSender};

    use crate::{Detect, InstalledApp};
    use crate::{Bundle, InfoPlist};

    static RELEASE_CHANNEL: Lazy<String> =
        Lazy::new(|| include_str!("../../zed/RELEASE_CHANNEL").trim().to_string());

    struct App(PathBuf);

    impl Detect {
        pub fn detect(path: Option<&Path>) -> anyhow::Result<impl InstalledApp> {
            let path = if let Some(path) = path {
                path.to_path_buf().canonicalize()
            } else {
                let cli = env::current_exe()?;
                let dir = cli
                    .parent()
                    .ok_or_else(|| anyhow!("no parent path for cli"))?;

                match dir.join("zed").canonicalize() {
                    Ok(path) => Ok(path),
                    // development builds have Zed capitalized
                    Err(e) => match dir.join("Zed").canonicalize() {
                        Ok(path) => Ok(path),
                        Err(_) => Err(e),
                    },
                }
            }?;

            Ok(App(path))
        }
    }

    impl InstalledApp for App {
        fn zed_version_string(&self) -> String {
            format!(
                "Zed {}{} – {}",
                if *RELEASE_CHANNEL == "stable" {
                    "".to_string()
                } else {
                    format!(" {} ", *RELEASE_CHANNEL)
                },
                option_env!("RELEASE_VERSION").unwrap_or_default(),
                self.0.display(),
            )
    impl Bundle {
        pub fn detect(_args_bundle_path: Option<&Path>) -> anyhow::Result<Self> {
            unimplemented!()
        }

        fn launch(&self, ipc_url: String) -> anyhow::Result<()> {
            let uid: u32 = unsafe { libc::getuid() };
            let sock_addr =
                SocketAddr::from_abstract_name(format!("zed-{}-{}", *RELEASE_CHANNEL, uid))?;

            let sock = UnixDatagram::unbound()?;
            if sock.connect_addr(&sock_addr).is_err() {
                self.boot_background(ipc_url)?;
            } else {
                sock.send(ipc_url.as_bytes())?;
            }
            Ok(())
        }
    }

    impl App {
        fn boot_background(&self, ipc_url: String) -> anyhow::Result<()> {
            let path = &self.0;

            match fork::fork() {
                Ok(Fork::Parent(_)) => Ok(()),
                Ok(Fork::Child) => {
                    std::env::set_var(FORCE_CLI_MODE_ENV_VAR_NAME, "");
                    if let Err(_) = fork::setsid() {
                        eprintln!("failed to setsid: {}", std::io::Error::last_os_error());
                        process::exit(1);
                    }
                    if std::env::var("ZED_KEEP_FD").is_err() {
                        if let Err(_) = fork::close_fd() {
                            eprintln!("failed to close_fd: {}", std::io::Error::last_os_error());
                        }
                    }
                    let error =
                        exec::execvp(path.clone(), &[path.as_os_str(), &OsString::from(ipc_url)]);
                    // if exec succeeded, we never get here.
                    eprintln!("failed to exec {:?}: {}", path, error);
                    process::exit(1)
                }
                Err(_) => Err(anyhow!(io::Error::last_os_error())),
            }
        pub fn plist(&self) -> &InfoPlist {
            unimplemented!()
        }

        fn wait_for_socket(
            &self,
            sock_addr: &SocketAddr,
            sock: &mut UnixDatagram,
        ) -> Result<(), std::io::Error> {
            for _ in 0..100 {
                thread::sleep(Duration::from_millis(10));
                if sock.connect_addr(&sock_addr).is_ok() {
                    return Ok(());
                }
            }
            sock.connect_addr(&sock_addr)
        pub fn path(&self) -> &Path {
            unimplemented!()
        }

        pub fn launch(&self) -> anyhow::Result<(IpcSender<CliRequest>, IpcReceiver<CliResponse>)> {
            unimplemented!()
        }

        pub fn spawn(&self, _args: Vec<String>) -> anyhow::Result<()> {
            unimplemented!()
        }

        pub fn zed_version_string(&self) -> String {
            unimplemented!()
        }
    }
}
@@ -256,79 +189,59 @@ mod linux {
// todo("windows")
#[cfg(target_os = "windows")]
mod windows {
    use crate::{Detect, InstalledApp};
    use std::path::Path;

    struct App;
    impl InstalledApp for App {
        fn zed_version_string(&self) -> String {
            unimplemented!()
        }
        fn launch(&self, _ipc_url: String) -> anyhow::Result<()> {
            unimplemented!()
        }
    }
    use cli::{CliRequest, CliResponse};
    use ipc_channel::ipc::{IpcReceiver, IpcSender};

    impl Detect {
        pub fn detect(_path: Option<&Path>) -> anyhow::Result<impl InstalledApp> {
            Ok(App)
    use crate::{Bundle, InfoPlist};

    impl Bundle {
        pub fn detect(_args_bundle_path: Option<&Path>) -> anyhow::Result<Self> {
            unimplemented!()
        }

        pub fn plist(&self) -> &InfoPlist {
            unimplemented!()
        }

        pub fn path(&self) -> &Path {
            unimplemented!()
        }

        pub fn launch(&self) -> anyhow::Result<(IpcSender<CliRequest>, IpcReceiver<CliResponse>)> {
            unimplemented!()
        }

        pub fn spawn(&self, _args: Vec<String>) -> anyhow::Result<()> {
            unimplemented!()
        }

        pub fn zed_version_string(&self) -> String {
            unimplemented!()
        }
    }
}

#[cfg(target_os = "macos")]
mod mac_os {
    use anyhow::{anyhow, Context, Result};
    use anyhow::{Context, Result};
    use core_foundation::{
        array::{CFArray, CFIndex},
        string::kCFStringEncodingUTF8,
        url::{CFURLCreateWithBytes, CFURL},
    };
    use core_services::{kLSLaunchDefaults, LSLaunchURLSpec, LSOpenFromURLSpec, TCFType};
    use serde::Deserialize;
    use std::{
        ffi::OsStr,
        fs,
        path::{Path, PathBuf},
        process::Command,
        ptr,
    };
    use std::{fs, path::Path, process::Command, ptr};

    use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
    use cli::{CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME};
    use ipc_channel::ipc::{IpcOneShotServer, IpcReceiver, IpcSender};

    use crate::{Detect, InstalledApp};
    use crate::{locate_bundle, Bundle, InfoPlist};

    #[derive(Debug, Deserialize)]
    struct InfoPlist {
        #[serde(rename = "CFBundleShortVersionString")]
        bundle_short_version_string: String,
    }

    enum Bundle {
        App {
            app_bundle: PathBuf,
            plist: InfoPlist,
        },
        LocalPath {
            executable: PathBuf,
            plist: InfoPlist,
        },
    }

    fn locate_bundle() -> Result<PathBuf> {
        let cli_path = std::env::current_exe()?.canonicalize()?;
        let mut app_path = cli_path.clone();
        while app_path.extension() != Some(OsStr::new("app")) {
            if !app_path.pop() {
                return Err(anyhow!("cannot find app bundle containing {:?}", cli_path));
            }
        }
        Ok(app_path)
    }

    impl Detect {
        pub fn detect(path: Option<&Path>) -> anyhow::Result<impl InstalledApp> {
            let bundle_path = if let Some(bundle_path) = path {
    impl Bundle {
        pub fn detect(args_bundle_path: Option<&Path>) -> anyhow::Result<Self> {
            let bundle_path = if let Some(bundle_path) = args_bundle_path {
                bundle_path
                    .canonicalize()
                    .with_context(|| format!("Args bundle path {bundle_path:?} canonicalization"))?
@@ -343,7 +256,7 @@ mod mac_os {
                plist::from_file::<_, InfoPlist>(&plist_path).with_context(|| {
                    format!("Reading *.app bundle plist file at {plist_path:?}")
                })?;
                Ok(Bundle::App {
                Ok(Self::App {
                    app_bundle: bundle_path,
                    plist,
                })
@@ -358,27 +271,42 @@ mod mac_os {
                plist::from_file::<_, InfoPlist>(&plist_path).with_context(|| {
                    format!("Reading dev bundle plist file at {plist_path:?}")
                })?;
                Ok(Bundle::LocalPath {
                Ok(Self::LocalPath {
                    executable: bundle_path,
                    plist,
                })
            }
        }
    }
}

    impl InstalledApp for Bundle {
        fn zed_version_string(&self) -> String {
            let is_dev = matches!(self, Self::LocalPath { .. });
            format!(
                "Zed {}{} – {}",
                self.plist().bundle_short_version_string,
                if is_dev { " (dev)" } else { "" },
                self.path().display(),
            )
        fn plist(&self) -> &InfoPlist {
            match self {
                Self::App { plist, .. } => plist,
                Self::LocalPath { plist, .. } => plist,
            }
        }

        fn launch(&self, url: String) -> anyhow::Result<()> {
        fn path(&self) -> &Path {
            match self {
                Self::App { app_bundle, .. } => app_bundle,
                Self::LocalPath { executable, .. } => executable,
            }
        }

        pub fn spawn(&self, args: Vec<String>) -> Result<()> {
            let path = match self {
                Self::App { app_bundle, .. } => app_bundle.join("Contents/MacOS/zed"),
                Self::LocalPath { executable, .. } => executable.clone(),
            };
            Command::new(path).args(args).status()?;
            Ok(())
        }

        pub fn launch(&self) -> anyhow::Result<(IpcSender<CliRequest>, IpcReceiver<CliResponse>)> {
            let (server, server_name) =
                IpcOneShotServer::<IpcHandshake>::new().context("Handshake before Zed spawn")?;
            let url = format!("zed-cli://{server_name}");

            match self {
                Self::App { app_bundle, .. } => {
                    let app_path = app_bundle;
@@ -440,23 +368,18 @@ mod mac_os {
                }
            }

            Ok(())
        }
    }

    impl Bundle {
        fn plist(&self) -> &InfoPlist {
            match self {
                Self::App { plist, .. } => plist,
                Self::LocalPath { plist, .. } => plist,
            }
            let (_, handshake) = server.accept().context("Handshake after Zed spawn")?;
            Ok((handshake.requests, handshake.responses))
        }

        fn path(&self) -> &Path {
            match self {
                Self::App { app_bundle, .. } => app_bundle,
                Self::LocalPath { executable, .. } => executable,
            }
        pub fn zed_version_string(&self) -> String {
            let is_dev = matches!(self, Self::LocalPath { .. });
            format!(
                "Zed {}{} – {}",
                self.plist().bundle_short_version_string,
                if is_dev { " (dev)" } else { "" },
                self.path().display(),
            )
        }
    }


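Both sides of the CLI change above funnel through the same IPC handshake: open a one-shot IPC server, hand its name to Zed in a zed-cli:// URL, then wait for Zed to connect back with the request and response channels. A condensed sketch of that flow, using the same types the diff imports; the connect_to_zed wrapper itself is illustrative.

use anyhow::Context as _;
use cli::{ipc::IpcOneShotServer, CliRequest, CliResponse, IpcHandshake};
use ipc_channel::ipc::{IpcReceiver, IpcSender};

// `launch` is whatever actually starts Zed with the URL (open the app bundle, exec the binary, ...).
fn connect_to_zed(
    launch: impl FnOnce(String) -> anyhow::Result<()>,
) -> anyhow::Result<(IpcSender<CliRequest>, IpcReceiver<CliResponse>)> {
    let (server, server_name) =
        IpcOneShotServer::<IpcHandshake>::new().context("Handshake before Zed spawn")?;
    launch(format!("zed-cli://{server_name}"))?;
    let (_, handshake) = server.accept().context("Handshake after Zed spawn")?;
    Ok((handshake.requests, handshake.responses))
}
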
@@ -16,38 +16,39 @@ doctest = false
test-support = ["clock/test-support", "collections/test-support", "gpui/test-support", "rpc/test-support"]

[dependencies]
anyhow.workspace = true
async-recursion = "0.3"
async-tungstenite = { version = "0.16", features = ["async-std", "async-native-tls"] }
chrono = { workspace = true, features = ["serde"] }
clock.workspace = true
collections.workspace = true
feature_flags.workspace = true
futures.workspace = true
gpui.workspace = true
util.workspace = true
release_channel.workspace = true
rpc.workspace = true
text.workspace = true
settings.workspace = true
feature_flags.workspace = true

anyhow.workspace = true
async-recursion = "0.3"
async-tungstenite = { version = "0.16", features = ["async-std", "async-native-tls"] }
futures.workspace = true
lazy_static.workspace = true
log.workspace = true
once_cell.workspace = true
once_cell = "1.19.0"
parking_lot.workspace = true
postage.workspace = true
rand.workspace = true
release_channel.workspace = true
rpc.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
sha2.workspace = true
smol.workspace = true
sysinfo.workspace = true
telemetry_events.workspace = true
tempfile.workspace = true
text.workspace = true
thiserror.workspace = true
time.workspace = true
tiny_http = "0.8"
url.workspace = true
util.workspace = true

[dev-dependencies]
clock = { workspace = true, features = ["test-support"] }

@@ -30,7 +30,6 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources, SettingsStore};
use std::fmt;
use std::pin::Pin;
use std::{
    any::TypeId,
    convert::TryFrom,
@@ -66,13 +65,6 @@ impl fmt::Display for DevServerToken {
lazy_static! {
    static ref ZED_SERVER_URL: Option<String> = std::env::var("ZED_SERVER_URL").ok();
    static ref ZED_RPC_URL: Option<String> = std::env::var("ZED_RPC_URL").ok();
    /// An environment variable whose presence indicates that the development auth
    /// provider should be used.
    ///
    /// Only works in development. Setting this environment variable in other release
    /// channels is a no-op.
    pub static ref ZED_DEVELOPMENT_AUTH: bool =
        std::env::var("ZED_DEVELOPMENT_AUTH").map_or(false, |value| !value.is_empty());
    pub static ref IMPERSONATE_LOGIN: Option<String> = std::env::var("ZED_IMPERSONATE")
        .ok()
        .and_then(|s| if s.is_empty() { None } else { Some(s) });
@@ -108,7 +100,7 @@ impl Settings for ClientSettings {
    fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
        let mut result = sources.json_merge::<Self>()?;
        if let Some(server_url) = &*ZED_SERVER_URL {
            result.server_url.clone_from(&server_url)
            result.server_url = server_url.clone()
        }
        Ok(result)
    }
@@ -169,7 +161,6 @@ pub struct Client {
    peer: Arc<Peer>,
    http: Arc<HttpClientWithUrl>,
    telemetry: Arc<Telemetry>,
    credentials_provider: Arc<dyn CredentialsProvider + Send + Sync + 'static>,
    state: RwLock<ClientState>,

    #[allow(clippy::type_complexity)]
@@ -307,32 +298,6 @@ impl Credentials {
    }
}

/// A provider for [`Credentials`].
///
/// Used to abstract over reading and writing credentials to some form of
/// persistence (like the system keychain).
trait CredentialsProvider {
    /// Reads the credentials from the provider.
    fn read_credentials<'a>(
        &'a self,
        cx: &'a AsyncAppContext,
    ) -> Pin<Box<dyn Future<Output = Option<Credentials>> + 'a>>;

    /// Writes the credentials to the provider.
    fn write_credentials<'a>(
        &'a self,
        user_id: u64,
        access_token: String,
        cx: &'a AsyncAppContext,
    ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>>;

    /// Deletes the credentials from the provider.
    fn delete_credentials<'a>(
        &'a self,
        cx: &'a AsyncAppContext,
    ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>>;
}
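
For orientation, any storage backend can sit behind this trait. A minimal in-memory provider, e.g. for tests, might look like the sketch below; it is an assumption layered on the trait above, relying on the file's existing futures::FutureExt import for boxed_local and on Credentials being Clone.

// Illustrative only; not part of this diff.
struct InMemoryCredentialsProvider {
    credentials: std::sync::Mutex<Option<Credentials>>,
}

impl CredentialsProvider for InMemoryCredentialsProvider {
    fn read_credentials<'a>(
        &'a self,
        _cx: &'a AsyncAppContext,
    ) -> Pin<Box<dyn Future<Output = Option<Credentials>> + 'a>> {
        async move { self.credentials.lock().unwrap().clone() }.boxed_local()
    }

    fn write_credentials<'a>(
        &'a self,
        user_id: u64,
        access_token: String,
        _cx: &'a AsyncAppContext,
    ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
        async move {
            *self.credentials.lock().unwrap() = Some(Credentials::User { user_id, access_token });
            Ok(())
        }
        .boxed_local()
    }

    fn delete_credentials<'a>(
        &'a self,
        _cx: &'a AsyncAppContext,
    ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
        async move {
            self.credentials.lock().unwrap().take();
            Ok(())
        }
        .boxed_local()
    }
}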
impl Default for ClientState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
@@ -478,27 +443,11 @@ impl Client {
|
||||
http: Arc<HttpClientWithUrl>,
|
||||
cx: &mut AppContext,
|
||||
) -> Arc<Self> {
|
||||
let use_zed_development_auth = match ReleaseChannel::try_global(cx) {
|
||||
Some(ReleaseChannel::Dev) => *ZED_DEVELOPMENT_AUTH,
|
||||
Some(ReleaseChannel::Nightly | ReleaseChannel::Preview | ReleaseChannel::Stable)
|
||||
| None => false,
|
||||
};
|
||||
|
||||
let credentials_provider: Arc<dyn CredentialsProvider + Send + Sync + 'static> =
|
||||
if use_zed_development_auth {
|
||||
Arc::new(DevelopmentCredentialsProvider {
|
||||
path: util::paths::CONFIG_DIR.join("development_auth"),
|
||||
})
|
||||
} else {
|
||||
Arc::new(KeychainCredentialsProvider)
|
||||
};
|
||||
|
||||
Arc::new(Self {
|
||||
id: AtomicU64::new(0),
|
||||
peer: Peer::new(0),
|
||||
telemetry: Telemetry::new(clock, http.clone(), cx),
|
||||
http,
|
||||
credentials_provider,
|
||||
state: Default::default(),
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
@@ -814,11 +763,8 @@ impl Client {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn has_credentials(&self, cx: &AsyncAppContext) -> bool {
|
||||
self.credentials_provider
|
||||
.read_credentials(cx)
|
||||
.await
|
||||
.is_some()
|
||||
pub async fn has_keychain_credentials(&self, cx: &AsyncAppContext) -> bool {
|
||||
read_credentials_from_keychain(cx).await.is_some()
|
||||
}
|
||||
|
||||
pub fn set_dev_server_token(&self, token: DevServerToken) -> &Self {
|
||||
@@ -829,7 +775,7 @@ impl Client {
|
||||
#[async_recursion(?Send)]
|
||||
pub async fn authenticate_and_connect(
|
||||
self: &Arc<Self>,
|
||||
try_provider: bool,
|
||||
try_keychain: bool,
|
||||
cx: &AsyncAppContext,
|
||||
) -> anyhow::Result<()> {
|
||||
let was_disconnected = match *self.status().borrow() {
|
||||
@@ -850,13 +796,12 @@ impl Client {
|
||||
self.set_status(Status::Reauthenticating, cx)
|
||||
}
|
||||
|
||||
let mut read_from_provider = false;
|
||||
let mut read_from_keychain = false;
|
||||
let mut credentials = self.state.read().credentials.clone();
|
||||
if credentials.is_none() && try_provider {
|
||||
credentials = self.credentials_provider.read_credentials(cx).await;
|
||||
read_from_provider = credentials.is_some();
|
||||
if credentials.is_none() && try_keychain {
|
||||
credentials = read_credentials_from_keychain(cx).await;
|
||||
read_from_keychain = credentials.is_some();
|
||||
}
|
||||
|
||||
if credentials.is_none() {
|
||||
let mut status_rx = self.status();
|
||||
let _ = status_rx.next().await;
|
||||
@@ -893,9 +838,9 @@ impl Client {
|
||||
match connection {
|
||||
Ok(conn) => {
|
||||
self.state.write().credentials = Some(credentials.clone());
|
||||
if !read_from_provider && IMPERSONATE_LOGIN.is_none() {
|
||||
if !read_from_keychain && IMPERSONATE_LOGIN.is_none() {
|
||||
if let Credentials::User{user_id, access_token} = credentials {
|
||||
self.credentials_provider.write_credentials(user_id, access_token, cx).await.log_err();
|
||||
write_credentials_to_keychain(user_id, access_token, cx).await.log_err();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -909,8 +854,8 @@ impl Client {
|
||||
}
|
||||
Err(EstablishConnectionError::Unauthorized) => {
|
||||
self.state.write().credentials.take();
|
||||
if read_from_provider {
|
||||
self.credentials_provider.delete_credentials(cx).await.log_err();
|
||||
if read_from_keychain {
|
||||
delete_credentials_from_keychain(cx).await.log_err();
|
||||
self.set_status(Status::SignedOut, cx);
|
||||
self.authenticate_and_connect(false, cx).await
|
||||
} else {
|
||||
@@ -1319,11 +1264,8 @@ impl Client {
|
||||
self.state.write().credentials = None;
|
||||
self.disconnect(&cx);
|
||||
|
||||
if self.has_credentials(cx).await {
|
||||
self.credentials_provider
|
||||
.delete_credentials(cx)
|
||||
.await
|
||||
.log_err();
|
||||
if self.has_keychain_credentials(cx).await {
|
||||
delete_credentials_from_keychain(cx).await.log_err();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1523,128 +1465,41 @@ impl Client {
}
}

#[derive(Serialize, Deserialize)]
struct DevelopmentCredentials {
async fn read_credentials_from_keychain(cx: &AsyncAppContext) -> Option<Credentials> {
if IMPERSONATE_LOGIN.is_some() {
return None;
}

let (user_id, access_token) = cx
.update(|cx| cx.read_credentials(&ClientSettings::get_global(cx).server_url))
.log_err()?
.await
.log_err()??;

Some(Credentials::User {
user_id: user_id.parse().ok()?,
access_token: String::from_utf8(access_token).ok()?,
})
}

async fn write_credentials_to_keychain(
user_id: u64,
access_token: String,
cx: &AsyncAppContext,
) -> Result<()> {
cx.update(move |cx| {
cx.write_credentials(
&ClientSettings::get_global(cx).server_url,
&user_id.to_string(),
access_token.as_bytes(),
)
})?
.await
}

/// A credentials provider that stores credentials in a local file.
///
/// This MUST only be used in development, as this is not a secure way of storing
/// credentials on user machines.
///
/// Its existence is purely to work around the annoyance of having to constantly
/// re-allow access to the system keychain when developing Zed.
struct DevelopmentCredentialsProvider {
path: PathBuf,
}

impl CredentialsProvider for DevelopmentCredentialsProvider {
fn read_credentials<'a>(
&'a self,
_cx: &'a AsyncAppContext,
) -> Pin<Box<dyn Future<Output = Option<Credentials>> + 'a>> {
async move {
if IMPERSONATE_LOGIN.is_some() {
return None;
}

let json = std::fs::read(&self.path).log_err()?;

let credentials: DevelopmentCredentials = serde_json::from_slice(&json).log_err()?;

Some(Credentials::User {
user_id: credentials.user_id,
access_token: credentials.access_token,
})
}
.boxed_local()
}

fn write_credentials<'a>(
&'a self,
user_id: u64,
access_token: String,
_cx: &'a AsyncAppContext,
) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
async move {
let json = serde_json::to_string(&DevelopmentCredentials {
user_id,
access_token,
})?;

std::fs::write(&self.path, json)?;

Ok(())
}
.boxed_local()
}

fn delete_credentials<'a>(
&'a self,
_cx: &'a AsyncAppContext,
) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
async move { Ok(std::fs::remove_file(&self.path)?) }.boxed_local()
}
}

/// A credentials provider that stores credentials in the system keychain.
struct KeychainCredentialsProvider;

impl CredentialsProvider for KeychainCredentialsProvider {
fn read_credentials<'a>(
&'a self,
cx: &'a AsyncAppContext,
) -> Pin<Box<dyn Future<Output = Option<Credentials>> + 'a>> {
async move {
if IMPERSONATE_LOGIN.is_some() {
return None;
}

let (user_id, access_token) = cx
.update(|cx| cx.read_credentials(&ClientSettings::get_global(cx).server_url))
.log_err()?
.await
.log_err()??;

Some(Credentials::User {
user_id: user_id.parse().ok()?,
access_token: String::from_utf8(access_token).ok()?,
})
}
.boxed_local()
}

fn write_credentials<'a>(
&'a self,
user_id: u64,
access_token: String,
cx: &'a AsyncAppContext,
) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
async move {
cx.update(move |cx| {
cx.write_credentials(
&ClientSettings::get_global(cx).server_url,
&user_id.to_string(),
access_token.as_bytes(),
)
})?
.await
}
.boxed_local()
}

fn delete_credentials<'a>(
&'a self,
cx: &'a AsyncAppContext,
) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
async move {
cx.update(move |cx| cx.delete_credentials(&ClientSettings::get_global(cx).server_url))?
.await
}
.boxed_local()
}
async fn delete_credentials_from_keychain(cx: &AsyncAppContext) -> Result<()> {
cx.update(move |cx| cx.delete_credentials(&ClientSettings::get_global(cx).server_url))?
.await
}

/// prefix for the zed:// url scheme

@@ -12,7 +12,7 @@ use settings::{Settings, SettingsStore};
use sha2::{Digest, Sha256};
use std::io::Write;
use std::{env, mem, path::PathBuf, sync::Arc, time::Duration};
use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System};
use sysinfo::{CpuRefreshKind, MemoryRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System};
use telemetry_events::{
ActionEvent, AppEvent, AssistantEvent, AssistantKind, CallEvent, CopilotEvent, CpuEvent,
EditEvent, EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, MemoryEvent,
@@ -171,38 +171,40 @@ impl Telemetry {
drop(state);

let this = self.clone();
cx.background_executor()
.spawn(async move {
let mut system = System::new_with_specifics(
RefreshKind::new().with_cpu(CpuRefreshKind::everything()),
);
cx.spawn(|_| async move {
// Avoiding calling `System::new_all()`, as there have been crashes related to it
let refresh_kind = RefreshKind::new()
.with_memory(MemoryRefreshKind::everything()) // For memory usage
.with_processes(ProcessRefreshKind::everything()) // For process usage
.with_cpu(CpuRefreshKind::everything()); // For core count

let mut system = System::new_with_specifics(refresh_kind);

// Avoiding calling `refresh_all()`, just update what we need
system.refresh_specifics(refresh_kind);

// Waiting some amount of time before the first query is important to get a reasonable value
// https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage
const DURATION_BETWEEN_SYSTEM_EVENTS: Duration = Duration::from_secs(4 * 60);

loop {
smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await;

system.refresh_specifics(refresh_kind);

let refresh_kind = ProcessRefreshKind::new().with_cpu().with_memory();
let current_process = Pid::from_u32(std::process::id());
system.refresh_process_specifics(current_process, refresh_kind);
let Some(process) = system.processes().get(&current_process) else {
let process = current_process;
log::error!("Failed to find own process {process:?} in system process table");
// TODO: Fire an error telemetry event
return;
};

// Waiting some amount of time before the first query is important to get a reasonable value
// https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage
const DURATION_BETWEEN_SYSTEM_EVENTS: Duration = Duration::from_secs(4 * 60);

loop {
smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await;

let current_process = Pid::from_u32(std::process::id());
system.refresh_process_specifics(current_process, refresh_kind);
let Some(process) = system.process(current_process) else {
log::error!(
"Failed to find own process {current_process:?} in system process table"
);
// TODO: Fire an error telemetry event
return;
};

this.report_memory_event(process.memory(), process.virtual_memory());
this.report_cpu_event(process.cpu_usage(), system.cpus().len() as u32);
}
})
.detach();
this.report_memory_event(process.memory(), process.virtual_memory());
this.report_cpu_event(process.cpu_usage(), system.cpus().len() as u32);
}
})
.detach();
}

pub fn set_authenticated_user_info(
@@ -217,7 +219,7 @@ impl Telemetry {
}

let metrics_id: Option<Arc<str>> = metrics_id.map(|id| id.into());
state.metrics_id.clone_from(&metrics_id);
state.metrics_id = metrics_id.clone();
state.is_staff = Some(is_staff);
drop(state);
}

@@ -33,7 +33,7 @@ pub struct DevServerId(pub u64);
#[derive(
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
)]
pub struct DevServerProjectId(pub u64);
pub struct RemoteProjectId(pub u64);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ParticipantIndex(pub u32);

@@ -39,7 +39,6 @@ live_kit_server.workspace = true
log.workspace = true
nanoid.workspace = true
open_ai.workspace = true
supermaven_api.workspace = true
parking_lot.workspace = true
prometheus = "0.13"
prost.workspace = true
@@ -83,7 +82,6 @@ env_logger.workspace = true
file_finder.workspace = true
fs = { workspace = true, features = ["test-support"] }
git = { workspace = true, features = ["test-support"] }
git_hosting_providers.workspace = true
gpui = { workspace = true, features = ["test-support"] }
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }
@@ -95,7 +93,7 @@ notifications = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
release_channel.workspace = true
dev_server_projects.workspace = true
remote_projects.workspace = true
rpc = { workspace = true, features = ["test-support"] }
sea-orm = { version = "0.12.x", features = ["sqlx-sqlite"] }
serde_json.workspace = true

@@ -172,11 +172,6 @@ spec:
secretKeyRef:
name: slack
key: panics_webhook
- name: SUPERMAVEN_ADMIN_API_KEY
valueFrom:
secretKeyRef:
name: supermaven
key: api_key
- name: INVITE_LINK_PREFIX
value: ${INVITE_LINK_PREFIX}
- name: RUST_BACKTRACE

@@ -51,7 +51,7 @@ CREATE TABLE "projects" (
"host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE,
"hosted_project_id" INTEGER REFERENCES hosted_projects (id),
"dev_server_project_id" INTEGER REFERENCES dev_server_projects(id)
"remote_project_id" INTEGER REFERENCES remote_projects(id)
);
CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id");
@@ -410,8 +410,10 @@ CREATE TABLE dev_servers (
hashed_token TEXT NOT NULL
);

CREATE TABLE dev_server_projects (
CREATE TABLE remote_projects (
id INTEGER PRIMARY KEY AUTOINCREMENT,
dev_server_id INTEGER NOT NULL REFERENCES dev_servers(id),
path TEXT NOT NULL
);

ALTER TABLE hosted_projects ADD COLUMN remote_project_id INTEGER REFERENCES remote_projects(id);

@@ -1,11 +0,0 @@
CREATE TABLE dev_server_projects (
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY (START WITH 100),
dev_server_id INT NOT NULL REFERENCES dev_servers(id) ON DELETE CASCADE,
path TEXT NOT NULL
);
INSERT INTO dev_server_projects OVERRIDING SYSTEM VALUE SELECT * FROM remote_projects;

ALTER TABLE dev_server_projects ADD CONSTRAINT uix_dev_server_projects_dev_server_id_path UNIQUE(dev_server_id, path);

ALTER TABLE projects ADD COLUMN dev_server_project_id INTEGER REFERENCES dev_server_projects(id);
UPDATE projects SET dev_server_project_id = remote_project_id;
@@ -1,2 +0,0 @@
ALTER TABLE projects DROP COLUMN remote_project_id;
DROP TABLE remote_projects;
@@ -116,6 +116,13 @@ struct CreateUserParams {
invite_count: i32,
}

#[derive(Serialize, Debug)]
struct CreateUserResponse {
user: User,
signup_device_id: Option<String>,
metrics_id: String,
}

async fn get_rpc_server_snapshot(
Extension(rpc_server): Extension<Option<Arc<rpc::Server>>>,
) -> Result<ErasedJson> {

@@ -264,7 +264,7 @@ pub async fn post_hang(
Error::Internal(anyhow!(err))
})?;

let mut backtrace = "Possible hang detected on main thread:".to_string();
let mut backtrace = "Possible hang detected on main threadL".to_string();
let unknown = "<unknown>".to_string();
for frame in report.backtrace.iter() {
backtrace.push_str(&format!("\n{}", frame.symbols.first().unwrap_or(&unknown)));

@@ -1,2 +0,0 @@
use anyhow::{anyhow, Result};
use rpc::proto;
@@ -415,7 +415,7 @@ impl Database {
if is_serialization_error(error) && prev_attempt_count < SLEEPS.len() {
let base_delay = SLEEPS[prev_attempt_count];
let randomized_delay = base_delay * self.rng.lock().await.gen_range(0.5..=2.0);
log::warn!(
log::info!(
"retrying transaction after serialization error. delay: {} ms.",
randomized_delay
);
@@ -762,7 +762,7 @@ pub struct Project {
pub collaborators: Vec<ProjectCollaborator>,
pub worktrees: BTreeMap<u64, Worktree>,
pub language_servers: Vec<proto::LanguageServer>,
pub dev_server_project_id: Option<DevServerProjectId>,
pub remote_project_id: Option<RemoteProjectId>,
}

pub struct ProjectCollaborator {

@@ -84,7 +84,7 @@ id_type!(NotificationId);
id_type!(NotificationKindId);
id_type!(ProjectCollaboratorId);
id_type!(ProjectId);
id_type!(DevServerProjectId);
id_type!(RemoteProjectId);
id_type!(ReplicaId);
id_type!(RoomId);
id_type!(RoomParticipantId);

@@ -5,7 +5,6 @@ pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod contributors;
pub mod dev_server_projects;
pub mod dev_servers;
pub mod embeddings;
pub mod extensions;
@@ -14,6 +13,7 @@ pub mod messages;
pub mod notifications;
pub mod projects;
pub mod rate_buckets;
pub mod remote_projects;
pub mod rooms;
pub mod servers;
pub mod users;

@@ -1,340 +0,0 @@
|
||||
use anyhow::anyhow;
|
||||
use rpc::{
|
||||
proto::{self},
|
||||
ConnectionId,
|
||||
};
|
||||
use sea_orm::{
|
||||
ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait,
|
||||
ModelTrait, QueryFilter,
|
||||
};
|
||||
|
||||
use crate::db::ProjectId;
|
||||
|
||||
use super::{
|
||||
dev_server, dev_server_project, project, project_collaborator, worktree, Database, DevServerId,
|
||||
DevServerProjectId, RejoinedProject, ResharedProject, ServerId, UserId,
|
||||
};
|
||||
|
||||
impl Database {
|
||||
pub async fn get_dev_server_project(
|
||||
&self,
|
||||
dev_server_project_id: DevServerProjectId,
|
||||
) -> crate::Result<dev_server_project::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(
|
||||
dev_server_project::Entity::find_by_id(dev_server_project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
anyhow!("no dev server project with id {}", dev_server_project_id)
|
||||
})?,
|
||||
)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_projects_for_dev_server(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
) -> crate::Result<Vec<proto::DevServerProject>> {
|
||||
self.transaction(|tx| async move {
|
||||
self.get_projects_for_dev_server_internal(dev_server_id, &tx)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_projects_for_dev_server_internal(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<Vec<proto::DevServerProject>> {
|
||||
let servers = dev_server_project::Entity::find()
|
||||
.filter(dev_server_project::Column::DevServerId.eq(dev_server_id))
|
||||
.find_also_related(project::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
Ok(servers
|
||||
.into_iter()
|
||||
.map(|(dev_server_project, project)| proto::DevServerProject {
|
||||
id: dev_server_project.id.to_proto(),
|
||||
project_id: project.map(|p| p.id.to_proto()),
|
||||
dev_server_id: dev_server_project.dev_server_id.to_proto(),
|
||||
path: dev_server_project.path,
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub async fn dev_server_project_ids_for_user(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<Vec<DevServerProjectId>> {
|
||||
let dev_servers = dev_server::Entity::find()
|
||||
.filter(dev_server::Column::UserId.eq(user_id))
|
||||
.find_with_related(dev_server_project::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
Ok(dev_servers
|
||||
.into_iter()
|
||||
.flat_map(|(_, projects)| projects.into_iter().map(|p| p.id))
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub async fn owner_for_dev_server_project(
|
||||
&self,
|
||||
dev_server_project_id: DevServerProjectId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<UserId> {
|
||||
let dev_server = dev_server_project::Entity::find_by_id(dev_server_project_id)
|
||||
.find_also_related(dev_server::Entity)
|
||||
.one(tx)
|
||||
.await?
|
||||
.and_then(|(_, dev_server)| dev_server)
|
||||
.ok_or_else(|| anyhow!("no dev server project"))?;
|
||||
|
||||
Ok(dev_server.user_id)
|
||||
}
|
||||
|
||||
pub async fn get_stale_dev_server_projects(
|
||||
&self,
|
||||
connection: ConnectionId,
|
||||
) -> crate::Result<Vec<ProjectId>> {
|
||||
self.transaction(|tx| async move {
|
||||
let projects = project::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::HostConnectionId.eq(connection.id))
|
||||
.add(project::Column::HostConnectionServerId.eq(connection.owner_id)),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(projects.into_iter().map(|p| p.id).collect())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn create_dev_server_project(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
path: &str,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
|
||||
if dev_server.user_id != user_id {
|
||||
return Err(anyhow!("not your dev server"))?;
|
||||
}
|
||||
|
||||
let project = dev_server_project::Entity::insert(dev_server_project::ActiveModel {
|
||||
id: ActiveValue::NotSet,
|
||||
dev_server_id: ActiveValue::Set(dev_server_id),
|
||||
path: ActiveValue::Set(path.to_string()),
|
||||
})
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
let status = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok((project, status))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_dev_server_project(
|
||||
&self,
|
||||
dev_server_project_id: DevServerProjectId,
|
||||
dev_server_id: DevServerId,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<(Vec<proto::DevServerProject>, proto::DevServerProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
project::Entity::delete_many()
|
||||
.filter(project::Column::DevServerProjectId.eq(dev_server_project_id))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
let result = dev_server_project::Entity::delete_by_id(dev_server_project_id)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if result.rows_affected != 1 {
|
||||
return Err(anyhow!(
|
||||
"no dev server project with id {}",
|
||||
dev_server_project_id
|
||||
))?;
|
||||
}
|
||||
|
||||
let status = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
let projects = self
|
||||
.get_projects_for_dev_server_internal(dev_server_id, &tx)
|
||||
.await?;
|
||||
Ok((projects, status))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn share_dev_server_project(
|
||||
&self,
|
||||
dev_server_project_id: DevServerProjectId,
|
||||
dev_server_id: DevServerId,
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
) -> crate::Result<(
|
||||
proto::DevServerProject,
|
||||
UserId,
|
||||
proto::DevServerProjectsUpdate,
|
||||
)> {
|
||||
self.transaction(|tx| async move {
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
|
||||
|
||||
let dev_server_project = dev_server_project::Entity::find_by_id(dev_server_project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
anyhow!("no dev server project with id {}", dev_server_project_id)
|
||||
})?;
|
||||
|
||||
if dev_server_project.dev_server_id != dev_server_id {
|
||||
return Err(anyhow!("dev server project shared from wrong server"))?;
|
||||
}
|
||||
|
||||
let project = project::ActiveModel {
|
||||
room_id: ActiveValue::Set(None),
|
||||
host_user_id: ActiveValue::Set(None),
|
||||
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
host_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
id: ActiveValue::NotSet,
|
||||
hosted_project_id: ActiveValue::Set(None),
|
||||
dev_server_project_id: ActiveValue::Set(Some(dev_server_project_id)),
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
if !worktrees.is_empty() {
|
||||
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
|
||||
worktree::ActiveModel {
|
||||
id: ActiveValue::set(worktree.id as i64),
|
||||
project_id: ActiveValue::set(project.id),
|
||||
abs_path: ActiveValue::set(worktree.abs_path.clone()),
|
||||
root_name: ActiveValue::set(worktree.root_name.clone()),
|
||||
visible: ActiveValue::set(worktree.visible),
|
||||
scan_id: ActiveValue::set(0),
|
||||
completed_scan_id: ActiveValue::set(0),
|
||||
}
|
||||
}))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let status = self
|
||||
.dev_server_projects_update_internal(dev_server.user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok((
|
||||
dev_server_project.to_proto(Some(project)),
|
||||
dev_server.user_id,
|
||||
status,
|
||||
))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn reshare_dev_server_projects(
|
||||
&self,
|
||||
reshared_projects: &Vec<proto::UpdateProject>,
|
||||
dev_server_id: DevServerId,
|
||||
connection: ConnectionId,
|
||||
) -> crate::Result<Vec<ResharedProject>> {
|
||||
// todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?)
|
||||
self.transaction(|tx| async move {
|
||||
let mut ret = Vec::new();
|
||||
for reshared_project in reshared_projects {
|
||||
let project_id = ProjectId::from_proto(reshared_project.project_id);
|
||||
let (project, dev_server_project) = project::Entity::find_by_id(project_id)
|
||||
.find_also_related(dev_server_project::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project does not exist"))?;
|
||||
|
||||
if dev_server_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) {
|
||||
return Err(anyhow!("dev server project reshared from wrong server"))?;
|
||||
}
|
||||
|
||||
let Ok(old_connection_id) = project.host_connection() else {
|
||||
return Err(anyhow!("dev server project was not shared"))?;
|
||||
};
|
||||
|
||||
project::Entity::update(project::ActiveModel {
|
||||
id: ActiveValue::set(project_id),
|
||||
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
host_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
|
||||
.await?;
|
||||
|
||||
ret.push(super::ResharedProject {
|
||||
id: project_id,
|
||||
old_connection_id,
|
||||
collaborators: collaborators
|
||||
.iter()
|
||||
.map(|collaborator| super::ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect(),
|
||||
worktrees: reshared_project.worktrees.clone(),
|
||||
});
|
||||
}
|
||||
Ok(ret)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn rejoin_dev_server_projects(
|
||||
&self,
|
||||
rejoined_projects: &Vec<proto::RejoinProject>,
|
||||
user_id: UserId,
|
||||
connection_id: ConnectionId,
|
||||
) -> crate::Result<Vec<RejoinedProject>> {
|
||||
// todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?)
|
||||
self.transaction(|tx| async move {
|
||||
let mut ret = Vec::new();
|
||||
for rejoined_project in rejoined_projects {
|
||||
if let Some(project) = self
|
||||
.rejoin_project_internal(&tx, rejoined_project, user_id, connection_id)
|
||||
.await?
|
||||
{
|
||||
ret.push(project);
|
||||
}
|
||||
}
|
||||
Ok(ret)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@ use sea_orm::{
|
||||
ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, IntoActiveModel, QueryFilter,
|
||||
};
|
||||
|
||||
use super::{dev_server, dev_server_project, Database, DevServerId, UserId};
|
||||
use super::{dev_server, remote_project, Database, DevServerId, UserId};
|
||||
|
||||
impl Database {
|
||||
pub async fn get_dev_server(
|
||||
@@ -29,43 +29,43 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn dev_server_projects_update(
|
||||
pub async fn remote_projects_update(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<proto::DevServerProjectsUpdate> {
|
||||
self.transaction(|tx| async move {
|
||||
self.dev_server_projects_update_internal(user_id, &tx).await
|
||||
})
|
||||
) -> crate::Result<proto::RemoteProjectsUpdate> {
|
||||
self.transaction(
|
||||
|tx| async move { self.remote_projects_update_internal(user_id, &tx).await },
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn dev_server_projects_update_internal(
|
||||
pub async fn remote_projects_update_internal(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<proto::DevServerProjectsUpdate> {
|
||||
) -> crate::Result<proto::RemoteProjectsUpdate> {
|
||||
let dev_servers = dev_server::Entity::find()
|
||||
.filter(dev_server::Column::UserId.eq(user_id))
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects = dev_server_project::Entity::find()
|
||||
let remote_projects = remote_project::Entity::find()
|
||||
.filter(
|
||||
dev_server_project::Column::DevServerId
|
||||
remote_project::Column::DevServerId
|
||||
.is_in(dev_servers.iter().map(|d| d.id).collect::<Vec<_>>()),
|
||||
)
|
||||
.find_also_related(super::project::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
Ok(proto::DevServerProjectsUpdate {
|
||||
Ok(proto::RemoteProjectsUpdate {
|
||||
dev_servers: dev_servers
|
||||
.into_iter()
|
||||
.map(|d| d.to_proto(proto::DevServerStatus::Offline))
|
||||
.collect(),
|
||||
dev_server_projects: dev_server_projects
|
||||
remote_projects: remote_projects
|
||||
.into_iter()
|
||||
.map(|(dev_server_project, project)| dev_server_project.to_proto(project))
|
||||
.map(|(remote_project, project)| remote_project.to_proto(project))
|
||||
.collect(),
|
||||
})
|
||||
}
|
||||
@@ -75,86 +75,20 @@ impl Database {
|
||||
name: &str,
|
||||
hashed_access_token: &str,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<(dev_server::Model, proto::DevServerProjectsUpdate)> {
|
||||
) -> crate::Result<(dev_server::Model, proto::RemoteProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
if name.trim().is_empty() {
|
||||
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
let dev_server = dev_server::Entity::insert(dev_server::ActiveModel {
|
||||
id: ActiveValue::NotSet,
|
||||
hashed_token: ActiveValue::Set(hashed_access_token.to_string()),
|
||||
name: ActiveValue::Set(name.trim().to_string()),
|
||||
name: ActiveValue::Set(name.to_string()),
|
||||
user_id: ActiveValue::Set(user_id),
|
||||
})
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
let remote_projects = self.remote_projects_update_internal(user_id, &tx).await?;
|
||||
|
||||
Ok((dev_server, dev_server_projects))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn update_dev_server_token(
|
||||
&self,
|
||||
id: DevServerId,
|
||||
hashed_token: &str,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<proto::DevServerProjectsUpdate> {
|
||||
self.transaction(|tx| async move {
|
||||
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
|
||||
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
|
||||
};
|
||||
if dev_server.user_id != user_id {
|
||||
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
dev_server::Entity::update(dev_server::ActiveModel {
|
||||
hashed_token: ActiveValue::Set(hashed_token.to_string()),
|
||||
..dev_server.clone().into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok(dev_server_projects)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn rename_dev_server(
|
||||
&self,
|
||||
id: DevServerId,
|
||||
name: &str,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<proto::DevServerProjectsUpdate> {
|
||||
self.transaction(|tx| async move {
|
||||
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
|
||||
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
|
||||
};
|
||||
if dev_server.user_id != user_id || name.trim().is_empty() {
|
||||
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
dev_server::Entity::update(dev_server::ActiveModel {
|
||||
name: ActiveValue::Set(name.trim().to_string()),
|
||||
..dev_server.clone().into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok(dev_server_projects)
|
||||
Ok((dev_server, remote_projects))
|
||||
})
|
||||
.await
|
||||
}
|
||||
@@ -163,7 +97,7 @@ impl Database {
|
||||
&self,
|
||||
id: DevServerId,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<proto::DevServerProjectsUpdate> {
|
||||
) -> crate::Result<proto::RemoteProjectsUpdate> {
|
||||
self.transaction(|tx| async move {
|
||||
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
|
||||
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
|
||||
@@ -172,8 +106,8 @@ impl Database {
|
||||
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
dev_server_project::Entity::delete_many()
|
||||
.filter(dev_server_project::Column::DevServerId.eq(id))
|
||||
remote_project::Entity::delete_many()
|
||||
.filter(remote_project::Column::DevServerId.eq(id))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
@@ -181,11 +115,9 @@ impl Database {
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
let remote_projects = self.remote_projects_update_internal(user_id, &tx).await?;
|
||||
|
||||
Ok(dev_server_projects)
|
||||
Ok(remote_projects)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
@@ -30,7 +30,7 @@ impl Database {
|
||||
room_id: RoomId,
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
dev_server_project_id: Option<DevServerProjectId>,
|
||||
remote_project_id: Option<RemoteProjectId>,
|
||||
) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let participant = room_participant::Entity::find()
|
||||
@@ -59,9 +59,9 @@ impl Database {
|
||||
return Err(anyhow!("guests cannot share projects"))?;
|
||||
}
|
||||
|
||||
if let Some(dev_server_project_id) = dev_server_project_id {
|
||||
if let Some(remote_project_id) = remote_project_id {
|
||||
let project = project::Entity::find()
|
||||
.filter(project::Column::DevServerProjectId.eq(Some(dev_server_project_id)))
|
||||
.filter(project::Column::RemoteProjectId.eq(Some(remote_project_id)))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no remote project"))?;
|
||||
@@ -78,6 +78,7 @@ impl Database {
|
||||
.await?;
|
||||
|
||||
// todo! check user is a project-collaborator
|
||||
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
return Ok((project.id, room));
|
||||
}
|
||||
@@ -91,7 +92,7 @@ impl Database {
|
||||
))),
|
||||
id: ActiveValue::NotSet,
|
||||
hosted_project_id: ActiveValue::Set(None),
|
||||
dev_server_project_id: ActiveValue::Set(None),
|
||||
remote_project_id: ActiveValue::Set(None),
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
@@ -130,21 +131,13 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_project(&self, project_id: ProjectId) -> Result<()> {
|
||||
self.weak_transaction(|tx| async move {
|
||||
project::Entity::delete_by_id(project_id).exec(&*tx).await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
/// Unshares the given project.
|
||||
pub async fn unshare_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
user_id: Option<UserId>,
|
||||
) -> Result<TransactionGuard<(bool, Option<proto::Room>, Vec<ConnectionId>)>> {
|
||||
) -> Result<TransactionGuard<(Option<proto::Room>, Vec<ConnectionId>)>> {
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
@@ -157,13 +150,16 @@ impl Database {
|
||||
None
|
||||
};
|
||||
if project.host_connection()? == connection {
|
||||
return Ok((true, room, guest_connection_ids));
|
||||
project::Entity::delete(project.into_active_model())
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
return Ok((room, guest_connection_ids));
|
||||
}
|
||||
if let Some(dev_server_project_id) = project.dev_server_project_id {
|
||||
if let Some(remote_project_id) = project.remote_project_id {
|
||||
if let Some(user_id) = user_id {
|
||||
if user_id
|
||||
!= self
|
||||
.owner_for_dev_server_project(dev_server_project_id, &tx)
|
||||
.owner_for_remote_project(remote_project_id, &tx)
|
||||
.await?
|
||||
{
|
||||
Err(anyhow!("cannot unshare a project hosted by another user"))?
|
||||
@@ -174,7 +170,7 @@ impl Database {
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
return Ok((false, room, guest_connection_ids));
|
||||
return Ok((room, guest_connection_ids));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -602,17 +598,6 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn find_dev_server_project(&self, id: DevServerProjectId) -> Result<project::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(project::Entity::find()
|
||||
.filter(project::Column::DevServerProjectId.eq(id))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
/// Adds the given connection to the specified project
|
||||
/// in the current room.
|
||||
pub async fn join_project(
|
||||
@@ -812,7 +797,7 @@ impl Database {
|
||||
name: language_server.name,
|
||||
})
|
||||
.collect(),
|
||||
dev_server_project_id: project.dev_server_project_id,
|
||||
remote_project_id: project.remote_project_id,
|
||||
};
|
||||
Ok((project, replica_id as ReplicaId))
|
||||
}
|
||||
@@ -972,8 +957,8 @@ impl Database {
|
||||
capability: Capability,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<(project::Model, ChannelRole)> {
|
||||
let (mut project, dev_server_project) = project::Entity::find_by_id(project_id)
|
||||
.find_also_related(dev_server_project::Entity)
|
||||
let (mut project, remote_project) = project::Entity::find_by_id(project_id)
|
||||
.find_also_related(remote_project::Entity)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
@@ -1001,8 +986,8 @@ impl Database {
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let role_from_dev_server = if let Some(dev_server_project) = dev_server_project {
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
|
||||
let role_from_remote_project = if let Some(remote_project) = remote_project {
|
||||
let dev_server = dev_server::Entity::find_by_id(remote_project.dev_server_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such channel"))?;
|
||||
@@ -1026,7 +1011,7 @@ impl Database {
|
||||
None
|
||||
};
|
||||
|
||||
let role = role_from_dev_server
|
||||
let role = role_from_remote_project
|
||||
.or(role_from_room)
|
||||
.unwrap_or(ChannelRole::Banned);
|
||||
|
||||
|
||||
@@ -1 +1,283 @@
|
||||
use anyhow::anyhow;
|
||||
use rpc::{proto, ConnectionId};
|
||||
use sea_orm::{
|
||||
ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait,
|
||||
ModelTrait, QueryFilter,
|
||||
};
|
||||
|
||||
use crate::db::ProjectId;
|
||||
|
||||
use super::{
|
||||
dev_server, project, project_collaborator, remote_project, worktree, Database, DevServerId,
|
||||
RejoinedProject, RemoteProjectId, ResharedProject, ServerId, UserId,
|
||||
};
|
||||
|
||||
impl Database {
|
||||
pub async fn get_remote_project(
|
||||
&self,
|
||||
remote_project_id: RemoteProjectId,
|
||||
) -> crate::Result<remote_project::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(remote_project::Entity::find_by_id(remote_project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no remote project with id {}", remote_project_id))?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_remote_projects_for_dev_server(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
) -> crate::Result<Vec<proto::RemoteProject>> {
|
||||
self.transaction(|tx| async move {
|
||||
let servers = remote_project::Entity::find()
|
||||
.filter(remote_project::Column::DevServerId.eq(dev_server_id))
|
||||
.find_also_related(project::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
Ok(servers
|
||||
.into_iter()
|
||||
.map(|(remote_project, project)| proto::RemoteProject {
|
||||
id: remote_project.id.to_proto(),
|
||||
project_id: project.map(|p| p.id.to_proto()),
|
||||
dev_server_id: remote_project.dev_server_id.to_proto(),
|
||||
path: remote_project.path,
|
||||
})
|
||||
.collect())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn remote_project_ids_for_user(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<Vec<RemoteProjectId>> {
|
||||
let dev_servers = dev_server::Entity::find()
|
||||
.filter(dev_server::Column::UserId.eq(user_id))
|
||||
.find_with_related(remote_project::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
Ok(dev_servers
|
||||
.into_iter()
|
||||
.flat_map(|(_, projects)| projects.into_iter().map(|p| p.id))
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub async fn owner_for_remote_project(
|
||||
&self,
|
||||
remote_project_id: RemoteProjectId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<UserId> {
|
||||
let dev_server = remote_project::Entity::find_by_id(remote_project_id)
|
||||
.find_also_related(dev_server::Entity)
|
||||
.one(tx)
|
||||
.await?
|
||||
.and_then(|(_, dev_server)| dev_server)
|
||||
.ok_or_else(|| anyhow!("no remote project"))?;
|
||||
|
||||
Ok(dev_server.user_id)
|
||||
}
|
||||
|
||||
pub async fn get_stale_dev_server_projects(
|
||||
&self,
|
||||
connection: ConnectionId,
|
||||
) -> crate::Result<Vec<ProjectId>> {
|
||||
self.transaction(|tx| async move {
|
||||
let projects = project::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::HostConnectionId.eq(connection.id))
|
||||
.add(project::Column::HostConnectionServerId.eq(connection.owner_id)),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(projects.into_iter().map(|p| p.id).collect())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn create_remote_project(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
path: &str,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<(remote_project::Model, proto::RemoteProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
|
||||
if dev_server.user_id != user_id {
|
||||
return Err(anyhow!("not your dev server"))?;
|
||||
}
|
||||
|
||||
let project = remote_project::Entity::insert(remote_project::ActiveModel {
|
||||
id: ActiveValue::NotSet,
|
||||
dev_server_id: ActiveValue::Set(dev_server_id),
|
||||
path: ActiveValue::Set(path.to_string()),
|
||||
})
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
let status = self.remote_projects_update_internal(user_id, &tx).await?;
|
||||
|
||||
Ok((project, status))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn share_remote_project(
|
||||
&self,
|
||||
remote_project_id: RemoteProjectId,
|
||||
dev_server_id: DevServerId,
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
) -> crate::Result<(proto::RemoteProject, UserId, proto::RemoteProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
|
||||
|
||||
let remote_project = remote_project::Entity::find_by_id(remote_project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no remote project with id {}", remote_project_id))?;
|
||||
|
||||
if remote_project.dev_server_id != dev_server_id {
|
||||
return Err(anyhow!("remote project shared from wrong server"))?;
|
||||
}
|
||||
|
||||
let project = project::ActiveModel {
|
||||
room_id: ActiveValue::Set(None),
|
||||
host_user_id: ActiveValue::Set(None),
|
||||
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
host_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
id: ActiveValue::NotSet,
|
||||
hosted_project_id: ActiveValue::Set(None),
|
||||
remote_project_id: ActiveValue::Set(Some(remote_project_id)),
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
if !worktrees.is_empty() {
|
||||
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
|
||||
worktree::ActiveModel {
|
||||
id: ActiveValue::set(worktree.id as i64),
|
||||
project_id: ActiveValue::set(project.id),
|
||||
abs_path: ActiveValue::set(worktree.abs_path.clone()),
|
||||
root_name: ActiveValue::set(worktree.root_name.clone()),
|
||||
visible: ActiveValue::set(worktree.visible),
|
||||
scan_id: ActiveValue::set(0),
|
||||
completed_scan_id: ActiveValue::set(0),
|
||||
}
|
||||
}))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let status = self
|
||||
.remote_projects_update_internal(dev_server.user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok((
|
||||
remote_project.to_proto(Some(project)),
|
||||
dev_server.user_id,
|
||||
status,
|
||||
))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn reshare_remote_projects(
|
||||
&self,
|
||||
reshared_projects: &Vec<proto::UpdateProject>,
|
||||
dev_server_id: DevServerId,
|
||||
connection: ConnectionId,
|
||||
) -> crate::Result<Vec<ResharedProject>> {
|
||||
// todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?)
|
||||
self.transaction(|tx| async move {
|
||||
let mut ret = Vec::new();
|
||||
for reshared_project in reshared_projects {
|
||||
let project_id = ProjectId::from_proto(reshared_project.project_id);
|
||||
let (project, remote_project) = project::Entity::find_by_id(project_id)
|
||||
.find_also_related(remote_project::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project does not exist"))?;
|
||||
|
||||
if remote_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) {
|
||||
return Err(anyhow!("remote project reshared from wrong server"))?;
|
||||
}
|
||||
|
||||
let Ok(old_connection_id) = project.host_connection() else {
|
||||
return Err(anyhow!("remote project was not shared"))?;
|
||||
};
|
||||
|
||||
project::Entity::update(project::ActiveModel {
|
||||
id: ActiveValue::set(project_id),
|
||||
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
host_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
|
||||
.await?;
|
||||
|
||||
ret.push(super::ResharedProject {
|
||||
id: project_id,
|
||||
old_connection_id,
|
||||
collaborators: collaborators
|
||||
.iter()
|
||||
.map(|collaborator| super::ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect(),
|
||||
worktrees: reshared_project.worktrees.clone(),
|
||||
});
|
||||
}
|
||||
Ok(ret)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn rejoin_remote_projects(
|
||||
&self,
|
||||
rejoined_projects: &Vec<proto::RejoinProject>,
|
||||
user_id: UserId,
|
||||
connection_id: ConnectionId,
|
||||
) -> crate::Result<Vec<RejoinedProject>> {
|
||||
// todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?)
|
||||
self.transaction(|tx| async move {
|
||||
let mut ret = Vec::new();
|
||||
for rejoined_project in rejoined_projects {
|
||||
if let Some(project) = self
|
||||
.rejoin_project_internal(&tx, rejoined_project, user_id, connection_id)
|
||||
.await?
|
||||
{
|
||||
ret.push(project);
|
||||
}
|
||||
}
|
||||
Ok(ret)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -851,17 +851,17 @@ impl Database {
|
||||
.await?;
|
||||
|
||||
// if any project in the room has a remote-project-id that belongs to a dev server that this user owns.
|
||||
let dev_server_projects_for_user = self
|
||||
.dev_server_project_ids_for_user(leaving_participant.user_id, &tx)
|
||||
let remote_projects_for_user = self
|
||||
.remote_project_ids_for_user(leaving_participant.user_id, &tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects_to_unshare = project::Entity::find()
|
||||
let remote_projects_to_unshare = project::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::RoomId.eq(room_id))
|
||||
.add(
|
||||
project::Column::DevServerProjectId
|
||||
.is_in(dev_server_projects_for_user.clone()),
|
||||
project::Column::RemoteProjectId
|
||||
.is_in(remote_projects_for_user.clone()),
|
||||
),
|
||||
)
|
||||
.all(&*tx)
|
||||
@@ -892,7 +892,7 @@ impl Database {
|
||||
}
|
||||
|
||||
if (collaborator.is_host && collaborator.connection() == connection)
|
||||
|| dev_server_projects_to_unshare.contains(&collaborator.project_id)
|
||||
|| remote_projects_to_unshare.contains(&collaborator.project_id)
|
||||
{
|
||||
left_project.should_unshare = true;
|
||||
}
|
||||
@@ -936,9 +936,9 @@ impl Database {
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
if !dev_server_projects_to_unshare.is_empty() {
|
||||
if !remote_projects_to_unshare.is_empty() {
|
||||
project::Entity::update_many()
|
||||
.filter(project::Column::Id.is_in(dev_server_projects_to_unshare))
|
||||
.filter(project::Column::Id.is_in(remote_projects_to_unshare))
|
||||
.set(project::ActiveModel {
|
||||
room_id: ActiveValue::Set(None),
|
||||
..Default::default()
|
||||
@@ -1316,10 +1316,8 @@ impl Database {
|
||||
project.worktree_root_names.push(db_worktree.root_name);
|
||||
}
|
||||
}
|
||||
} else if let Some(dev_server_project_id) = db_project.dev_server_project_id {
|
||||
let host = self
|
||||
.owner_for_dev_server_project(dev_server_project_id, tx)
|
||||
.await?;
|
||||
} else if let Some(remote_project_id) = db_project.remote_project_id {
|
||||
let host = self.owner_for_remote_project(remote_project_id, tx).await?;
|
||||
if let Some((_, participant)) = participants
|
||||
.iter_mut()
|
||||
.find(|(_, v)| v.user_id == host.to_proto())
|
||||
|
||||
@@ -11,7 +11,6 @@ pub mod channel_message_mention;
|
||||
pub mod contact;
|
||||
pub mod contributor;
|
||||
pub mod dev_server;
|
||||
pub mod dev_server_project;
|
||||
pub mod embedding;
|
||||
pub mod extension;
|
||||
pub mod extension_version;
|
||||
@@ -26,6 +25,7 @@ pub mod observed_channel_messages;
|
||||
pub mod project;
|
||||
pub mod project_collaborator;
|
||||
pub mod rate_buckets;
|
||||
pub mod remote_project;
|
||||
pub mod room;
|
||||
pub mod room_participant;
|
||||
pub mod server;
|
||||
|
||||
@@ -16,11 +16,11 @@ impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_many = "super::dev_server_project::Entity")]
|
||||
#[sea_orm(has_many = "super::remote_project::Entity")]
|
||||
RemoteProject,
|
||||
}
|
||||
|
||||
impl Related<super::dev_server_project::Entity> for Entity {
|
||||
impl Related<super::remote_project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::RemoteProject.def()
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::db::{DevServerProjectId, HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
|
||||
use crate::db::{HostedProjectId, ProjectId, RemoteProjectId, Result, RoomId, ServerId, UserId};
|
||||
use anyhow::anyhow;
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
@@ -13,7 +13,7 @@ pub struct Model {
|
||||
pub host_connection_id: Option<i32>,
|
||||
pub host_connection_server_id: Option<ServerId>,
|
||||
pub hosted_project_id: Option<HostedProjectId>,
|
||||
pub dev_server_project_id: Option<DevServerProjectId>,
|
||||
pub remote_project_id: Option<RemoteProjectId>,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
@@ -58,9 +58,9 @@ pub enum Relation {
|
||||
)]
|
||||
HostedProject,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::dev_server_project::Entity",
|
||||
from = "Column::DevServerProjectId",
|
||||
to = "super::dev_server_project::Column::Id"
|
||||
belongs_to = "super::remote_project::Entity",
|
||||
from = "Column::RemoteProjectId",
|
||||
to = "super::remote_project::Column::Id"
|
||||
)]
|
||||
RemoteProject,
|
||||
}
|
||||
@@ -101,7 +101,7 @@ impl Related<super::hosted_project::Entity> for Entity {
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::dev_server_project::Entity> for Entity {
|
||||
impl Related<super::remote_project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::RemoteProject.def()
|
||||
}
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
use super::project;
|
||||
use crate::db::{DevServerId, DevServerProjectId};
|
||||
use crate::db::{DevServerId, RemoteProjectId};
|
||||
use rpc::proto;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "dev_server_projects")]
|
||||
#[sea_orm(table_name = "remote_projects")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: DevServerProjectId,
|
||||
pub id: RemoteProjectId,
|
||||
pub dev_server_id: DevServerId,
|
||||
pub path: String,
|
||||
}
|
||||
@@ -39,8 +39,8 @@ impl Related<super::dev_server::Entity> for Entity {
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn to_proto(&self, project: Option<project::Model>) -> proto::DevServerProject {
|
||||
proto::DevServerProject {
|
||||
pub fn to_proto(&self, project: Option<project::Model>) -> proto::RemoteProject {
|
||||
proto::RemoteProject {
|
||||
id: self.id.to_proto(),
|
||||
project_id: project.map(|p| p.id.to_proto()),
|
||||
dev_server_id: self.dev_server_id.to_proto(),
|
||||
@@ -138,7 +138,6 @@ pub struct Config {
|
||||
pub zed_client_checksum_seed: Option<String>,
|
||||
pub slack_panics_webhook: Option<String>,
|
||||
pub auto_join_channel_id: Option<ChannelId>,
|
||||
pub supermaven_admin_api_key: Option<Arc<str>>,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
|
||||
@@ -4,9 +4,9 @@ use crate::{
|
||||
auth,
|
||||
db::{
|
||||
self, dev_server, BufferId, Capability, Channel, ChannelId, ChannelRole, ChannelsForUser,
|
||||
CreatedChannelMessage, Database, DevServerId, DevServerProjectId, InviteMemberResult,
|
||||
MembershipUpdated, MessageId, NotificationId, PrincipalId, Project, ProjectId,
|
||||
RejoinedProject, RemoveChannelMemberResult, ReplicaId, RespondToChannelInvite, RoomId,
|
||||
CreatedChannelMessage, Database, DevServerId, InviteMemberResult, MembershipUpdated,
|
||||
MessageId, NotificationId, PrincipalId, Project, ProjectId, RejoinedProject,
|
||||
RemoteProjectId, RemoveChannelMemberResult, ReplicaId, RespondToChannelInvite, RoomId,
|
||||
ServerId, UpdatedChannelMessage, User, UserId,
|
||||
},
|
||||
executor::Executor,
|
||||
@@ -34,7 +34,6 @@ pub use connection_pool::{ConnectionPool, ZedVersion};
|
||||
use core::fmt::{self, Debug, Formatter};
|
||||
use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL};
|
||||
use sha2::Digest;
|
||||
use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi};
|
||||
|
||||
use futures::{
|
||||
channel::oneshot,
|
||||
@@ -75,8 +74,6 @@ use tracing::{
|
||||
};
|
||||
use util::http::IsahcHttpClient;
|
||||
|
||||
use self::connection_pool::VersionedMessage;
|
||||
|
||||
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
|
||||
|
||||
// kubernetes gives terminated pods 10s to shutdown gracefully. After they're gone, we can clean up old resources.
|
||||
@@ -149,8 +146,7 @@ struct Session {
|
||||
peer: Arc<Peer>,
|
||||
connection_pool: Arc<parking_lot::Mutex<ConnectionPool>>,
|
||||
live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
|
||||
supermaven_client: Option<Arc<SupermavenAdminApi>>,
|
||||
http_client: Arc<IsahcHttpClient>,
|
||||
http_client: IsahcHttpClient,
|
||||
rate_limiter: Arc<RateLimiter>,
|
||||
_executor: Executor,
|
||||
}
|
||||
@@ -191,14 +187,6 @@ impl Session {
|
||||
}
|
||||
}
|
||||
|
||||
fn is_staff(&self) -> bool {
|
||||
match &self.principal {
|
||||
Principal::User(user) => user.admin,
|
||||
Principal::Impersonated { .. } => true,
|
||||
Principal::DevServer(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn dev_server_id(&self) -> Option<DevServerId> {
|
||||
match &self.principal {
|
||||
Principal::User(_) | Principal::Impersonated { .. } => None,
|
||||
@@ -243,14 +231,6 @@ impl UserSession {
|
||||
pub fn user_id(&self) -> UserId {
|
||||
self.0.user_id().unwrap()
|
||||
}
|
||||
|
||||
pub fn email(&self) -> Option<String> {
|
||||
match &self.0.principal {
|
||||
Principal::User(user) => user.email_address.clone(),
|
||||
Principal::Impersonated { user, .. } => user.email_address.clone(),
|
||||
Principal::DevServer(..) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for UserSession {
|
||||
@@ -429,14 +409,11 @@ impl Server {
|
||||
.add_message_handler(unshare_project)
|
||||
.add_request_handler(user_handler(join_project))
|
||||
.add_request_handler(user_handler(join_hosted_project))
|
||||
.add_request_handler(user_handler(rejoin_dev_server_projects))
|
||||
.add_request_handler(user_handler(create_dev_server_project))
|
||||
.add_request_handler(user_handler(delete_dev_server_project))
|
||||
.add_request_handler(user_handler(rejoin_remote_projects))
|
||||
.add_request_handler(user_handler(create_remote_project))
|
||||
.add_request_handler(user_handler(create_dev_server))
|
||||
.add_request_handler(user_handler(regenerate_dev_server_token))
|
||||
.add_request_handler(user_handler(rename_dev_server))
|
||||
.add_request_handler(user_handler(delete_dev_server))
|
||||
.add_request_handler(dev_server_handler(share_dev_server_project))
|
||||
.add_request_handler(dev_server_handler(share_remote_project))
|
||||
.add_request_handler(dev_server_handler(shutdown_dev_server))
|
||||
.add_request_handler(dev_server_handler(reconnect_dev_server))
|
||||
.add_message_handler(user_message_handler(leave_project))
|
||||
@@ -488,9 +465,6 @@ impl Server {
|
||||
.add_request_handler(user_handler(
|
||||
forward_mutating_project_request::<proto::ApplyCompletionAdditionalEdits>,
|
||||
))
|
||||
.add_request_handler(user_handler(
|
||||
forward_versioned_mutating_project_request::<proto::OpenNewBuffer>,
|
||||
))
|
||||
.add_request_handler(user_handler(
|
||||
forward_mutating_project_request::<proto::ResolveCompletionDocumentation>,
|
||||
))
|
||||
@@ -531,7 +505,7 @@ impl Server {
|
||||
forward_mutating_project_request::<proto::OnTypeFormatting>,
|
||||
))
|
||||
.add_request_handler(user_handler(
|
||||
forward_versioned_mutating_project_request::<proto::SaveBuffer>,
|
||||
forward_mutating_project_request::<proto::SaveBuffer>,
|
||||
))
|
||||
.add_request_handler(user_handler(
|
||||
forward_mutating_project_request::<proto::BlameBuffer>,
|
||||
@@ -581,7 +555,6 @@ impl Server {
|
||||
.add_request_handler(user_handler(get_private_user_info))
|
||||
.add_message_handler(user_message_handler(acknowledge_channel_message))
|
||||
.add_message_handler(user_message_handler(acknowledge_buffer_version))
|
||||
.add_request_handler(user_handler(get_supermaven_api_key))
|
||||
.add_streaming_request_handler({
|
||||
let app_state = app_state.clone();
|
||||
move |request, response, session| {
|
||||
@@ -959,22 +932,13 @@ impl Server {
tracing::info!("connection opened");

let http_client = match IsahcHttpClient::new() {
Ok(http_client) => Arc::new(http_client),
Ok(http_client) => http_client,
Err(error) => {
tracing::error!(?error, "failed to create HTTP client");
return;
}
};

let supermaven_client = if let Some(supermaven_admin_api_key) = this.app_state.config.supermaven_admin_api_key.clone() {
Some(Arc::new(SupermavenAdminApi::new(
supermaven_admin_api_key.to_string(),
http_client.clone(),
)))
} else {
None
};

let session = Session {
principal: principal.clone(),
connection_id,
@@ -985,7 +949,6 @@ impl Server {
http_client,
rate_limiter: this.app_state.rate_limiter.clone(),
_executor: executor.clone(),
supermaven_client,
};

if let Err(error) = this.send_initial_client_update(connection_id, &principal, zed_version, send_connection_id, &session).await {
@@ -1099,12 +1062,12 @@ impl Server {
|
||||
.await?;
|
||||
}
|
||||
|
||||
let (contacts, channels_for_user, channel_invites, dev_server_projects) =
|
||||
let (contacts, channels_for_user, channel_invites, remote_projects) =
|
||||
future::try_join4(
|
||||
self.app_state.db.get_contacts(user.id),
|
||||
self.app_state.db.get_channels_for_user(user.id),
|
||||
self.app_state.db.get_channel_invites_for_user(user.id),
|
||||
self.app_state.db.dev_server_projects_update(user.id),
|
||||
self.app_state.db.remote_projects_update(user.id),
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -1127,7 +1090,7 @@ impl Server {
|
||||
build_channels_update(channels_for_user, channel_invites),
|
||||
)?;
|
||||
}
|
||||
send_dev_server_projects_update(user.id, dev_server_projects, session).await;
|
||||
send_remote_projects_update(user.id, remote_projects, session).await;
|
||||
|
||||
if let Some(incoming_call) =
|
||||
self.app_state.db.incoming_call_for_user(user.id).await?
|
||||
@@ -1149,7 +1112,7 @@ impl Server {
|
||||
let projects = self
|
||||
.app_state
|
||||
.db
|
||||
.get_projects_for_dev_server(dev_server.id)
|
||||
.get_remote_projects_for_dev_server(dev_server.id)
|
||||
.await?;
|
||||
self.peer
|
||||
.send(connection_id, proto::DevServerInstructions { projects })?;
|
||||
@@ -1157,9 +1120,9 @@ impl Server {
|
||||
let status = self
|
||||
.app_state
|
||||
.db
|
||||
.dev_server_projects_update(dev_server.user_id)
|
||||
.remote_projects_update(dev_server.user_id)
|
||||
.await?;
|
||||
send_dev_server_projects_update(dev_server.user_id, status, &session).await;
|
||||
send_remote_projects_update(dev_server.user_id, status, &session).await;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2002,8 +1965,8 @@ async fn share_project(
|
||||
session.connection_id,
|
||||
&request.worktrees,
|
||||
request
|
||||
.dev_server_project_id
|
||||
.map(|id| DevServerProjectId::from_proto(id)),
|
||||
.remote_project_id
|
||||
.map(|id| RemoteProjectId::from_proto(id)),
|
||||
)
|
||||
.await?;
|
||||
response.send(proto::ShareProjectResponse {
|
||||
@@ -2032,60 +1995,49 @@ async fn unshare_project_internal(
user_id: Option<UserId>,
session: &Session,
) -> Result<()> {
let delete = {
let room_guard = session
.db()
.await
.unshare_project(project_id, connection_id, user_id)
.await?;
let (room, guest_connection_ids) = &*session
.db()
.await
.unshare_project(project_id, connection_id, user_id)
.await?;

let (delete, room, guest_connection_ids) = &*room_guard;

let message = proto::UnshareProject {
project_id: project_id.to_proto(),
};

broadcast(
Some(connection_id),
guest_connection_ids.iter().copied(),
|conn_id| session.peer.send(conn_id, message.clone()),
);
if let Some(room) = room {
room_updated(room, &session.peer);
}

*delete
let message = proto::UnshareProject {
project_id: project_id.to_proto(),
};

if delete {
let db = session.db().await;
db.delete_project(project_id).await?;
broadcast(
Some(connection_id),
guest_connection_ids.iter().copied(),
|conn_id| session.peer.send(conn_id, message.clone()),
);
if let Some(room) = room {
room_updated(room, &session.peer);
}

Ok(())
}

/// DevServer makes a project available online
|
||||
async fn share_dev_server_project(
|
||||
request: proto::ShareDevServerProject,
|
||||
response: Response<proto::ShareDevServerProject>,
|
||||
async fn share_remote_project(
|
||||
request: proto::ShareRemoteProject,
|
||||
response: Response<proto::ShareRemoteProject>,
|
||||
session: DevServerSession,
|
||||
) -> Result<()> {
|
||||
let (dev_server_project, user_id, status) = session
|
||||
let (remote_project, user_id, status) = session
|
||||
.db()
|
||||
.await
|
||||
.share_dev_server_project(
|
||||
DevServerProjectId::from_proto(request.dev_server_project_id),
|
||||
.share_remote_project(
|
||||
RemoteProjectId::from_proto(request.remote_project_id),
|
||||
session.dev_server_id(),
|
||||
session.connection_id,
|
||||
&request.worktrees,
|
||||
)
|
||||
.await?;
|
||||
let Some(project_id) = dev_server_project.project_id else {
|
||||
let Some(project_id) = remote_project.project_id else {
|
||||
return Err(anyhow!("failed to share remote project"))?;
|
||||
};
|
||||
|
||||
send_dev_server_projects_update(user_id, status, &session).await;
|
||||
send_remote_projects_update(user_id, status, &session).await;
|
||||
|
||||
response.send(proto::ShareProjectResponse { project_id })?;
|
||||
|
||||
@@ -2178,9 +2130,9 @@ fn join_project_internal(
|
||||
collaborators: collaborators.clone(),
|
||||
language_servers: project.language_servers.clone(),
|
||||
role: project.role.into(),
|
||||
dev_server_project_id: project
|
||||
.dev_server_project_id
|
||||
.map(|dev_server_project_id| dev_server_project_id.0 as u64),
|
||||
remote_project_id: project
|
||||
.remote_project_id
|
||||
.map(|remote_project_id| remote_project_id.0 as u64),
|
||||
})?;
|
||||
|
||||
for (worktree_id, worktree) in mem::take(&mut project.worktrees) {
|
||||
@@ -2292,9 +2244,9 @@ async fn join_hosted_project(
|
||||
join_project_internal(response, session, &mut project, &replica_id)
|
||||
}
|
||||
|
||||
async fn create_dev_server_project(
|
||||
request: proto::CreateDevServerProject,
|
||||
response: Response<proto::CreateDevServerProject>,
|
||||
async fn create_remote_project(
|
||||
request: proto::CreateRemoteProject,
|
||||
response: Response<proto::CreateRemoteProject>,
|
||||
session: UserSession,
|
||||
) -> Result<()> {
|
||||
let dev_server_id = DevServerId(request.dev_server_id as i32);
|
||||
@@ -2315,14 +2267,14 @@ async fn create_dev_server_project(
|
||||
.forward_request(
|
||||
session.connection_id,
|
||||
dev_server_connection_id,
|
||||
proto::ValidateDevServerProjectRequest { path: path.clone() },
|
||||
proto::ValidateRemoteProjectRequest { path: path.clone() },
|
||||
)
|
||||
.await?;
|
||||
|
||||
let (dev_server_project, update) = session
|
||||
let (remote_project, update) = session
|
||||
.db()
|
||||
.await
|
||||
.create_dev_server_project(
|
||||
.create_remote_project(
|
||||
DevServerId(request.dev_server_id as i32),
|
||||
&request.path,
|
||||
session.user_id(),
|
||||
@@ -2332,7 +2284,7 @@ async fn create_dev_server_project(
|
||||
let projects = session
|
||||
.db()
|
||||
.await
|
||||
.get_projects_for_dev_server(dev_server_project.dev_server_id)
|
||||
.get_remote_projects_for_dev_server(remote_project.dev_server_id)
|
||||
.await?;
|
||||
|
||||
session.peer.send(
|
||||
@@ -2340,10 +2292,10 @@ async fn create_dev_server_project(
|
||||
proto::DevServerInstructions { projects },
|
||||
)?;
|
||||
|
||||
send_dev_server_projects_update(session.user_id(), update, &session).await;
|
||||
send_remote_projects_update(session.user_id(), update, &session).await;
|
||||
|
||||
response.send(proto::CreateDevServerProjectResponse {
|
||||
dev_server_project: Some(dev_server_project.to_proto(None)),
|
||||
response.send(proto::CreateRemoteProjectResponse {
|
||||
remote_project: Some(remote_project.to_proto(None)),
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -2356,19 +2308,13 @@ async fn create_dev_server(
|
||||
let access_token = auth::random_token();
|
||||
let hashed_access_token = auth::hash_access_token(&access_token);
|
||||
|
||||
if request.name.is_empty() {
|
||||
return Err(proto::ErrorCode::Forbidden
|
||||
.message("Dev server name cannot be empty".to_string())
|
||||
.anyhow())?;
|
||||
}
|
||||
|
||||
let (dev_server, status) = session
|
||||
.db()
|
||||
.await
|
||||
.create_dev_server(&request.name, &hashed_access_token, session.user_id())
|
||||
.await?;
|
||||
|
||||
send_dev_server_projects_update(session.user_id(), status, &session).await;
|
||||
send_remote_projects_update(session.user_id(), status, &session).await;
|
||||
|
||||
response.send(proto::CreateDevServerResponse {
|
||||
dev_server_id: dev_server.id.0 as u64,
|
||||
@@ -2378,71 +2324,6 @@ async fn create_dev_server(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn regenerate_dev_server_token(
|
||||
request: proto::RegenerateDevServerToken,
|
||||
response: Response<proto::RegenerateDevServerToken>,
|
||||
session: UserSession,
|
||||
) -> Result<()> {
|
||||
let dev_server_id = DevServerId(request.dev_server_id as i32);
|
||||
let access_token = auth::random_token();
|
||||
let hashed_access_token = auth::hash_access_token(&access_token);
|
||||
|
||||
let connection_id = session
|
||||
.connection_pool()
|
||||
.await
|
||||
.dev_server_connection_id(dev_server_id);
|
||||
if let Some(connection_id) = connection_id {
|
||||
shutdown_dev_server_internal(dev_server_id, connection_id, &session).await?;
|
||||
session
|
||||
.peer
|
||||
.send(connection_id, proto::ShutdownDevServer {})?;
|
||||
let _ = remove_dev_server_connection(dev_server_id, &session).await;
|
||||
}
|
||||
|
||||
let status = session
|
||||
.db()
|
||||
.await
|
||||
.update_dev_server_token(dev_server_id, &hashed_access_token, session.user_id())
|
||||
.await?;
|
||||
|
||||
send_dev_server_projects_update(session.user_id(), status, &session).await;
|
||||
|
||||
response.send(proto::RegenerateDevServerTokenResponse {
|
||||
dev_server_id: dev_server_id.to_proto(),
|
||||
access_token: auth::generate_dev_server_token(dev_server_id.0 as usize, access_token),
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn rename_dev_server(
|
||||
request: proto::RenameDevServer,
|
||||
response: Response<proto::RenameDevServer>,
|
||||
session: UserSession,
|
||||
) -> Result<()> {
|
||||
if request.name.trim().is_empty() {
|
||||
return Err(proto::ErrorCode::Forbidden
|
||||
.message("Dev server name cannot be empty".to_string())
|
||||
.anyhow())?;
|
||||
}
|
||||
|
||||
let dev_server_id = DevServerId(request.dev_server_id as i32);
|
||||
let dev_server = session.db().await.get_dev_server(dev_server_id).await?;
|
||||
if dev_server.user_id != session.user_id() {
|
||||
return Err(anyhow!(ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
let status = session
|
||||
.db()
|
||||
.await
|
||||
.rename_dev_server(dev_server_id, &request.name, session.user_id())
|
||||
.await?;
|
||||
|
||||
send_dev_server_projects_update(session.user_id(), status, &session).await;
|
||||
|
||||
response.send(proto::Ack {})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn delete_dev_server(
|
||||
request: proto::DeleteDevServer,
|
||||
response: Response<proto::DeleteDevServer>,
|
||||
@@ -2463,7 +2344,6 @@ async fn delete_dev_server(
|
||||
session
|
||||
.peer
|
||||
.send(connection_id, proto::ShutdownDevServer {})?;
|
||||
let _ = remove_dev_server_connection(dev_server_id, &session).await;
|
||||
}
|
||||
|
||||
let status = session
|
||||
@@ -2472,82 +2352,20 @@ async fn delete_dev_server(
|
||||
.delete_dev_server(dev_server_id, session.user_id())
|
||||
.await?;
|
||||
|
||||
send_dev_server_projects_update(session.user_id(), status, &session).await;
|
||||
send_remote_projects_update(session.user_id(), status, &session).await;
|
||||
|
||||
response.send(proto::Ack {})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn delete_dev_server_project(
|
||||
request: proto::DeleteDevServerProject,
|
||||
response: Response<proto::DeleteDevServerProject>,
|
||||
session: UserSession,
|
||||
) -> Result<()> {
|
||||
let dev_server_project_id = DevServerProjectId(request.dev_server_project_id as i32);
|
||||
let dev_server_project = session
|
||||
.db()
|
||||
.await
|
||||
.get_dev_server_project(dev_server_project_id)
|
||||
.await?;
|
||||
|
||||
let dev_server = session
|
||||
.db()
|
||||
.await
|
||||
.get_dev_server(dev_server_project.dev_server_id)
|
||||
.await?;
|
||||
if dev_server.user_id != session.user_id() {
|
||||
return Err(anyhow!(ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
let dev_server_connection_id = session
|
||||
.connection_pool()
|
||||
.await
|
||||
.dev_server_connection_id(dev_server.id);
|
||||
|
||||
if let Some(dev_server_connection_id) = dev_server_connection_id {
|
||||
let project = session
|
||||
.db()
|
||||
.await
|
||||
.find_dev_server_project(dev_server_project_id)
|
||||
.await;
|
||||
if let Ok(project) = project {
|
||||
unshare_project_internal(
|
||||
project.id,
|
||||
dev_server_connection_id,
|
||||
Some(session.user_id()),
|
||||
&session,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
let (projects, status) = session
|
||||
.db()
|
||||
.await
|
||||
.delete_dev_server_project(dev_server_project_id, dev_server.id, session.user_id())
|
||||
.await?;
|
||||
|
||||
if let Some(dev_server_connection_id) = dev_server_connection_id {
|
||||
session.peer.send(
|
||||
dev_server_connection_id,
|
||||
proto::DevServerInstructions { projects },
|
||||
)?;
|
||||
}
|
||||
|
||||
send_dev_server_projects_update(session.user_id(), status, &session).await;
|
||||
|
||||
response.send(proto::Ack {})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn rejoin_dev_server_projects(
|
||||
async fn rejoin_remote_projects(
|
||||
request: proto::RejoinRemoteProjects,
|
||||
response: Response<proto::RejoinRemoteProjects>,
|
||||
session: UserSession,
|
||||
) -> Result<()> {
|
||||
let mut rejoined_projects = {
|
||||
let db = session.db().await;
|
||||
db.rejoin_dev_server_projects(
|
||||
db.rejoin_remote_projects(
|
||||
&request.rejoined_projects,
|
||||
session.user_id(),
|
||||
session.0.connection_id,
|
||||
@@ -2571,7 +2389,7 @@ async fn reconnect_dev_server(
|
||||
) -> Result<()> {
|
||||
let reshared_projects = {
|
||||
let db = session.db().await;
|
||||
db.reshare_dev_server_projects(
|
||||
db.reshare_remote_projects(
|
||||
&request.reshared_projects,
|
||||
session.dev_server_id(),
|
||||
session.0.connection_id,
|
||||
@@ -2636,8 +2454,7 @@ async fn shutdown_dev_server(
session: DevServerSession,
) -> Result<()> {
response.send(proto::Ack {})?;
shutdown_dev_server_internal(session.dev_server_id(), session.connection_id, &session).await?;
remove_dev_server_connection(session.dev_server_id(), &session).await
shutdown_dev_server_internal(session.dev_server_id(), session.connection_id, &session).await
}

async fn shutdown_dev_server_internal(
@@ -2645,14 +2462,14 @@ async fn shutdown_dev_server_internal(
connection_id: ConnectionId,
session: &Session,
) -> Result<()> {
let (dev_server_projects, dev_server) = {
let (remote_projects, dev_server) = {
let db = session.db().await;
let dev_server_projects = db.get_projects_for_dev_server(dev_server_id).await?;
let remote_projects = db.get_remote_projects_for_dev_server(dev_server_id).await?;
let dev_server = db.get_dev_server(dev_server_id).await?;
(dev_server_projects, dev_server)
(remote_projects, dev_server)
};

for project_id in dev_server_projects.iter().filter_map(|p| p.project_id) {
for project_id in remote_projects.iter().filter_map(|p| p.project_id) {
unshare_project_internal(
ProjectId::from_proto(project_id),
connection_id,
@@ -2670,28 +2487,13 @@ async fn shutdown_dev_server_internal(
let status = session
.db()
.await
.dev_server_projects_update(dev_server.user_id)
.remote_projects_update(dev_server.user_id)
.await?;
send_dev_server_projects_update(dev_server.user_id, status, &session).await;
send_remote_projects_update(dev_server.user_id, status, &session).await;

Ok(())
}

async fn remove_dev_server_connection(dev_server_id: DevServerId, session: &Session) -> Result<()> {
let dev_server_connection = session
.connection_pool()
.await
.dev_server_connection_id(dev_server_id);

if let Some(dev_server_connection) = dev_server_connection {
session
.connection_pool()
.await
.remove_connection(dev_server_connection)?;
}
Ok(())
}

/// Updates other participants with changes to the project
|
||||
async fn update_project(
|
||||
request: proto::UpdateProject,
|
||||
@@ -2875,7 +2677,6 @@ where
|
||||
T: EntityMessage + RequestMessage,
|
||||
{
|
||||
let project_id = ProjectId::from_proto(request.remote_entity_id());
|
||||
|
||||
let host_connection_id = session
|
||||
.db()
|
||||
.await
|
||||
@@ -2889,45 +2690,6 @@ where
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// forward a project request to the host. These requests are disallowed
|
||||
/// for guests.
|
||||
async fn forward_versioned_mutating_project_request<T>(
|
||||
request: T,
|
||||
response: Response<T>,
|
||||
session: UserSession,
|
||||
) -> Result<()>
|
||||
where
|
||||
T: EntityMessage + RequestMessage + VersionedMessage,
|
||||
{
|
||||
let project_id = ProjectId::from_proto(request.remote_entity_id());
|
||||
|
||||
let host_connection_id = session
|
||||
.db()
|
||||
.await
|
||||
.host_for_mutating_project_request(project_id, session.connection_id, session.user_id())
|
||||
.await?;
|
||||
if let Some(host_version) = session
|
||||
.connection_pool()
|
||||
.await
|
||||
.connection(host_connection_id)
|
||||
.map(|c| c.zed_version)
|
||||
{
|
||||
if let Some(min_required_version) = request.required_host_version() {
|
||||
if min_required_version > host_version {
|
||||
return Err(anyhow!(ErrorCode::RemoteUpgradeRequired
|
||||
.with_tag("required", &min_required_version.to_string())))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let payload = session
|
||||
.peer
|
||||
.forward_request(session.connection_id, host_connection_id, request)
|
||||
.await?;
|
||||
response.send(payload)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Notify other participants that a new buffer has been created
|
||||
async fn create_buffer_for_peer(
|
||||
request: proto::CreateBufferForPeer,
|
||||
@@ -4340,7 +4102,7 @@ async fn complete_with_open_ai(
|
||||
api_key: Arc<str>,
|
||||
) -> Result<()> {
|
||||
let mut completion_stream = open_ai::stream_completion(
|
||||
session.http_client.as_ref(),
|
||||
&session.http_client,
|
||||
OPEN_AI_API_URL,
|
||||
&api_key,
|
||||
crate::ai::language_model_request_to_open_ai(request)?,
|
||||
@@ -4404,7 +4166,7 @@ async fn complete_with_google_ai(
|
||||
api_key: Arc<str>,
|
||||
) -> Result<()> {
|
||||
let mut stream = google_ai::stream_generate_content(
|
||||
session.http_client.clone(),
|
||||
&session.http_client,
|
||||
google_ai::API_URL,
|
||||
api_key.as_ref(),
|
||||
crate::ai::language_model_request_to_google_ai(request)?,
|
||||
@@ -4488,7 +4250,7 @@ async fn complete_with_anthropic(
|
||||
.collect();
|
||||
|
||||
let mut stream = anthropic::stream_completion(
|
||||
session.http_client.clone(),
|
||||
&session.http_client,
|
||||
"https://api.anthropic.com",
|
||||
&api_key,
|
||||
anthropic::Request {
|
||||
@@ -4612,7 +4374,7 @@ async fn count_tokens_with_language_model(
|
||||
let api_key = google_ai_api_key
|
||||
.ok_or_else(|| anyhow!("no Google AI API key configured on the server"))?;
|
||||
let tokens_response = google_ai::count_tokens(
|
||||
session.http_client.as_ref(),
|
||||
&session.http_client,
|
||||
google_ai::API_URL,
|
||||
&api_key,
|
||||
crate::ai::count_tokens_request_to_google_ai(request)?,
|
||||
@@ -4631,7 +4393,7 @@ impl RateLimit for ComputeEmbeddingsRateLimit {
|
||||
std::env::var("EMBED_TEXTS_RATE_LIMIT_PER_HOUR")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(5000) // Picked arbitrarily
|
||||
.unwrap_or(120) // Picked arbitrarily
|
||||
}
|
||||
|
||||
fn refill_duration() -> chrono::Duration {
|
||||
@@ -4660,7 +4422,7 @@ async fn compute_embeddings(
|
||||
let embeddings = match request.model.as_str() {
|
||||
"openai/text-embedding-3-small" => {
|
||||
open_ai::embed(
|
||||
session.http_client.as_ref(),
|
||||
&session.http_client,
|
||||
OPEN_AI_API_URL,
|
||||
&api_key,
|
||||
OpenAiEmbeddingModel::TextEmbedding3Small,
|
||||
@@ -4703,6 +4465,25 @@ async fn compute_embeddings(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
struct GetCachedEmbeddingsRateLimit;
|
||||
|
||||
impl RateLimit for GetCachedEmbeddingsRateLimit {
|
||||
fn capacity() -> usize {
|
||||
std::env::var("EMBED_TEXTS_RATE_LIMIT_PER_HOUR")
|
||||
.ok()
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(120) // Picked arbitrarily
|
||||
}
|
||||
|
||||
fn refill_duration() -> chrono::Duration {
|
||||
chrono::Duration::hours(1)
|
||||
}
|
||||
|
||||
fn db_name() -> &'static str {
|
||||
"get-cached-embeddings"
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_cached_embeddings(
|
||||
request: proto::GetCachedEmbeddings,
|
||||
response: Response<proto::GetCachedEmbeddings>,
|
||||
@@ -4710,6 +4491,11 @@ async fn get_cached_embeddings(
|
||||
) -> Result<()> {
|
||||
authorize_access_to_language_models(&session).await?;
|
||||
|
||||
session
|
||||
.rate_limiter
|
||||
.check::<GetCachedEmbeddingsRateLimit>(session.user_id())
|
||||
.await?;
|
||||
|
||||
let db = session.db().await;
|
||||
let embeddings = db.get_embeddings(&request.model, &request.digests).await?;
|
||||
|
||||
@@ -4732,37 +4518,6 @@ async fn authorize_access_to_language_models(session: &UserSession) -> Result<()
|
||||
}
|
||||
}

/// Get a Supermaven API key for the user
async fn get_supermaven_api_key(
_request: proto::GetSupermavenApiKey,
response: Response<proto::GetSupermavenApiKey>,
session: UserSession,
) -> Result<()> {
let user_id: String = session.user_id().to_string();
if !session.is_staff() {
return Err(anyhow!("supermaven not enabled for this account"))?;
}

let email = session
.email()
.ok_or_else(|| anyhow!("user must have an email"))?;

let supermaven_admin_api = session
.supermaven_client
.as_ref()
.ok_or_else(|| anyhow!("supermaven not configured"))?;

let result = supermaven_admin_api
.try_get_or_create_user(CreateExternalUserRequest { id: user_id, email })
.await?;

response.send(proto::GetSupermavenApiKeyResponse {
api_key: result.api_key,
})?;

Ok(())
}

/// Start receiving chat updates for a channel
|
||||
async fn join_channel_chat(
|
||||
request: proto::JoinChannelChat,
|
||||
@@ -5108,9 +4863,9 @@ fn channel_updated(
|
||||
);
|
||||
}
|
||||
|
||||
async fn send_dev_server_projects_update(
|
||||
async fn send_remote_projects_update(
|
||||
user_id: UserId,
|
||||
mut status: proto::DevServerProjectsUpdate,
|
||||
mut status: proto::RemoteProjectsUpdate,
|
||||
session: &Session,
|
||||
) {
|
||||
let pool = session.connection_pool().await;
|
||||
@@ -5173,13 +4928,9 @@ async fn lost_dev_server_connection(session: &DevServerSession) -> Result<()> {
|
||||
}
|
||||
|
||||
let user_id = session.dev_server().user_id;
|
||||
let update = session
|
||||
.db()
|
||||
.await
|
||||
.dev_server_projects_update(user_id)
|
||||
.await?;
|
||||
let update = session.db().await.remote_projects_update(user_id).await?;
|
||||
|
||||
send_dev_server_projects_update(user_id, update, session).await;
|
||||
send_remote_projects_update(user_id, update, session).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ struct ConnectedPrincipal {
connection_ids: HashSet<ConnectionId>,
}

#[derive(Copy, Clone, Debug, Serialize, PartialOrd, PartialEq, Eq, Ord)]
#[derive(Debug, Serialize)]
pub struct ZedVersion(pub SemanticVersion);

impl fmt::Display for ZedVersion {
@@ -34,32 +34,6 @@ impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
self.0 >= SemanticVersion::new(0, 129, 2)
}

pub fn with_save_as() -> ZedVersion {
ZedVersion(SemanticVersion::new(0, 134, 0))
}
}

pub trait VersionedMessage {
fn required_host_version(&self) -> Option<ZedVersion> {
None
}
}

impl VersionedMessage for proto::SaveBuffer {
fn required_host_version(&self) -> Option<ZedVersion> {
if self.new_path.is_some() {
Some(ZedVersion::with_save_as())
} else {
None
}
}
}

impl VersionedMessage for proto::OpenNewBuffer {
fn required_host_version(&self) -> Option<ZedVersion> {
Some(ZedVersion::with_save_as())
}
}

#[derive(Serialize)]
@@ -76,10 +50,6 @@ impl ConnectionPool {
self.channels.clear();
}

pub fn connection(&mut self, connection_id: ConnectionId) -> Option<&Connection> {
self.connections.get(&connection_id)
}

#[instrument(skip(self))]
pub fn add_connection(
&mut self,

@@ -246,7 +246,7 @@ async fn test_channel_notes_participant_indices(
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
|
||||
|
||||
// Clients A and B open the same file.
|
||||
|
||||
@@ -16,7 +16,7 @@ use super::TestClient;
|
||||
async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
|
||||
let (server, client) = TestServer::start1(cx).await;
|
||||
|
||||
let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone());
|
||||
let store = cx.update(|cx| remote_projects::Store::global(cx).clone());
|
||||
|
||||
let resp = store
|
||||
.update(cx, |store, cx| {
|
||||
@@ -51,7 +51,7 @@ async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppC
|
||||
|
||||
store
|
||||
.update(cx, |store, cx| {
|
||||
store.create_dev_server_project(
|
||||
store.create_remote_project(
|
||||
client::DevServerId(resp.dev_server_id),
|
||||
"/remote".to_string(),
|
||||
cx,
|
||||
@@ -64,10 +64,10 @@ async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppC
|
||||
|
||||
let remote_workspace = store
|
||||
.update(cx, |store, cx| {
|
||||
let projects = store.dev_server_projects();
|
||||
let projects = store.remote_projects();
|
||||
assert_eq!(projects.len(), 1);
|
||||
assert_eq!(projects[0].path, "/remote");
|
||||
workspace::join_dev_server_project(
|
||||
workspace::join_remote_project(
|
||||
projects[0].project_id.unwrap(),
|
||||
client.app_state.clone(),
|
||||
None,
|
||||
@@ -110,7 +110,7 @@ async fn test_dev_server_env_files(
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
create_remote_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
@@ -157,13 +157,13 @@ async fn test_dev_server_env_files(
|
||||
});
|
||||
}
|
||||
|
||||
async fn create_dev_server_project(
|
||||
async fn create_remote_project(
|
||||
server: &TestServer,
|
||||
client_app_state: Arc<AppState>,
|
||||
cx: &mut TestAppContext,
|
||||
cx_devserver: &mut TestAppContext,
|
||||
) -> (TestClient, WindowHandle<Workspace>) {
|
||||
let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone());
|
||||
let store = cx.update(|cx| remote_projects::Store::global(cx).clone());
|
||||
|
||||
let resp = store
|
||||
.update(cx, |store, cx| {
|
||||
@@ -190,7 +190,7 @@ async fn create_dev_server_project(
|
||||
|
||||
store
|
||||
.update(cx, |store, cx| {
|
||||
store.create_dev_server_project(
|
||||
store.create_remote_project(
|
||||
client::DevServerId(resp.dev_server_id),
|
||||
"/remote".to_string(),
|
||||
cx,
|
||||
@@ -203,10 +203,10 @@ async fn create_dev_server_project(
|
||||
|
||||
let workspace = store
|
||||
.update(cx, |store, cx| {
|
||||
let projects = store.dev_server_projects();
|
||||
let projects = store.remote_projects();
|
||||
assert_eq!(projects.len(), 1);
|
||||
assert_eq!(projects[0].path, "/remote");
|
||||
workspace::join_dev_server_project(
|
||||
workspace::join_remote_project(
|
||||
projects[0].project_id.unwrap(),
|
||||
client_app_state,
|
||||
None,
|
||||
@@ -230,7 +230,7 @@ async fn test_dev_server_leave_room(
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
create_remote_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
@@ -263,191 +263,6 @@ async fn test_dev_server_leave_room(
|
||||
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected()));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_delete(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
cx2: &mut gpui::TestAppContext,
|
||||
cx3: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
channel_id,
|
||||
client1.app_state.clone(),
|
||||
Some(remote_workspace),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
assert!(ws.project().read(cx).is_shared());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
join_channel(channel_id, &client2, cx2).await.unwrap();
|
||||
cx2.executor().run_until_parked();
|
||||
|
||||
cx1.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
|
||||
store.delete_dev_server_project(store.dev_server_projects().first().unwrap().id, cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
let (workspace, cx2) = client2.active_workspace(cx2);
|
||||
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected()));
|
||||
|
||||
cx1.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, _| {
|
||||
assert_eq!(store.dev_server_projects().len(), 0);
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_rename(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
cx2: &mut gpui::TestAppContext,
|
||||
cx3: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
channel_id,
|
||||
client1.app_state.clone(),
|
||||
Some(remote_workspace),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
assert!(ws.project().read(cx).is_shared());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
join_channel(channel_id, &client2, cx2).await.unwrap();
|
||||
cx2.executor().run_until_parked();
|
||||
|
||||
cx1.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
|
||||
store.rename_dev_server(
|
||||
store.dev_servers().first().unwrap().id,
|
||||
"name-edited".to_string(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
cx1.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, _| {
|
||||
assert_eq!(store.dev_servers().first().unwrap().name, "name-edited");
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_refresh_access_token(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
cx2: &mut gpui::TestAppContext,
|
||||
cx3: &mut gpui::TestAppContext,
|
||||
cx4: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
channel_id,
|
||||
client1.app_state.clone(),
|
||||
Some(remote_workspace),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
assert!(ws.project().read(cx).is_shared());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
join_channel(channel_id, &client2, cx2).await.unwrap();
|
||||
cx2.executor().run_until_parked();
|
||||
|
||||
// Regenerate the access token
|
||||
let new_token_response = cx1
|
||||
.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
|
||||
store.regenerate_dev_server_token(store.dev_servers().first().unwrap().id, cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
// Assert that the other client was disconnected
|
||||
let (workspace, cx2) = client2.active_workspace(cx2);
|
||||
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected()));
|
||||
|
||||
// Assert that the owner of the dev server does not see the dev server as online anymore
|
||||
let (workspace, cx1) = client1.active_workspace(cx1);
|
||||
cx1.update(|cx| {
|
||||
assert!(workspace.read(cx).project().read(cx).is_disconnected());
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, _| {
|
||||
assert_eq!(
|
||||
store.dev_servers().first().unwrap().status,
|
||||
DevServerStatus::Offline
|
||||
);
|
||||
})
|
||||
});
|
||||
|
||||
// Reconnect the dev server with the new token
|
||||
let _dev_server = server
|
||||
.create_dev_server(new_token_response.access_token, cx4)
|
||||
.await;
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
// Assert that the dev server is online again
|
||||
cx1.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, _| {
|
||||
assert_eq!(store.dev_servers().len(), 1);
|
||||
assert_eq!(
|
||||
store.dev_servers().first().unwrap().status,
|
||||
DevServerStatus::Online
|
||||
);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_reconnect(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
@@ -460,7 +275,7 @@ async fn test_dev_server_reconnect(
|
||||
.await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
create_remote_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
@@ -484,12 +299,12 @@ async fn test_dev_server_reconnect(
|
||||
|
||||
let client2 = server.create_client(cx2, "user_a").await;
|
||||
|
||||
let store = cx2.update(|cx| dev_server_projects::Store::global(cx).clone());
|
||||
let store = cx2.update(|cx| remote_projects::Store::global(cx).clone());
|
||||
|
||||
store
|
||||
.update(cx2, |store, cx| {
|
||||
let projects = store.dev_server_projects();
|
||||
workspace::join_dev_server_project(
|
||||
let projects = store.remote_projects();
|
||||
workspace::join_remote_project(
|
||||
projects[0].project_id.unwrap(),
|
||||
client2.app_state.clone(),
|
||||
None,
|
||||
@@ -501,7 +316,7 @@ async fn test_dev_server_reconnect(
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_create_dev_server_project_path_validation(
|
||||
async fn test_create_remote_project_path_validation(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
cx2: &mut gpui::TestAppContext,
|
||||
cx3: &mut gpui::TestAppContext,
|
||||
@@ -513,11 +328,11 @@ async fn test_create_dev_server_project_path_validation(
|
||||
|
||||
// Creating a project with a path that does exist should not fail
|
||||
let (_dev_server, _) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
|
||||
create_remote_project(&server, client1.app_state.clone(), cx1, cx2).await;
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
let store = cx1.update(|cx| dev_server_projects::Store::global(cx).clone());
|
||||
let store = cx1.update(|cx| remote_projects::Store::global(cx).clone());
|
||||
|
||||
let resp = store
|
||||
.update(cx1, |store, cx| {
|
||||
@@ -535,7 +350,7 @@ async fn test_create_dev_server_project_path_validation(
|
||||
// Creating a remote project with a path that does not exist should fail
|
||||
let result = store
|
||||
.update(cx1, |store, cx| {
|
||||
store.create_dev_server_project(
|
||||
store.create_remote_project(
|
||||
client::DevServerId(resp.dev_server_id),
|
||||
"/notfound".to_string(),
|
||||
cx,
|
||||
@@ -548,7 +363,7 @@ async fn test_create_dev_server_project_path_validation(
|
||||
let error = result.unwrap_err();
|
||||
assert!(matches!(
|
||||
error.error_code(),
|
||||
ErrorCode::DevServerProjectPathDoesNotExist
|
||||
ErrorCode::RemoteProjectPathDoesNotExist
|
||||
));
|
||||
}
|
||||
|
||||
@@ -558,7 +373,7 @@ async fn test_save_as_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::Tes
|
||||
|
||||
// Creating a project with a path that does exist should not fail
|
||||
let (dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
|
||||
create_remote_project(&server, client1.app_state.clone(), cx1, cx2).await;
|
||||
|
||||
let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1);
|
||||
|
||||
@@ -583,33 +398,3 @@ async fn test_save_as_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::Tes
|
||||
"remote\nremote\nremote"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_new_file_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
|
||||
let (server, client1) = TestServer::start1(cx1).await;
|
||||
|
||||
// Creating a project with a path that does exist should not fail
|
||||
let (dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
|
||||
|
||||
let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1);
|
||||
|
||||
cx.simulate_keystrokes("cmd-n");
|
||||
cx.simulate_input("new!");
|
||||
cx.simulate_keystrokes("cmd-shift-s");
|
||||
cx.simulate_input("2.txt");
|
||||
cx.simulate_keystrokes("enter");
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let title = remote_workspace
|
||||
.update(&mut cx, |ws, cx| {
|
||||
ws.active_item(cx).unwrap().tab_description(0, &cx).unwrap()
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(title, "2.txt");
|
||||
|
||||
let path = Path::new("/remote/2.txt");
|
||||
assert_eq!(dev_server.fs().load(&path).await.unwrap(), "new!");
|
||||
}
|
||||
|
||||
@@ -6,18 +6,13 @@ use call::ActiveCall;
|
||||
use collections::HashMap;
|
||||
use editor::{
|
||||
actions::{
|
||||
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, Redo, Rename,
|
||||
RevertSelectedHunks, ToggleCodeActions, Undo,
|
||||
},
|
||||
test::{
|
||||
editor_hunks,
|
||||
editor_test_context::{AssertionContextManager, EditorTestContext},
|
||||
expanded_hunks, expanded_hunks_background_highlights,
|
||||
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, Redo, Rename, RevertSelectedHunks,
|
||||
ToggleCodeActions, Undo,
|
||||
},
|
||||
test::editor_test_context::{AssertionContextManager, EditorTestContext},
|
||||
Editor,
|
||||
};
|
||||
use futures::StreamExt;
|
||||
use git::diff::DiffHunkStatus;
|
||||
use gpui::{BorrowAppContext, TestAppContext, VisualContext, VisualTestContext};
|
||||
use indoc::indoc;
|
||||
use language::{
|
||||
@@ -78,7 +73,7 @@ async fn test_host_disconnect(
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
cx_a.background_executor.run_until_parked();
|
||||
|
||||
assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
|
||||
@@ -199,7 +194,7 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor(
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
|
||||
// Open a buffer as client A
|
||||
let buffer_a = project_a
|
||||
@@ -315,7 +310,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
|
||||
// Open a file in an editor as the guest.
|
||||
let buffer_b = project_b
|
||||
@@ -444,93 +439,6 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
|
||||
"use d::SomeTrait;\nfn main() { a.first_method() }"
|
||||
);
|
||||
});
|
||||
|
||||
// Now we do a second completion, this time to ensure that documentation/snippets are
|
||||
// resolved
|
||||
editor_b.update(cx_b, |editor, cx| {
|
||||
editor.change_selections(None, cx, |s| s.select_ranges([46..46]));
|
||||
editor.handle_input("; a", cx);
|
||||
editor.handle_input(".", cx);
|
||||
});
|
||||
|
||||
buffer_b.read_with(cx_b, |buffer, _| {
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"use d::SomeTrait;\nfn main() { a.first_method(); a. }"
|
||||
);
|
||||
});
|
||||
|
||||
let mut completion_response = fake_language_server
|
||||
.handle_request::<lsp::request::Completion, _, _>(|params, _| async move {
|
||||
assert_eq!(
|
||||
params.text_document_position.text_document.uri,
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
);
|
||||
assert_eq!(
|
||||
params.text_document_position.position,
|
||||
lsp::Position::new(1, 32),
|
||||
);
|
||||
|
||||
Ok(Some(lsp::CompletionResponse::Array(vec![
|
||||
lsp::CompletionItem {
|
||||
label: "third_method(…)".into(),
|
||||
detail: Some("fn(&mut self, B, C, D) -> E".into()),
|
||||
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
// no snippet placeholders
new_text: "third_method".to_string(),
|
||||
range: lsp::Range::new(
|
||||
lsp::Position::new(1, 32),
|
||||
lsp::Position::new(1, 32),
|
||||
),
|
||||
})),
|
||||
insert_text_format: Some(lsp::InsertTextFormat::SNIPPET),
|
||||
documentation: None,
|
||||
..Default::default()
|
||||
},
|
||||
])))
|
||||
});
|
||||
|
||||
// The completion now gets a new `text_edit.new_text` when resolving the completion item
|
||||
let mut resolve_completion_response = fake_language_server
|
||||
.handle_request::<lsp::request::ResolveCompletionItem, _, _>(|params, _| async move {
|
||||
assert_eq!(params.label, "third_method(…)");
|
||||
Ok(lsp::CompletionItem {
|
||||
label: "third_method(…)".into(),
|
||||
detail: Some("fn(&mut self, B, C, D) -> E".into()),
|
||||
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
|
||||
// Now it's a snippet
|
||||
new_text: "third_method($1, $2, $3)".to_string(),
|
||||
range: lsp::Range::new(lsp::Position::new(1, 32), lsp::Position::new(1, 32)),
|
||||
})),
|
||||
insert_text_format: Some(lsp::InsertTextFormat::SNIPPET),
|
||||
documentation: Some(lsp::Documentation::String(
|
||||
"this is the documentation".into(),
|
||||
)),
|
||||
..Default::default()
|
||||
})
|
||||
});
|
||||
|
||||
cx_b.executor().run_until_parked();
|
||||
|
||||
completion_response.next().await.unwrap();
|
||||
|
||||
editor_b.update(cx_b, |editor, cx| {
|
||||
assert!(editor.context_menu_visible());
|
||||
editor.context_menu_first(&ContextMenuFirst {}, cx);
|
||||
});
|
||||
|
||||
resolve_completion_response.next().await.unwrap();
|
||||
cx_b.executor().run_until_parked();
|

// When accepting the completion, the snippet is inserted.
|
||||
assert!(editor.context_menu_visible());
|
||||
editor.confirm_completion(&ConfirmCompletion { item_ix: Some(0) }, cx);
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
"use d::SomeTrait;\nfn main() { a.first_method(); a.third_method(, , ) }"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
@@ -572,7 +480,7 @@ async fn test_collaborating_with_code_actions(
|
||||
.unwrap();
|
||||
|
||||
// Join the project as client B.
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
|
||||
let editor_b = workspace_b
|
||||
.update(cx_b, |workspace, cx| {
|
||||
@@ -667,7 +575,7 @@ async fn test_collaborating_with_code_actions(
|
||||
editor_b.update(cx_b, |editor, cx| {
|
||||
editor.toggle_code_actions(
|
||||
&ToggleCodeActions {
|
||||
deployed_from_indicator: None,
|
||||
deployed_from_indicator: false,
|
||||
},
|
||||
cx,
|
||||
);
|
||||
@@ -787,7 +695,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
|
||||
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
|
||||
let editor_b = workspace_b
|
||||
@@ -1036,7 +944,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
.await
|
||||
.unwrap();
|
||||
executor.run_until_parked();
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
|
||||
project_b.read_with(cx_b, |project, _| {
|
||||
let status = project.language_server_statuses().next().unwrap();
|
||||
@@ -1133,7 +1041,7 @@ async fn test_share_project(
|
||||
.unwrap();
|
||||
let client_b_peer_id = client_b.peer_id().unwrap();
|
||||
let project_b = client_b
|
||||
.build_dev_server_project(initial_project.id, cx_b)
|
||||
.build_remote_project(initial_project.id, cx_b)
|
||||
.await;
|
||||
|
||||
let replica_id_b = project_b.read_with(cx_b, |project, _| project.replica_id());
|
||||
@@ -1237,7 +1145,7 @@ async fn test_share_project(
|
||||
.await
|
||||
.unwrap();
|
||||
let _project_c = client_c
|
||||
.build_dev_server_project(initial_project.id, cx_c)
|
||||
.build_remote_project(initial_project.id, cx_c)
|
||||
.await;
|
||||
|
||||
// Client B closes the editor, and client A sees client B's selections removed.
|
||||
@@ -1297,7 +1205,7 @@ async fn test_on_input_format_from_host_to_guest(
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
|
||||
// Open a file in an editor as the host.
|
||||
let buffer_a = project_a
|
||||
@@ -1417,7 +1325,7 @@ async fn test_on_input_format_from_guest_to_host(
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
|
||||
// Open a file in an editor as the guest.
|
||||
let buffer_b = project_b
|
||||
@@ -1578,7 +1486,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
.unwrap();
|
||||
|
||||
// Client B joins the project
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
active_call_b
|
||||
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
|
||||
.await
|
||||
@@ -1838,7 +1746,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
active_call_b
|
||||
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
|
||||
.await
|
||||
@@ -1967,7 +1875,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_multiple_hunk_types_revert(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
||||
async fn test_multiple_types_reverts(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
||||
let mut server = TestServer::start(cx_a.executor()).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
@@ -2014,7 +1922,7 @@ struct Row10;"#};
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
active_call_b
|
||||
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
|
||||
.await
|
||||
@@ -2073,7 +1981,7 @@ struct Row10;"#};
|
||||
.as_singleton()
|
||||
.unwrap()
|
||||
.update(cx, |buffer, cx| {
|
||||
buffer.set_diff_base(Some(base_text.into()), cx);
|
||||
buffer.set_diff_base(Some(base_text.to_string()), cx);
|
||||
});
|
||||
});
|
||||
editor_cx_b.update_editor(|editor, cx| {
|
||||
@@ -2083,14 +1991,14 @@ struct Row10;"#};
|
||||
.as_singleton()
|
||||
.unwrap()
|
||||
.update(cx, |buffer, cx| {
|
||||
buffer.set_diff_base(Some(base_text.into()), cx);
|
||||
buffer.set_diff_base(Some(base_text.to_string()), cx);
|
||||
});
|
||||
});
|
||||
cx_a.executor().run_until_parked();
|
||||
cx_b.executor().run_until_parked();

// the client selects a range in the updated buffer, expands it to see the diff for each hunk in the selection
// the host does not see the diffs toggled
// client, selects a range in the updated buffer, and reverts it
// both host and the client observe the reverted state (with one hunk left, not covered by client's selection)
editor_cx_b.set_selections_state(indoc! {r#"«ˇstruct Row;
|
||||
struct Row0.1;
|
||||
struct Row0.2;
|
||||
@@ -2102,100 +2010,11 @@ struct Row10;"#};
|
||||
|
||||
struct R»ow9;
|
||||
struct Row1220;"#});
|
||||
editor_cx_b
|
||||
.update_editor(|editor, cx| editor.toggle_hunk_diff(&editor::actions::ToggleHunkDiff, cx));
|
||||
cx_a.executor().run_until_parked();
|
||||
cx_b.executor().run_until_parked();
|
||||
editor_cx_a.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
vec![
|
||||
("".to_string(), DiffHunkStatus::Added, 1..3),
|
||||
("struct Row2;\n".to_string(), DiffHunkStatus::Removed, 4..4),
|
||||
("struct Row5;\n".to_string(), DiffHunkStatus::Modified, 6..7),
|
||||
("struct Row8;\n".to_string(), DiffHunkStatus::Removed, 9..9),
|
||||
(
|
||||
"struct Row10;".to_string(),
|
||||
DiffHunkStatus::Modified,
|
||||
10..10,
|
||||
),
|
||||
]
|
||||
);
|
||||
assert_eq!(all_expanded_hunks, Vec::new());
|
||||
});
|
||||
editor_cx_b.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![1..=2, 8..=8],
|
||||
);
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
vec![
|
||||
("".to_string(), DiffHunkStatus::Added, 1..3),
|
||||
("struct Row2;\n".to_string(), DiffHunkStatus::Removed, 5..5),
|
||||
("struct Row5;\n".to_string(), DiffHunkStatus::Modified, 8..9),
|
||||
(
|
||||
"struct Row8;\n".to_string(),
|
||||
DiffHunkStatus::Removed,
|
||||
12..12
|
||||
),
|
||||
(
|
||||
"struct Row10;".to_string(),
|
||||
DiffHunkStatus::Modified,
|
||||
13..13,
|
||||
),
|
||||
]
|
||||
);
|
||||
assert_eq!(all_expanded_hunks, &all_hunks[..all_hunks.len() - 1]);
|
||||
});
|

// the client reverts the hunks, removing the expanded diffs too
// both host and the client observe the reverted state (with one hunk left, not covered by client's selection)
editor_cx_b.update_editor(|editor, cx| {
editor.revert_selected_hunks(&RevertSelectedHunks, cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_cx_a.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
assert_eq!(
all_hunks,
vec![(
"struct Row10;".to_string(),
DiffHunkStatus::Modified,
10..10,
)]
);
assert_eq!(all_expanded_hunks, Vec::new());
});
editor_cx_b.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![5..=5]
);
assert_eq!(
all_hunks,
vec![(
"struct Row10;".to_string(),
DiffHunkStatus::Modified,
10..10,
)]
);
assert_eq!(all_expanded_hunks, Vec::new());
});
editor_cx_a.assert_editor_state(indoc! {r#"struct Row;
struct Row1;
struct Row2;
@@ -2306,7 +2125,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
.unwrap();

// Join the project as client B.
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
let editor_b = workspace_b
.update(cx_b, |workspace, cx| {

@@ -73,7 +73,7 @@ async fn test_basic_following(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
@@ -161,7 +161,7 @@ async fn test_basic_following(

executor.run_until_parked();
let active_call_c = cx_c.read(ActiveCall::global);
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
let project_c = client_c.build_remote_project(project_id, cx_c).await;
let (workspace_c, cx_c) = client_c.build_workspace(&project_c, cx_c);
active_call_c
.update(cx_c, |call, cx| call.set_location(Some(&project_c), cx))
@@ -174,7 +174,7 @@ async fn test_basic_following(

cx_d.executor().run_until_parked();
let active_call_d = cx_d.read(ActiveCall::global);
let project_d = client_d.build_dev_server_project(project_id, cx_d).await;
let project_d = client_d.build_remote_project(project_id, cx_d).await;
let (workspace_d, cx_d) = client_d.build_workspace(&project_d, cx_d);
active_call_d
.update(cx_d, |call, cx| call.set_location(Some(&project_d), cx))
@@ -567,7 +567,7 @@ async fn test_following_tab_order(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
@@ -684,7 +684,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
.unwrap();

// Client B joins the project.
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
@@ -1197,7 +1197,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
@@ -1333,7 +1333,7 @@ async fn test_peers_simultaneously_following_each_other(
.await
.unwrap();

let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);

executor.run_until_parked();
@@ -1683,7 +1683,7 @@ async fn test_following_into_excluded_file(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await

@@ -1375,7 +1375,7 @@ async fn test_unshare_project(
.unwrap();

let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
executor.run_until_parked();

assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
@@ -1395,7 +1395,7 @@ async fn test_unshare_project(
assert!(project_b.read_with(cx_b, |project, _| project.is_disconnected()));

// Client C opens the project.
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
let project_c = client_c.build_remote_project(project_id, cx_c).await;

// When client A unshares the project, client C's project becomes read-only.
project_a
@@ -1412,7 +1412,7 @@ async fn test_unshare_project(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_c2 = client_c.build_dev_server_project(project_id, cx_c).await;
let project_c2 = client_c.build_remote_project(project_id, cx_c).await;
executor.run_until_parked();

assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
@@ -1516,9 +1516,9 @@ async fn test_project_reconnect(
.await
.unwrap();

let project_b1 = client_b.build_dev_server_project(project1_id, cx_b).await;
let project_b2 = client_b.build_dev_server_project(project2_id, cx_b).await;
let project_b3 = client_b.build_dev_server_project(project3_id, cx_b).await;
let project_b1 = client_b.build_remote_project(project1_id, cx_b).await;
let project_b2 = client_b.build_remote_project(project2_id, cx_b).await;
let project_b3 = client_b.build_remote_project(project3_id, cx_b).await;
executor.run_until_parked();

let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| {
@@ -2314,8 +2314,8 @@ async fn test_propagate_saves_and_fs_changes(
.unwrap();

// Join that worktree as clients B and C.
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
let project_c = client_c.build_remote_project(project_id, cx_c).await;

let worktree_b = project_b.read_with(cx_b, |p, _| p.worktrees().next().unwrap());

@@ -2450,8 +2450,7 @@ async fn test_propagate_saves_and_fs_changes(
});

let new_buffer_a = project_a
.update(cx_a, |p, cx| p.create_buffer(cx))
.await
.update(cx_a, |p, cx| p.create_buffer("", None, cx))
.unwrap();

let new_buffer_id = new_buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id());
@@ -2539,7 +2538,7 @@ async fn test_git_diff_base_change(
.await
.unwrap();

let project_remote = client_b.build_dev_server_project(project_id, cx_b).await;
let project_remote = client_b.build_remote_project(project_id, cx_b).await;

let diff_base = "
one
@@ -2570,10 +2569,7 @@ async fn test_git_diff_base_change(
// Smoke test diffing

buffer_local_a.read_with(cx_a, |buffer, _| {
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
Some(diff_base.as_str())
);
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
@@ -2594,10 +2590,7 @@ async fn test_git_diff_base_change(
// Smoke test diffing

buffer_remote_a.read_with(cx_b, |buffer, _| {
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
Some(diff_base.as_str())
);
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
@@ -2617,10 +2610,7 @@ async fn test_git_diff_base_change(
// Smoke test new diffing

buffer_local_a.read_with(cx_a, |buffer, _| {
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
Some(new_diff_base.as_str())
);
assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));

git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
@@ -2633,10 +2623,7 @@ async fn test_git_diff_base_change(
// Smoke test B

buffer_remote_a.read_with(cx_b, |buffer, _| {
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
Some(new_diff_base.as_str())
);
assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
@@ -2676,10 +2663,7 @@ async fn test_git_diff_base_change(
// Smoke test diffing

buffer_local_b.read_with(cx_a, |buffer, _| {
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
Some(diff_base.as_str())
);
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
@@ -2700,10 +2684,7 @@ async fn test_git_diff_base_change(
// Smoke test diffing

buffer_remote_b.read_with(cx_b, |buffer, _| {
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
Some(diff_base.as_str())
);
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
@@ -2723,10 +2704,7 @@ async fn test_git_diff_base_change(
// Smoke test new diffing

buffer_local_b.read_with(cx_a, |buffer, _| {
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
Some(new_diff_base.as_str())
);
assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
println!("{:?}", buffer.as_rope().to_string());
println!("{:?}", buffer.diff_base());
println!(
@@ -2748,10 +2726,7 @@ async fn test_git_diff_base_change(
// Smoke test B

buffer_remote_b.read_with(cx_b, |buffer, _| {
assert_eq!(
buffer.diff_base().map(|rope| rope.to_string()).as_deref(),
Some(new_diff_base.as_str())
);
assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
&buffer,
@@ -2795,7 +2770,7 @@ async fn test_git_branch_name(
.await
.unwrap();

let project_remote = client_b.build_dev_server_project(project_id, cx_b).await;
let project_remote = client_b.build_remote_project(project_id, cx_b).await;
client_a
.fs()
.set_branch_name(Path::new("/dir/.git"), Some("branch-1"));
@@ -2840,7 +2815,7 @@ async fn test_git_branch_name(
assert_branch(Some("branch-2"), project, cx)
});

let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await;
let project_remote_c = client_c.build_remote_project(project_id, cx_c).await;
executor.run_until_parked();

project_remote_c.read_with(cx_c, |project, cx| {
@@ -2895,7 +2870,7 @@ async fn test_git_status_sync(
.await
.unwrap();

let project_remote = client_b.build_dev_server_project(project_id, cx_b).await;
let project_remote = client_b.build_remote_project(project_id, cx_b).await;

// Wait for it to catch up to the new status
executor.run_until_parked();
@@ -2971,7 +2946,7 @@ async fn test_git_status_sync(
});

// And synchronization while joining
let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await;
let project_remote_c = client_c.build_remote_project(project_id, cx_c).await;
executor.run_until_parked();

project_remote_c.read_with(cx_c, |project, cx| {
@@ -3019,7 +2994,7 @@ async fn test_fs_operations(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());

@@ -3313,7 +3288,7 @@ async fn test_local_settings(
executor.run_until_parked();

// As client B, join that project and observe the local settings.
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap());
executor.run_until_parked();
@@ -3436,7 +3411,7 @@ async fn test_buffer_conflict_after_save(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Open a buffer as client B
let buffer_b = project_b
@@ -3500,7 +3475,7 @@ async fn test_buffer_reloading(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Open a buffer as client B
let buffer_b = project_b
@@ -3554,7 +3529,7 @@ async fn test_editing_while_guest_opens_buffer(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Open a buffer as client A
let buffer_a = project_a
@@ -3602,7 +3577,7 @@ async fn test_leaving_worktree_while_opening_buffer(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// See that a guest has joined as client A.
executor.run_until_parked();
@@ -3649,7 +3624,7 @@ async fn test_canceling_buffer_opening(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

let buffer_a = project_a
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
@@ -3706,8 +3681,8 @@ async fn test_leaving_project(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b1 = client_b.build_dev_server_project(project_id, cx_b).await;
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
let project_b1 = client_b.build_remote_project(project_id, cx_b).await;
let project_c = client_c.build_remote_project(project_id, cx_c).await;

// Client A sees that a guest has joined.
executor.run_until_parked();
@@ -3748,7 +3723,7 @@ async fn test_leaving_project(
});

// Client B re-joins the project and can open buffers as before.
let project_b2 = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b2 = client_b.build_remote_project(project_id, cx_b).await;
executor.run_until_parked();

project_a.read_with(cx_a, |project, _| {
@@ -3924,7 +3899,7 @@ async fn test_collaborating_with_diagnostics(
);

// Join the worktree as client B.
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Wait for server to see the diagnostics update.
executor.run_until_parked();
@@ -3949,7 +3924,7 @@ async fn test_collaborating_with_diagnostics(
});

// Join project as client C and observe the diagnostics.
let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
let project_c = client_c.build_remote_project(project_id, cx_c).await;
executor.run_until_parked();
let project_c_diagnostic_summaries =
Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| {
@@ -4157,7 +4132,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
.unwrap();

// Join the project as client B and open all three files.
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
let guest_buffers = futures::future::try_join_all(file_names.iter().map(|file_name| {
project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, file_name), cx))
}))
@@ -4263,7 +4238,7 @@ async fn test_reloading_buffer_manually(
.await
.unwrap();

let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
@@ -4361,7 +4336,7 @@ async fn test_formatting_buffer(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
@@ -4481,7 +4456,7 @@ async fn test_prettier_formatting_buffer(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();

@@ -4584,7 +4559,7 @@ async fn test_definition(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Open the file on client B.
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
@@ -4729,7 +4704,7 @@ async fn test_references(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Open the file on client B.
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx));
@@ -4886,7 +4861,7 @@ async fn test_project_search(
.await
.unwrap();

let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Perform a search as the guest.
let mut results = HashMap::default();
@@ -4967,7 +4942,7 @@ async fn test_document_highlights(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Open the file on client B.
let open_b = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
@@ -5089,7 +5064,7 @@ async fn test_lsp_hover(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Open the file as the guest
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
@@ -5266,7 +5241,7 @@ async fn test_project_symbols(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

// Cause the language server to start.
let open_buffer_task =
@@ -5361,7 +5336,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
let project_b = client_b.build_remote_project(project_id, cx_b).await;

let open_buffer_task = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
let buffer_b1 = cx_b.executor().spawn(open_buffer_task).await.unwrap();
@@ -6129,7 +6104,7 @@ async fn test_right_click_menu_behind_collab_panel(cx: &mut TestAppContext) {
}

#[gpui::test]
async fn test_pane_split_left(cx: &mut TestAppContext) {
async fn test_cmd_k_left(cx: &mut TestAppContext) {
let (_, client) = TestServer::start1(cx).await;
let (workspace, cx) = client.build_test_workspace(cx).await;


@@ -217,20 +217,19 @@ impl RandomizedTest for ProjectCollaborationTest {
0..=70 => {
// Open a remote project
if let Some(room) = call.read_with(cx, |call, _| call.room().cloned()) {
let existing_dev_server_project_ids = cx.read(|cx| {
let existing_remote_project_ids = cx.read(|cx| {
client
.dev_server_projects()
.remote_projects()
.iter()
.map(|p| p.read(cx).remote_id().unwrap())
.collect::<Vec<_>>()
});
let new_dev_server_projects = room.read_with(cx, |room, _| {
let new_remote_projects = room.read_with(cx, |room, _| {
room.remote_participants()
.values()
.flat_map(|participant| {
participant.projects.iter().filter_map(|project| {
if existing_dev_server_project_ids.contains(&project.id)
{
if existing_remote_project_ids.contains(&project.id) {
None
} else {
Some((
@@ -242,9 +241,9 @@ impl RandomizedTest for ProjectCollaborationTest {
})
.collect::<Vec<_>>()
});
if !new_dev_server_projects.is_empty() {
if !new_remote_projects.is_empty() {
let (host_id, first_root_name) =
new_dev_server_projects.choose(rng).unwrap().clone();
new_remote_projects.choose(rng).unwrap().clone();
break ClientOperation::OpenRemoteProject {
host_id,
first_root_name,
@@ -260,8 +259,8 @@ impl RandomizedTest for ProjectCollaborationTest {

// Close a remote project
71..=80 => {
if !client.dev_server_projects().is_empty() {
let project = client.dev_server_projects().choose(rng).unwrap().clone();
if !client.remote_projects().is_empty() {
let project = client.remote_projects().choose(rng).unwrap().clone();
let first_root_name = root_name_for_project(&project, cx);
break ClientOperation::CloseRemoteProject {
project_root_name: first_root_name,
@@ -596,12 +595,12 @@ impl RandomizedTest for ProjectCollaborationTest {
);

let ix = client
.dev_server_projects()
.remote_projects()
.iter()
.position(|p| p == &project)
.unwrap();
cx.update(|_| {
client.dev_server_projects_mut().remove(ix);
client.remote_projects_mut().remove(ix);
client.buffers().retain(|p, _| *p != project);
drop(project);
});
@@ -643,7 +642,7 @@ impl RandomizedTest for ProjectCollaborationTest {
);

let project = project.await?;
client.dev_server_projects_mut().push(project.clone());
client.remote_projects_mut().push(project.clone());
}

ClientOperation::CreateWorktreeEntry {
@@ -1143,7 +1142,7 @@ impl RandomizedTest for ProjectCollaborationTest {

async fn on_quiesce(_: &mut TestServer, clients: &mut [(Rc<TestClient>, TestAppContext)]) {
for (client, client_cx) in clients.iter() {
for guest_project in client.dev_server_projects().iter() {
for guest_project in client.remote_projects().iter() {
guest_project.read_with(client_cx, |guest_project, cx| {
let host_project = clients.iter().find_map(|(client, cx)| {
let project = client
@@ -1488,9 +1487,8 @@ fn project_for_root_name(
if let Some(ix) = project_ix_for_root_name(client.local_projects().deref(), root_name, cx) {
return Some(client.local_projects()[ix].clone());
}
if let Some(ix) = project_ix_for_root_name(client.dev_server_projects().deref(), root_name, cx)
{
return Some(client.dev_server_projects()[ix].clone());
if let Some(ix) = project_ix_for_root_name(client.remote_projects().deref(), root_name, cx) {
return Some(client.remote_projects()[ix].clone());
}
None
}
@@ -1580,7 +1578,7 @@ fn choose_random_project(client: &TestClient, rng: &mut StdRng) -> Option<Model<
.local_projects()
.deref()
.iter()
.chain(client.dev_server_projects().iter())
.chain(client.remote_projects().iter())
.choose(rng)
.cloned()
}

@@ -533,7 +533,7 @@ impl<T: RandomizedTest> TestPlan<T> {
deterministic.finish_waiting();
server.allow_connections();

for project in client.dev_server_projects().iter() {
for project in client.remote_projects().iter() {
project.read_with(&client_cx, |project, _| {
assert!(
project.is_disconnected(),

@@ -17,7 +17,6 @@ use collab_ui::channel_view::ChannelView;
use collections::{HashMap, HashSet};
use fs::FakeFs;
use futures::{channel::oneshot, StreamExt as _};
use git::GitHostingProviderRegistry;
use gpui::{BackgroundExecutor, Context, Model, Task, TestAppContext, View, VisualTestContext};
use language::LanguageRegistry;
use node_runtime::FakeNodeRuntime;
@@ -65,7 +64,7 @@ pub struct TestClient {
#[derive(Default)]
struct TestClientState {
local_projects: Vec<Model<Project>>,
dev_server_projects: Vec<Model<Project>>,
remote_projects: Vec<Model<Project>>,
buffers: HashMap<Model<Project>, HashSet<Model<language::Buffer>>>,
channel_buffers: HashSet<Model<ChannelBuffer>>,
}
@@ -258,11 +257,6 @@ impl TestServer {
})
});

let git_hosting_provider_registry =
cx.update(|cx| GitHostingProviderRegistry::default_global(cx));
git_hosting_provider_registry
.register_hosting_provider(Arc::new(git_hosting_providers::Github));

let fs = FakeFs::new(cx.executor());
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx));
@@ -277,12 +271,6 @@ impl TestServer {
node_runtime: FakeNodeRuntime::new(),
});

let os_keymap = if cfg!(target_os = "linux") {
"keymaps/default-linux.json"
} else {
"keymaps/default-macos.json"
};

cx.update(|cx| {
theme::init(theme::LoadThemes::JustBase, cx);
Project::init(&client, cx);
@@ -296,8 +284,8 @@ impl TestServer {
collab_ui::init(&app_state, cx);
file_finder::init(cx);
menu::init();
dev_server_projects::init(client.clone(), cx);
settings::KeymapFile::load_asset(os_keymap, cx).unwrap();
remote_projects::init(client.clone(), cx);
settings::KeymapFile::load_asset("keymaps/default-macos.json", cx).unwrap();
});

client
@@ -428,10 +416,8 @@ impl TestServer {
node_runtime: app_state.node_runtime.clone(),
},
cx,
)
})
.await
.unwrap();
);
});

TestClient {
app_state,
@@ -663,7 +649,6 @@ impl TestServer {
auto_join_channel_id: None,
migrations_path: None,
seed_path: None,
supermaven_admin_api_key: None,
},
})
}
@@ -744,18 +729,16 @@ impl TestClient {
Ref::map(self.state.borrow(), |state| &state.local_projects)
}

pub fn dev_server_projects(&self) -> impl Deref<Target = Vec<Model<Project>>> + '_ {
Ref::map(self.state.borrow(), |state| &state.dev_server_projects)
pub fn remote_projects(&self) -> impl Deref<Target = Vec<Model<Project>>> + '_ {
Ref::map(self.state.borrow(), |state| &state.remote_projects)
}

pub fn local_projects_mut(&self) -> impl DerefMut<Target = Vec<Model<Project>>> + '_ {
RefMut::map(self.state.borrow_mut(), |state| &mut state.local_projects)
}

pub fn dev_server_projects_mut(&self) -> impl DerefMut<Target = Vec<Model<Project>>> + '_ {
RefMut::map(self.state.borrow_mut(), |state| {
&mut state.dev_server_projects
})
pub fn remote_projects_mut(&self) -> impl DerefMut<Target = Vec<Model<Project>>> + '_ {
RefMut::map(self.state.borrow_mut(), |state| &mut state.remote_projects)
}

pub fn buffers_for_project<'a>(
@@ -880,7 +863,7 @@ impl TestClient {
})
}

pub async fn build_dev_server_project(
pub async fn build_remote_project(
&self,
host_project_id: u64,
guest_cx: &mut TestAppContext,