Compare commits: v0.154.2 ... v3-editor-
180 Commits
Commits (abbreviated SHA1s, oldest to newest):

022ce7d883, 250f2e76eb, 5f35fa5d92, 84ce81caf1, 8aeab4800c, 1021f0e288, 675673ed54, 3737d4eb4f, 0daa070448, 689da9d0b1,
1c5be9de4e, d5f67406b0, c3075dfe9a, caaa9a00a9, ffd1083cc1, 6d4ecac610, dc5ffe6994, 03c7f08581, 73ff8c0f1f, 1c5d9c221a,
a1d2e1106e, 568a21a700, 5199135b54, 8559731e0d, 02d0561586, 1be3c44550, 32605e9ea4, c83d007138, 48c6eb9ac7, e28496d4e2,
c1a039a5d7, 71da81c743, 11058765be, c7a79cfc02, 84a6ded657, e5bbd378a6, 82eb753b31, de1889d6a8, f143396825, 7eea1a6f51,
1a4f9b2891, 1deed247eb, db92a31067, 31902a1b73, 3f415f3587, 140d70289e, b9b689d322, 2d2e20f9d4, b701eab44f, 6167688a63,
3161aedcb0, 64532e94e4, 40408e731e, 7398f795e3, 4b4565fb7a, 21a023980d, ae6a3d15af, dc7c49bd0b, 1eddd2f38d, dc48af0ca1,
1f54fde4d2, 19162c3160, 9300dbc834, bbf5ed2ba1, 500c3c54a6, 59dc3985a1, ccc871c44c, 4e2ae06ca6, 300bf87f77, a6cb17fb51,
9d197ddc99, 623a6eca75, 7bb510971a, eb71d2f1a8, fc9db97ac7, e9bc9ed5d5, 9a8601227d, d33600525e, fdb03d3058, c4e0f5e0ee,
da1ef13442, 5045f984a9, 2d71c36ad3, d2ffad0f34, 692590bff4, 87ac4cff60, 9606858436, f39e54decc, 8a7ef4db59, fd07fef4db,
4a4d8c1cab, b69c6ee7df, 0e86ba0983, 5e62bbfd29, 21be70f278, 2470db4901, e87d6da2a6, 437bcc0ce6, 3a2f0653d1, 336b4a5690,
93a4295f66, f019ad563f, 399e094f02, dbc325ea12, 6b56530a4a, 20c06545b6, d989183f94, 3ba071b993, e4080ef565, e95e1c9ae5,
1ff10b71c8, 7051bc00c2, 1efe87029b, 11953bbc16, 65bb989c61, 20826336d9, 3c95a64a23, bc751d6c19, a36706aed6, 35a80f07e0,
2ff8dde925, d784e72027, 8a36278c95, 05d18321db, bb7d9d3525, 75cb199a54, 0f4ebdfbca, 37c93d8fea, e7fcf83ce8, 1f35c8d09d,
3ca18af40b, 4f227fd3bf, 743feb98bc, e309fbda2a, 5905fbb9ac, 7d62fda5a3, 45388805ad, 7dac5594cd, 5d12e3ce3a, 601090511b,
8bd624b5db, 9f6ff29a54, d97427f69e, 99bef27300, f8195c41e0, 759646e0a3, ab1d466c5f, 5f1046b3cd, d6c184b494, 16d2afc662,
90a12f5564, ca033e6475, 97708fdf43, ace4d5185d, 93730983dd, 579267f399, 8103ac12bf, 15b4130fa5, ae34872f73, 740803d745,
edf2c19250, 82e6b1e0e5, 28a54ce122, fbbf0393cb, 00b1c81c9f, 27c1106fad, 1fc391f696, 8074fba76b, ac0d5d3152, c3bdc1c178,
ce4f07bd3c, 157c57aa8d, 6670c9eb3b, 3986bcf9dc, 713b39bac0, 3fd690ade4, e9f2e72ff0, 7d0a7541bf, a944bb2f24, d3d0c279b7
.github/actions/run_tests/action.yml (vendored, 2 changes)

@@ -10,7 +10,7 @@ runs:
       cargo install cargo-nextest

     - name: Install Node
-      uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
+      uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
       with:
         node-version: "18"
@@ -1,12 +1,12 @@
-name: Update Nightly Tag
+name: Bump collab-staging Tag

 on:
   schedule:
-    # Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
-    - cron: "0 7 * * *"
+    # Fire every day at 16:00 UTC (At the start of the US workday)
+    - cron: "0 16 * * *"

 jobs:
-  update-nightly-tag:
+  update-collab-staging-tag:
     if: github.repository_owner == 'zed-industries'
     runs-on: ubuntu-latest
     steps:
@@ -15,9 +15,9 @@ jobs:
         with:
           fetch-depth: 0

-      - name: Update nightly tag
+      - name: Update collab-staging tag
        run: |
          git config user.name github-actions
          git config user.email github-actions@github.com
-         git tag -f nightly
-         git push origin nightly --force
+         git tag -f collab-staging
+         git push origin collab-staging --force
.github/workflows/bump_patch_version.yml (vendored, 2 changes)

@@ -41,7 +41,7 @@ jobs:
               exit 1
               ;;
           esac
-          which cargo-set-version > /dev/null || cargo install cargo-edit --features vendored-openssl
+          which cargo-set-version > /dev/null || cargo install cargo-edit
           output=$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //')
           git commit -am "Bump to $output for @$GITHUB_ACTOR" --author "Zed Bot <hi@zed.dev>"
           git tag v${output}${tag_suffix}
.github/workflows/ci.yml (vendored, 2 changes)

@@ -172,7 +172,7 @@ jobs:
       DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
     steps:
       - name: Install Node
-        uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
+        uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
        with:
          node-version: "18"
.github/workflows/close_stale_issues.yml (vendored, new file, +31)

@@ -0,0 +1,31 @@
+name: "Close Stale Issues"
+on:
+  schedule:
+    - cron: "0 11 * * 2"
+  workflow_dispatch:
+
+jobs:
+  stale:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          stale-issue-message: >
+            Hi there! 👋
+
+            We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in 10 days. Feel free to open a new issue if you're seeing this message after the issue has been closed.
+
+            Thanks for your help!
+          close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!"
+          # We will increase `days-before-stale` to 365 on or after Jan 24th,
+          # 2024. This date marks one year since migrating issues from
+          # 'community' to 'zed' repository. The migration added activity to all
+          # issues, preventing 365 days from working until then.
+          days-before-stale: 180
+          days-before-close: 10
+          any-of-issue-labels: "defect,panic / crash"
+          operations-per-run: 1000
+          ascending: true
+          enable-statistics: true
+          stale-issue-label: "stale"
.github/workflows/danger.yml (vendored, 2 changes)

@@ -21,7 +21,7 @@ jobs:
           version: 9

       - name: Setup Node
-        uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
+        uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
         with:
           node-version: "20"
           cache: "pnpm"
.github/workflows/deploy_collab.yml (vendored, 23 changes)

@@ -8,7 +8,6 @@ on:

 env:
   DOCKER_BUILDKIT: 1
-  DIGITALOCEAN_ACCESS_TOKEN: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}

 jobs:
   style:
@@ -61,11 +60,12 @@ jobs:
       - style
       - tests
     runs-on:
-      - self-hosted
-      - deploy
+      - buildjet-16vcpu-ubuntu-2204
     steps:
+      - name: Add Rust to the PATH
+        run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
       - name: Install doctl
         uses: digitalocean/action-doctl@v2
         with:
           token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}

       - name: Sign into DigitalOcean docker registry
         run: doctl registry login
@@ -89,10 +89,19 @@ jobs:
     needs:
       - publish
     runs-on:
-      - self-hosted
-      - deploy
+      - buildjet-16vcpu-ubuntu-2204

     steps:
+      - name: Checkout repo
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+        with:
+          clean: false
+
+      - name: Install doctl
+        uses: digitalocean/action-doctl@v2
+        with:
+          token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}

       - name: Sign into Kubernetes
         run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }}
.github/workflows/randomized_tests.yml (vendored, 2 changes)

@@ -22,7 +22,7 @@ jobs:
       - buildjet-16vcpu-ubuntu-2204
     steps:
       - name: Install Node
-        uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
+        uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
        with:
          node-version: "18"
.github/workflows/release_nightly.yml (vendored, 30 changes)

@@ -1,6 +1,9 @@
 name: Release Nightly

 on:
+  schedule:
+    # Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
+    - cron: "0 7 * * *"
   push:
     tags:
       - "nightly"
@@ -67,7 +70,7 @@ jobs:
       ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
     steps:
       - name: Install Node
-        uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
+        uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4
        with:
          node-version: "18"
@@ -168,3 +171,28 @@ jobs:

     - name: Upload Zed Nightly
       run: script/upload-nightly linux-targz
+
+  update-nightly-tag:
+    name: Update nightly tag
+    if: github.repository_owner == 'zed-industries'
+    runs-on: ubuntu-latest
+    needs:
+      - bundle-mac
+      - bundle-linux-x86
+      - bundle-linux-arm
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+        with:
+          fetch-depth: 0
+
+      - name: Update nightly tag
+        run: |
+          if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
+            echo "Nightly tag already points to current commit. Skipping tagging."
+            exit 0
+          fi
+          git config user.name github-actions
+          git config user.email github-actions@github.com
+          git tag -f nightly
+          git push origin nightly --force
.gitignore (vendored, 2 changes)

@@ -10,7 +10,7 @@
 /crates/collab/seed.json
 /crates/zed/resources/flatpak/flatpak-cargo-sources.json
 /dev.zed.Zed*.json
-/assets/*licenses.md
+/assets/*licenses.*
 **/venv
 .build
 *.wasm
Cargo.lock (generated, 88 changes)

@@ -21,11 +21,11 @@ dependencies = [

 [[package]]
 name = "addr2line"
-version = "0.22.0"
+version = "0.24.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678"
+checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375"
 dependencies = [
- "gimli",
+ "gimli 0.31.0",
 ]

@@ -245,7 +245,6 @@ dependencies = [
  "chrono",
  "futures 0.3.30",
  "http_client",
- "isahc",
  "schemars",
  "serde",
  "serde_json",
@@ -404,6 +403,7 @@ dependencies = [
  "language_model",
  "languages",
  "log",
+ "lsp",
  "markdown",
  "menu",
  "multi_buffer",
@@ -894,9 +894,9 @@ dependencies = [

 [[package]]
 name = "async-trait"
-version = "0.1.81"
+version = "0.1.83"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107"
+checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1493,17 +1493,17 @@ dependencies = [

 [[package]]
 name = "backtrace"
-version = "0.3.73"
+version = "0.3.74"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a"
+checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
 dependencies = [
  "addr2line",
  "cc",
  "cfg-if",
  "libc",
- "miniz_oxide 0.7.4",
+ "miniz_oxide 0.8.0",
  "object",
  "rustc-demangle",
  "windows-targets 0.52.6",
 ]

@@ -2086,9 +2086,9 @@ dependencies = [

 [[package]]
 name = "cargo_toml"
-version = "0.20.4"
+version = "0.20.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad639525b1c67b6a298f378417b060fbc04618bea559482a8484381cce27d965"
+checksum = "88da5a13c620b4ca0078845707ea9c3faf11edbc3ffd8497d11d686211cd1ac0"
 dependencies = [
  "serde",
  "toml 0.8.19",
@@ -2282,9 +2282,9 @@ dependencies = [

 [[package]]
 name = "clap"
-version = "4.5.16"
+version = "4.5.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019"
+checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -2292,9 +2292,9 @@ dependencies = [

 [[package]]
 name = "clap_builder"
-version = "4.5.15"
+version = "4.5.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6"
+checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b"
 dependencies = [
  "anstream",
  "anstyle",
@@ -2314,9 +2314,9 @@ dependencies = [

 [[package]]
 name = "clap_derive"
-version = "4.5.13"
+version = "4.5.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0"
+checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab"
 dependencies = [
  "heck 0.5.0",
  "proc-macro2",
@@ -2849,7 +2849,6 @@ dependencies = [
  "gpui",
  "http_client",
  "indoc",
- "isahc",
  "language",
  "lsp",
  "menu",
@@ -3082,7 +3081,7 @@ dependencies = [
  "cranelift-control",
  "cranelift-entity",
  "cranelift-isle",
- "gimli",
+ "gimli 0.29.0",
  "hashbrown 0.14.5",
  "log",
  "regalloc2",
@@ -3730,6 +3729,7 @@ dependencies = [
  "multi_buffer",
  "ordered-float 2.10.1",
  "parking_lot",
+ "pretty_assertions",
  "project",
  "rand 0.8.5",
  "release_channel",
@@ -4127,7 +4127,6 @@ dependencies = [
  "gpui",
  "http_client",
  "indexed_docs",
- "isahc",
  "isahc_http_client",
  "language",
  "log",
@@ -4288,7 +4287,6 @@ dependencies = [
  "gpui",
  "http_client",
  "human_bytes",
- "isahc",
  "language",
  "log",
  "menu",
@@ -4325,6 +4323,7 @@ dependencies = [
  "ctor",
  "editor",
  "env_logger",
+ "file_icons",
  "futures 0.3.30",
  "fuzzy",
  "gpui",
@@ -4332,7 +4331,9 @@ dependencies = [
  "menu",
  "picker",
  "project",
+ "schemars",
  "serde",
+ "serde_derive",
  "serde_json",
  "settings",
  "text",
@@ -4872,6 +4873,12 @@ dependencies = [
  "stable_deref_trait",
 ]

+[[package]]
+name = "gimli"
+version = "0.31.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64"
+
 [[package]]
 name = "git"
 version = "0.1.0"
@@ -4939,9 +4946,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"

 [[package]]
 name = "globset"
-version = "0.4.14"
+version = "0.4.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1"
+checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19"
 dependencies = [
  "aho-corasick",
  "bstr",
@@ -5006,7 +5013,6 @@ dependencies = [
  "anyhow",
  "futures 0.3.30",
  "http_client",
- "isahc",
  "schemars",
  "serde",
  "serde_json",
@@ -5681,9 +5687,9 @@ dependencies = [

 [[package]]
 name = "ignore"
-version = "0.4.22"
+version = "0.4.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1"
+checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b"
 dependencies = [
  "crossbeam-deque",
  "globset",
@@ -6423,9 +6429,9 @@ checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8"

 [[package]]
 name = "libc"
-version = "0.2.158"
+version = "0.2.159"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
+checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5"

 [[package]]
 name = "libdbus-sys"
@@ -7044,7 +7050,6 @@ dependencies = [
  "ctor",
  "env_logger",
  "futures 0.3.30",
- "git",
  "gpui",
  "itertools 0.13.0",
  "language",
@@ -7179,6 +7184,7 @@ dependencies = [
  "async-std",
  "async-tar",
  "async-trait",
+ "async-watch",
  "async_zip",
  "futures 0.3.30",
  "http_client",
@@ -7191,6 +7197,7 @@ dependencies = [
  "tempfile",
  "util",
+ "walkdir",
  "which 6.0.3",
  "windows 0.58.0",
 ]

@@ -7579,7 +7586,6 @@ dependencies = [
  "anyhow",
  "futures 0.3.30",
  "http_client",
- "isahc",
  "schemars",
  "serde",
  "serde_json",
@@ -9110,6 +9116,7 @@ dependencies = [
  "gpui",
  "http_client",
  "language",
+ "languages",
  "log",
  "lsp",
  "node_runtime",
@@ -10486,6 +10493,7 @@ dependencies = [
  "futures 0.3.30",
  "gpui",
  "parking_lot",
+ "paths",
  "serde",
  "serde_json",
  "snippet",
@@ -13108,7 +13116,7 @@ dependencies = [
  "cranelift-frontend",
  "cranelift-native",
  "cranelift-wasm",
- "gimli",
+ "gimli 0.29.0",
  "log",
  "object",
  "target-lexicon",
@@ -13128,7 +13136,7 @@ dependencies = [
  "cpp_demangle",
  "cranelift-bitset",
  "cranelift-entity",
- "gimli",
+ "gimli 0.29.0",
  "indexmap 2.4.0",
  "log",
  "object",
@@ -13242,7 +13250,7 @@ checksum = "2a25199625effa4c13dd790d64bd56884b014c69829431bfe43991c740bd5bc1"
 dependencies = [
  "anyhow",
  "cranelift-codegen",
- "gimli",
+ "gimli 0.29.0",
  "object",
  "target-lexicon",
  "wasmparser 0.215.0",
@@ -13539,7 +13547,7 @@ checksum = "073efe897d9ead7fc609874f94580afc831114af5149b6a90ee0a3a39b497fe0"
 dependencies = [
  "anyhow",
  "cranelift-codegen",
- "gimli",
+ "gimli 0.29.0",
  "regalloc2",
  "smallvec",
  "target-lexicon",
@@ -14096,6 +14104,7 @@ dependencies = [
  "parking_lot",
  "postage",
  "project",
+ "remote",
  "schemars",
  "serde",
  "serde_json",
@@ -14375,13 +14384,14 @@ dependencies = [

 [[package]]
 name = "zed"
-version = "0.154.2"
+version = "0.156.0"
 dependencies = [
  "activity_indicator",
  "anyhow",
+ "ashpd",
  "assets",
  "assistant",
  "async-watch",
  "audio",
  "auto_update",
  "backtrace",
@@ -14419,7 +14429,6 @@ dependencies = [
  "image_viewer",
  "inline_completion_button",
  "install_cli",
- "isahc",
  "isahc_http_client",
  "journal",
  "language",
@@ -14455,6 +14464,7 @@ dependencies = [
  "session",
  "settings",
+ "settings_ui",
  "shellexpand 2.1.2",
  "simplelog",
  "smol",
  "snippet_provider",
@@ -14606,7 +14616,7 @@ dependencies = [

 [[package]]
 name = "zed_lua"
-version = "0.0.3"
+version = "0.1.0"
 dependencies = [
  "zed_extension_api 0.1.0",
 ]
@@ -14670,7 +14680,7 @@ dependencies = [

 [[package]]
 name = "zed_terraform"
-version = "0.1.0"
+version = "0.1.1"
 dependencies = [
  "zed_extension_api 0.1.0",
 ]
@@ -196,7 +196,7 @@
     }
   },
   {
-    "context": "BufferSearchBar && in_replace",
+    "context": "BufferSearchBar && in_replace > Editor",
     "bindings": {
       "enter": "search::ReplaceNext",
       "ctrl-enter": "search::ReplaceAll"
@@ -310,6 +310,11 @@
       "ctrl-shift-\\": "editor::MoveToEnclosingBracket",
       "ctrl-shift-[": "editor::Fold",
       "ctrl-shift-]": "editor::UnfoldLines",
+      "ctrl-k ctrl-l": "editor::ToggleFold",
+      "ctrl-k ctrl-[": "editor::FoldRecursive",
+      "ctrl-k ctrl-]": "editor::UnfoldRecursive",
+      "ctrl-k ctrl-0": "editor::FoldAll",
+      "ctrl-k ctrl-j": "editor::UnfoldAll",
       "ctrl-space": "editor::ShowCompletions",
       "ctrl-.": "editor::ToggleCodeActions",
       "alt-ctrl-r": "editor::RevealInFileManager",
@@ -520,6 +525,13 @@
       "alt-enter": "editor::Newline"
     }
   },
+  {
+    "context": "PromptEditor",
+    "bindings": {
+      "ctrl-[": "assistant::CyclePreviousInlineAssist",
+      "ctrl-]": "assistant::CycleNextInlineAssist"
+    }
+  },
   {
     "context": "ProjectSearchBar && !in_replace",
     "bindings": {
@@ -232,7 +232,7 @@
     }
   },
   {
-    "context": "BufferSearchBar && in_replace",
+    "context": "BufferSearchBar && in_replace > Editor",
     "bindings": {
       "enter": "search::ReplaceNext",
       "cmd-enter": "search::ReplaceAll"
@@ -347,6 +347,11 @@
       "cmd-shift-\\": "editor::MoveToEnclosingBracket",
       "alt-cmd-[": "editor::Fold",
       "alt-cmd-]": "editor::UnfoldLines",
+      "cmd-k cmd-l": "editor::ToggleFold",
+      "cmd-k cmd-[": "editor::FoldRecursive",
+      "cmd-k cmd-]": "editor::UnfoldRecursive",
+      "cmd-k cmd-0": "editor::FoldAll",
+      "cmd-k cmd-j": "editor::UnfoldAll",
       "ctrl-space": "editor::ShowCompletions",
       "cmd-.": "editor::ToggleCodeActions",
       "alt-cmd-r": "editor::RevealInFileManager",
@@ -527,6 +532,13 @@
       "ctrl-enter": "assistant::InlineAssist"
     }
   },
+  {
+    "context": "PromptEditor",
+    "bindings": {
+      "ctrl-[": "assistant::CyclePreviousInlineAssist",
+      "ctrl-]": "assistant::CycleNextInlineAssist"
+    }
+  },
   {
     "context": "ProjectSearchBar && !in_replace",
     "bindings": {
@@ -124,7 +124,6 @@
     "g i": "vim::InsertAtPrevious",
     "g ,": "vim::ChangeListNewer",
     "g ;": "vim::ChangeListOlder",
-    "g q": "editor::Rewrap",
     "shift-h": "vim::WindowTop",
     "shift-m": "vim::WindowMiddle",
     "shift-l": "vim::WindowBottom",
@@ -133,9 +132,15 @@
     "z z": "editor::ScrollCursorCenter",
     "z .": ["workspace::SendKeystrokes", "z z ^"],
     "z b": "editor::ScrollCursorBottom",
+    "z a": "editor::ToggleFold",
+    "z A": "editor::ToggleFoldRecursive",
+    "z c": "editor::Fold",
+    "z C": "editor::FoldRecursive",
+    "z o": "editor::UnfoldLines",
+    "z O": "editor::UnfoldRecursive",
     "z f": "editor::FoldSelectedRanges",
     "z M": "editor::FoldAll",
     "z R": "editor::UnfoldAll",
     "shift-z shift-q": ["pane::CloseActiveItem", { "saveIntent": "skip" }],
     "shift-z shift-z": ["pane::CloseActiveItem", { "saveIntent": "saveAll" }],
     // Count support
@@ -240,6 +245,8 @@
     "g shift-u": ["vim::PushOperator", "Uppercase"],
     "g ~": ["vim::PushOperator", "OppositeCase"],
     "\"": ["vim::PushOperator", "Register"],
+    "g q": ["vim::PushOperator", "Rewrap"],
+    "g w": ["vim::PushOperator", "Rewrap"],
     "q": "vim::ToggleRecord",
     "shift-q": "vim::ReplayLastRecording",
     "@": ["vim::PushOperator", "ReplayRegister"],
@@ -291,6 +298,8 @@
     "g ctrl-x": ["vim::Decrement", { "step": true }],
     "shift-i": "vim::InsertBefore",
     "shift-a": "vim::InsertAfter",
+    "g I": "vim::VisualInsertFirstNonWhiteSpace",
+    "g A": "vim::VisualInsertEndOfLine",
     "shift-j": "vim::JoinLines",
     "r": ["vim::PushOperator", "Replace"],
     "ctrl-c": ["vim::SwitchMode", "Normal"],
@@ -301,6 +310,7 @@
     "i": ["vim::PushOperator", { "Object": { "around": false } }],
     "a": ["vim::PushOperator", { "Object": { "around": true } }],
     "g c": "vim::ToggleComments",
+    "g q": "vim::Rewrap",
     "\"": ["vim::PushOperator", "Register"],
     // tree-sitter related commands
     "[ x": "editor::SelectLargerSyntaxNode",
@@ -428,6 +438,15 @@
       "~": "vim::CurrentLine"
     }
   },
+  {
+    "context": "vim_operator == gq",
+    "bindings": {
+      "g q": "vim::CurrentLine",
+      "q": "vim::CurrentLine",
+      "g w": "vim::CurrentLine",
+      "w": "vim::CurrentLine"
+    }
+  },
   {
     "context": "vim_operator == y",
     "bindings": {
@@ -47,6 +47,17 @@ And here's the section to rewrite based on that prompt again for reference:

 <rewrite_this>
 {{{rewrite_section}}}
 </rewrite_this>

+{{#if diagnostic_errors}}
+{{#each diagnostic_errors}}
+<diagnostic_error>
+<line_number>{{line_number}}</line_number>
+<error_message>{{error_message}}</error_message>
+<code_content>{{code_content}}</code_content>
+</diagnostic_error>
+{{/each}}
+{{/if}}
+
 {{/if}}

 Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved.
assets/prompts/project_slash_command.hbs (new file, +8)

@@ -0,0 +1,8 @@
+A software developer is asking a question about their project. The source files in their project have been indexed into a database of semantic text embeddings.
+Your task is to generate a list of 4 diverse search queries that can be run on this embedding database, in order to retrieve a list of code snippets
+that are relevant to the developer's question. Redundant search queries will be heavily penalized, so only include another query if it's sufficiently
+distinct from previous ones.
+
+Here is the question that's been asked, together with context that the developer has added manually:
+
+{{{context_buffer}}}
@@ -15,9 +15,11 @@
   // text editor:
   //
   // 1. "VSCode"
-  // 2. "JetBrains"
-  // 3. "SublimeText"
-  // 4. "Atom"
+  // 2. "Atom"
+  // 3. "JetBrains"
+  // 4. "None"
+  // 5. "SublimeText"
+  // 6. "TextMate"
   "base_keymap": "VSCode",
   // Features that can be globally enabled or disabled
   "features": {
@@ -496,6 +498,11 @@
     // Whether a preview tab gets replaced when code navigation is used to navigate away from the tab.
     "enable_preview_from_code_navigation": false
   },
+  // Settings related to the file finder.
+  "file_finder": {
+    // Whether to show file icons in the file finder.
+    "file_icons": true
+  },
   // Whether or not to remove any trailing whitespace from lines of a buffer
   // before saving it.
   "remove_trailing_whitespace_on_save": true,
@@ -528,17 +535,16 @@
   // How to soft-wrap long lines of text.
   // Possible values:
   //
-  // 1. Do not soft wrap.
+  // 1. Prefer a single line generally, unless an overly long line is encountered.
   //      "soft_wrap": "none",
-  // 2. Prefer a single line generally, unless an overly long line is encountered.
-  //      "soft_wrap": "prefer_line",
-  // 3. Soft wrap lines that overflow the editor.
+  //      "soft_wrap": "prefer_line", // (deprecated, same as "none")
+  // 2. Soft wrap lines that overflow the editor.
   //      "soft_wrap": "editor_width",
-  // 4. Soft wrap lines at the preferred line length.
+  // 3. Soft wrap lines at the preferred line length.
   //      "soft_wrap": "preferred_line_length",
-  // 5. Soft wrap lines at the preferred line length or the editor width (whichever is smaller).
+  // 4. Soft wrap lines at the preferred line length or the editor width (whichever is smaller).
   //      "soft_wrap": "bounded",
-  "soft_wrap": "prefer_line",
+  "soft_wrap": "none",
   // The column at which to soft-wrap lines, for buffers where soft-wrap
   // is enabled.
   "preferred_line_length": 80,
@@ -755,6 +761,7 @@
   // }
   //
   "file_types": {
     "Plain Text": ["txt"],
+    "JSON": ["flake.lock"],
     "JSONC": [
       "**/.zed/**/*.json",
@@ -762,8 +769,24 @@
       "**/Zed/**/*.json",
       "tsconfig.json",
       "pyrightconfig.json"
-    ]
+    ],
+    "TOML": ["uv.lock"]
   },
+  /// By default use a recent system version of node, or install our own.
+  /// You can override this to use a version of node that is not in $PATH with:
+  /// {
+  ///   "node": {
+  ///     "node_path": "/path/to/node"
+  ///     "npm_path": "/path/to/npm" (defaults to node_path/../npm)
+  ///   }
+  /// }
+  /// or to ensure Zed always downloads and installs an isolated version of node:
+  /// {
+  ///   "node": {
+  ///     "ignore_system_version": true,
+  ///   }
+  /// }
+  /// NOTE: changing this setting currently requires restarting Zed.
+  "node": {},
   // The extensions that Zed should automatically install on startup.
   //
   // If you don't want any of these extensions, add this field to your settings
@@ -1029,7 +1052,7 @@
   // environment variables.
   //
   // Examples:
-  // - "proxy": "socks5://localhost:10808"
+  // - "proxy": "socks5h://localhost:10808"
   // - "proxy": "http://127.0.0.1:10809"
   "proxy": null,
   // Set to configure aliases for the command palette.
@@ -19,7 +19,10 @@ use workspace::{item::ItemHandle, StatusItemView, Workspace};
 actions!(activity_indicator, [ShowErrorMessage]);

 pub enum Event {
-    ShowError { lsp_name: Arc<str>, error: String },
+    ShowError {
+        lsp_name: LanguageServerName,
+        error: String,
+    },
 }

 pub struct ActivityIndicator {
@@ -123,7 +126,7 @@ impl ActivityIndicator {
         self.statuses.retain(|status| {
             if let LanguageServerBinaryStatus::Failed { error } = &status.status {
                 cx.emit(Event::ShowError {
-                    lsp_name: status.name.0.clone(),
+                    lsp_name: status.name.clone(),
                     error: error.clone(),
                 });
                 false
@@ -224,10 +227,10 @@ impl ActivityIndicator {
         for status in &self.statuses {
             match status.status {
                 LanguageServerBinaryStatus::CheckingForUpdate => {
-                    checking_for_update.push(status.name.0.as_ref())
+                    checking_for_update.push(status.name.clone())
                 }
-                LanguageServerBinaryStatus::Downloading => downloading.push(status.name.0.as_ref()),
-                LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.0.as_ref()),
+                LanguageServerBinaryStatus::Downloading => downloading.push(status.name.clone()),
+                LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.clone()),
                 LanguageServerBinaryStatus::None => {}
             }
         }
@@ -239,8 +242,24 @@ impl ActivityIndicator {
                         .size(IconSize::Small)
                         .into_any_element(),
                 ),
-                message: format!("Downloading {}...", downloading.join(", "),),
-                on_click: None,
+                message: format!(
+                    "Downloading {}...",
+                    downloading.iter().map(|name| name.0.as_ref()).fold(
+                        String::new(),
+                        |mut acc, s| {
+                            if !acc.is_empty() {
+                                acc.push_str(", ");
+                            }
+                            acc.push_str(s);
+                            acc
+                        }
+                    )
+                ),
+                on_click: Some(Arc::new(move |this, cx| {
+                    this.statuses
+                        .retain(|status| !downloading.contains(&status.name));
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             });
         }
@@ -253,9 +272,22 @@ impl ActivityIndicator {
                 ),
                 message: format!(
                     "Checking for updates to {}...",
-                    checking_for_update.join(", "),
+                    checking_for_update.iter().map(|name| name.0.as_ref()).fold(
+                        String::new(),
+                        |mut acc, s| {
+                            if !acc.is_empty() {
+                                acc.push_str(", ");
+                            }
+                            acc.push_str(s);
+                            acc
+                        }
+                    ),
                 ),
-                on_click: None,
+                on_click: Some(Arc::new(move |this, cx| {
+                    this.statuses
+                        .retain(|status| !checking_for_update.contains(&status.name));
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             });
         }
@@ -267,8 +299,17 @@ impl ActivityIndicator {
                         .into_any_element(),
                 ),
                 message: format!(
-                    "Failed to download {}. Click to show error.",
-                    failed.join(", "),
+                    "Failed to run {}. Click to show error.",
+                    failed
+                        .iter()
+                        .map(|name| name.0.as_ref())
+                        .fold(String::new(), |mut acc, s| {
+                            if !acc.is_empty() {
+                                acc.push_str(", ");
+                            }
+                            acc.push_str(s);
+                            acc
+                        }),
                 ),
                 on_click: Some(Arc::new(|this, cx| {
                     this.show_error_message(&Default::default(), cx)
@@ -277,7 +318,7 @@ impl ActivityIndicator {
         }

         // Show any formatting failure
-        if let Some(failure) = self.project.read(cx).last_formatting_failure() {
+        if let Some(failure) = self.project.read(cx).last_formatting_failure(cx) {
             return Some(Content {
                 icon: Some(
                     Icon::new(IconName::Warning)
@@ -301,7 +342,9 @@ impl ActivityIndicator {
                         .into_any_element(),
                 ),
                 message: "Checking for Zed updates…".to_string(),
-                on_click: None,
+                on_click: Some(Arc::new(|this, cx| {
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             }),
             AutoUpdateStatus::Downloading => Some(Content {
                 icon: Some(
@@ -310,7 +353,9 @@ impl ActivityIndicator {
                         .into_any_element(),
                 ),
                 message: "Downloading Zed update…".to_string(),
-                on_click: None,
+                on_click: Some(Arc::new(|this, cx| {
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             }),
             AutoUpdateStatus::Installing => Some(Content {
                 icon: Some(
@@ -319,7 +364,9 @@ impl ActivityIndicator {
                         .into_any_element(),
                 ),
                 message: "Installing Zed update…".to_string(),
-                on_click: None,
+                on_click: Some(Arc::new(|this, cx| {
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             }),
             AutoUpdateStatus::Updated { binary_path } => Some(Content {
                 icon: None,
@@ -339,7 +386,7 @@ impl ActivityIndicator {
                 ),
                 message: "Auto update failed".to_string(),
                 on_click: Some(Arc::new(|this, cx| {
-                    this.dismiss_error_message(&Default::default(), cx)
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
                 })),
             }),
             AutoUpdateStatus::Idle => None,
@@ -357,7 +404,9 @@ impl ActivityIndicator {
                         .into_any_element(),
                 ),
                 message: format!("Updating {extension_id} extension…"),
-                on_click: None,
+                on_click: Some(Arc::new(|this, cx| {
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             });
         }
     }
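A note on the repeated fold in the hunks above: once the status lists hold `LanguageServerName` values (a newtype over `Arc<str>`) instead of `&str` borrows, `Vec::join` no longer applies directly, and owning the names is what lets the new `on_click` closures later remove exactly those statuses by equality. A minimal standalone sketch of the same pattern, using a simplified stand-in for the newtype (the real type lives in Zed's `lsp` crate):

```rust
use std::sync::Arc;

// Simplified stand-in for Zed's LanguageServerName newtype.
#[derive(Clone, PartialEq)]
struct LanguageServerName(Arc<str>);

// Join wrapped names with ", " without collecting an intermediate Vec<&str>.
fn join_names(names: &[LanguageServerName]) -> String {
    names
        .iter()
        .map(|name| name.0.as_ref())
        .fold(String::new(), |mut acc, s| {
            if !acc.is_empty() {
                acc.push_str(", ");
            }
            acc.push_str(s);
            acc
        })
}

fn main() {
    let names = vec![
        LanguageServerName("rust-analyzer".into()),
        LanguageServerName("gopls".into()),
    ];
    assert_eq!(join_names(&names), "rust-analyzer, gopls");
}
```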
@@ -20,7 +20,6 @@ anyhow.workspace = true
 chrono.workspace = true
 futures.workspace = true
 http_client.workspace = true
-isahc.workspace = true
 schemars = { workspace = true, optional = true }
 serde.workspace = true
 serde_json.workspace = true
@@ -6,9 +6,8 @@ use std::{pin::Pin, str::FromStr};
 use anyhow::{anyhow, Context, Result};
 use chrono::{DateTime, Utc};
 use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
-use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
-use isahc::config::Configurable;
-use isahc::http::{HeaderMap, HeaderValue};
+use http_client::http::{HeaderMap, HeaderValue};
+use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
 use serde::{Deserialize, Serialize};
 use strum::{EnumIter, EnumString};
 use thiserror::Error;
@@ -49,6 +48,7 @@ pub enum Model {
         /// Indicates whether this custom model supports caching.
         cache_configuration: Option<AnthropicModelCacheConfiguration>,
         max_output_tokens: Option<u32>,
+        default_temperature: Option<f32>,
     },
 }
@@ -124,6 +124,19 @@ impl Model {
         }
     }

+    pub fn default_temperature(&self) -> f32 {
+        match self {
+            Self::Claude3_5Sonnet
+            | Self::Claude3Opus
+            | Self::Claude3Sonnet
+            | Self::Claude3Haiku => 1.0,
+            Self::Custom {
+                default_temperature,
+                ..
+            } => default_temperature.unwrap_or(1.0),
+        }
+    }
+
     pub fn tool_model_id(&self) -> &str {
         if let Self::Custom {
             tool_override: Some(tool_override),
@@ -275,7 +288,7 @@ pub async fn stream_completion_with_rate_limit_info(
         .header("X-Api-Key", api_key)
         .header("Content-Type", "application/json");
     if let Some(low_speed_timeout) = low_speed_timeout {
-        request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
+        request_builder = request_builder.read_timeout(low_speed_timeout);
     }
     let serialized_request =
         serde_json::to_string(&request).context("failed to serialize request")?;
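The new `default_temperature()` accessor follows a common shape for enums with a catch-all `Custom` variant: built-in variants return a hard-coded default, while the custom variant consults its optional override. A hedged sketch of how a caller might thread it into a request — the `Request` type here is hypothetical, standing in for the per-request `temperature: Option<f32>` that other hunks in this diff switch to:

```rust
#[derive(Clone)]
enum Model {
    Claude3_5Sonnet,
    Custom { default_temperature: Option<f32> },
}

impl Model {
    fn default_temperature(&self) -> f32 {
        match self {
            Self::Claude3_5Sonnet => 1.0,
            Self::Custom { default_temperature } => default_temperature.unwrap_or(1.0),
        }
    }
}

// Hypothetical request type: a temperature of None falls back to the model's
// default, mirroring the `temperature: None` changes elsewhere in this diff.
struct Request {
    temperature: Option<f32>,
}

fn effective_temperature(request: &Request, model: &Model) -> f32 {
    request.temperature.unwrap_or_else(|| model.default_temperature())
}

fn main() {
    let model = Model::Custom { default_temperature: Some(0.2) };
    let request = Request { temperature: None };
    assert_eq!(effective_temperature(&request, &model), 0.2);
}
```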
@@ -51,6 +51,7 @@ indoc.workspace = true
 language.workspace = true
 language_model.workspace = true
 log.workspace = true
+lsp.workspace = true
 markdown.workspace = true
 menu.workspace = true
 multi_buffer.workspace = true
@@ -41,9 +41,10 @@ use semantic_index::{CloudEmbeddingProvider, SemanticDb};
 use serde::{Deserialize, Serialize};
 use settings::{update_settings_file, Settings, SettingsStore};
 use slash_command::{
-    auto_command, context_server_command, default_command, delta_command, diagnostics_command,
-    docs_command, fetch_command, file_command, now_command, project_command, prompt_command,
-    search_command, symbols_command, tab_command, terminal_command, workflow_command,
+    auto_command, cargo_workspace_command, context_server_command, default_command, delta_command,
+    diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command,
+    prompt_command, search_command, symbols_command, tab_command, terminal_command,
+    workflow_command,
 };
 use std::path::PathBuf;
 use std::sync::Arc;
@@ -69,6 +70,8 @@ actions!(
         ConfirmCommand,
         NewContext,
         ToggleModelSelector,
+        CycleNextInlineAssist,
+        CyclePreviousInlineAssist
     ]
 );
@@ -359,8 +362,19 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) {
     let settings = AssistantSettings::get_global(cx);
     let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone());
     let model_id = LanguageModelId::from(settings.default_model.model.clone());
+    let inline_alternatives = settings
+        .inline_alternatives
+        .iter()
+        .map(|alternative| {
+            (
+                LanguageModelProviderId::from(alternative.provider.clone()),
+                LanguageModelId::from(alternative.model.clone()),
+            )
+        })
+        .collect::<Vec<_>>();
     LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
         registry.select_active_model(&provider_name, &model_id, cx);
+        registry.select_inline_alternative_models(inline_alternatives, cx);
     });
 }
@@ -371,20 +385,33 @@ fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut
     slash_command_registry.register_command(delta_command::DeltaSlashCommand, true);
     slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true);
     slash_command_registry.register_command(tab_command::TabSlashCommand, true);
-    slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
+    slash_command_registry
+        .register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true);
     slash_command_registry.register_command(prompt_command::PromptSlashCommand, true);
     slash_command_registry.register_command(default_command::DefaultSlashCommand, false);
     slash_command_registry.register_command(terminal_command::TerminalSlashCommand, true);
     slash_command_registry.register_command(now_command::NowSlashCommand, false);
     slash_command_registry.register_command(diagnostics_command::DiagnosticsSlashCommand, true);
-    slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);

     if let Some(prompt_builder) = prompt_builder {
         slash_command_registry.register_command(
             workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()),
             true,
         );
+        cx.observe_flag::<project_command::ProjectSlashCommandFeatureFlag, _>({
+            let slash_command_registry = slash_command_registry.clone();
+            move |is_enabled, _cx| {
+                if is_enabled {
+                    slash_command_registry.register_command(
+                        project_command::ProjectSlashCommand::new(prompt_builder.clone()),
+                        true,
+                    );
+                }
+            }
+        })
+        .detach();
     }
+    slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);

     cx.observe_flag::<auto_command::AutoSlashCommandFeatureFlag, _>({
         let slash_command_registry = slash_command_registry.clone();
@@ -422,10 +449,12 @@ fn update_slash_commands_from_settings(cx: &mut AppContext) {
         slash_command_registry.unregister_command(docs_command::DocsSlashCommand);
     }

-    if settings.project.enabled {
-        slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
+    if settings.cargo_workspace.enabled {
+        slash_command_registry
+            .register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true);
     } else {
-        slash_command_registry.unregister_command(project_command::ProjectSlashCommand);
+        slash_command_registry
+            .unregister_command(cargo_workspace_command::CargoWorkspaceSlashCommand);
     }
 }
@@ -72,6 +72,7 @@ use std::{
     time::Duration,
 };
 use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
+use text::SelectionGoal;
 use ui::TintColor;
 use ui::{
     prelude::*,
@@ -960,7 +961,8 @@ impl AssistantPanel {
     }

     fn new_context(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ContextEditor>> {
-        if self.project.read(cx).is_via_collab() {
+        let project = self.project.read(cx);
+        if project.is_via_collab() && project.dev_server_project_id().is_none() {
             let task = self
                 .context_store
                 .update(cx, |store, cx| store.create_remote_context(cx));
@@ -2814,9 +2816,8 @@ impl ContextEditor {
         } else {
             // If there are multiple buffers or suggestion groups, create a multibuffer
             let multibuffer = cx.new_model(|cx| {
-                let replica_id = project.read(cx).replica_id();
-                let mut multibuffer = MultiBuffer::new(replica_id, Capability::ReadWrite)
-                    .with_title(resolved_step.title.clone());
+                let mut multibuffer =
+                    MultiBuffer::new(Capability::ReadWrite).with_title(resolved_step.title.clone());
                 for (buffer, groups) in &resolved_step.suggestion_groups {
                     let excerpt_ids = multibuffer.push_excerpts(
                         buffer.clone(),
@@ -3438,7 +3439,7 @@ impl ContextEditor {

     fn copy(&mut self, _: &editor::actions::Copy, cx: &mut ViewContext<Self>) {
         if self.editor.read(cx).selections.count() == 1 {
-            let (copied_text, metadata) = self.get_clipboard_contents(cx);
+            let (copied_text, metadata, _) = self.get_clipboard_contents(cx);
             cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata(
                 copied_text,
                 metadata,
@@ -3452,11 +3453,9 @@ impl ContextEditor {

     fn cut(&mut self, _: &editor::actions::Cut, cx: &mut ViewContext<Self>) {
         if self.editor.read(cx).selections.count() == 1 {
-            let (copied_text, metadata) = self.get_clipboard_contents(cx);
+            let (copied_text, metadata, selections) = self.get_clipboard_contents(cx);

             self.editor.update(cx, |editor, cx| {
-                let selections = editor.selections.all::<Point>(cx);
-
                 editor.transact(cx, |this, cx| {
                     this.change_selections(Some(Autoscroll::fit()), cx, |s| {
                         s.select(selections);
@@ -3476,52 +3475,71 @@ impl ContextEditor {
         cx.propagate();
     }

-    fn get_clipboard_contents(&mut self, cx: &mut ViewContext<Self>) -> (String, CopyMetadata) {
-        let creases = self.editor.update(cx, |editor, cx| {
-            let selection = editor.selections.newest::<Point>(cx);
-            let selection_start = editor.selections.newest::<usize>(cx).start;
+    fn get_clipboard_contents(
+        &mut self,
+        cx: &mut ViewContext<Self>,
+    ) -> (String, CopyMetadata, Vec<text::Selection<usize>>) {
+        let (snapshot, selection, creases) = self.editor.update(cx, |editor, cx| {
+            let mut selection = editor.selections.newest::<Point>(cx);
             let snapshot = editor.buffer().read(cx).snapshot(cx);
-            editor.display_map.update(cx, |display_map, cx| {
-                display_map
-                    .snapshot(cx)
-                    .crease_snapshot
-                    .creases_in_range(
-                        MultiBufferRow(selection.start.row)..MultiBufferRow(selection.end.row + 1),
-                        &snapshot,
-                    )
-                    .filter_map(|crease| {
-                        if let Some(metadata) = &crease.metadata {
-                            let start = crease
-                                .range
-                                .start
-                                .to_offset(&snapshot)
-                                .saturating_sub(selection_start);
-                            let end = crease
-                                .range
-                                .end
-                                .to_offset(&snapshot)
-                                .saturating_sub(selection_start);
-
-                            let range_relative_to_selection = start..end;
-                            if range_relative_to_selection.is_empty() {
-                                None
-                            } else {
-                                Some(SelectedCreaseMetadata {
-                                    range_relative_to_selection,
-                                    crease: metadata.clone(),
-                                })
-                            }
-                        } else {
-                            None
-                        }
-                    })
-                    .collect::<Vec<_>>()
-            })
-        });

+            let is_entire_line = selection.is_empty() || editor.selections.line_mode;
+            if is_entire_line {
+                selection.start = Point::new(selection.start.row, 0);
+                selection.end =
+                    cmp::min(snapshot.max_point(), Point::new(selection.start.row + 1, 0));
+                selection.goal = SelectionGoal::None;
+            }
+
+            let selection_start = snapshot.point_to_offset(selection.start);
+
+            (
+                snapshot.clone(),
+                selection.clone(),
+                editor.display_map.update(cx, |display_map, cx| {
+                    display_map
+                        .snapshot(cx)
+                        .crease_snapshot
+                        .creases_in_range(
+                            MultiBufferRow(selection.start.row)
+                                ..MultiBufferRow(selection.end.row + 1),
+                            &snapshot,
+                        )
+                        .filter_map(|crease| {
+                            if let Some(metadata) = &crease.metadata {
+                                let start = crease
+                                    .range
+                                    .start
+                                    .to_offset(&snapshot)
+                                    .saturating_sub(selection_start);
+                                let end = crease
+                                    .range
+                                    .end
+                                    .to_offset(&snapshot)
+                                    .saturating_sub(selection_start);
+
+                                let range_relative_to_selection = start..end;
+
+                                if range_relative_to_selection.is_empty() {
+                                    None
+                                } else {
+                                    Some(SelectedCreaseMetadata {
+                                        range_relative_to_selection,
+                                        crease: metadata.clone(),
+                                    })
+                                }
+                            } else {
+                                None
+                            }
+                        })
+                        .collect::<Vec<_>>()
+                }),
+            )
+        });

+        let selection = selection.map(|point| snapshot.point_to_offset(point));
         let context = self.context.read(cx);
-        let selection = self.editor.read(cx).selections.newest::<usize>(cx);

         let mut text = String::new();
         for message in context.messages(cx) {
             if message.offset_range.start >= selection.range().end {
@@ -3540,7 +3558,7 @@ impl ContextEditor {
             }
         }

-        (text, CopyMetadata { creases })
+        (text, CopyMetadata { creases }, vec![selection])
     }

     fn paste(&mut self, action: &editor::actions::Paste, cx: &mut ViewContext<Self>) {
@@ -59,6 +59,7 @@ pub struct AssistantSettings {
     pub default_width: Pixels,
     pub default_height: Pixels,
     pub default_model: LanguageModelSelection,
+    pub inline_alternatives: Vec<LanguageModelSelection>,
     pub using_outdated_settings_version: bool,
 }
@@ -236,6 +237,7 @@ impl AssistantSettingsContent {
                     })
                 }
             }),
+            inline_alternatives: None,
         },
         VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
     },
@@ -254,6 +256,7 @@ impl AssistantSettingsContent {
                     .id()
                     .to_string(),
             }),
+            inline_alternatives: None,
         },
     }
 }
@@ -369,6 +372,7 @@ impl Default for VersionedAssistantSettingsContent {
             default_width: None,
             default_height: None,
             default_model: None,
+            inline_alternatives: None,
         })
     }
 }
@@ -397,6 +401,8 @@ pub struct AssistantSettingsContentV2 {
     default_height: Option<f32>,
     /// The default model to use when creating new contexts.
     default_model: Option<LanguageModelSelection>,
+    /// Additional models with which to generate alternatives when performing inline assists.
+    inline_alternatives: Option<Vec<LanguageModelSelection>>,
 }

 #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
@@ -517,10 +523,8 @@ impl Settings for AssistantSettings {
             &mut settings.default_height,
             value.default_height.map(Into::into),
         );
-        merge(
-            &mut settings.default_model,
-            value.default_model.map(Into::into),
-        );
+        merge(&mut settings.default_model, value.default_model);
+        merge(&mut settings.inline_alternatives, value.inline_alternatives);
         // merge(&mut settings.infer_context, value.infer_context); TODO re-enable this once we ship context inference
     }
@@ -574,6 +578,7 @@ mod tests {
                 provider: "test-provider".into(),
                 model: "gpt-99".into(),
             }),
+            inline_alternatives: None,
             enabled: None,
             button: None,
             dock: None,

@@ -683,7 +683,7 @@ impl Context {
             buffer.set_text(saved_context.text.as_str(), cx)
         });
         let operations = saved_context.into_ops(&this.buffer, cx);
-        this.apply_ops(operations, cx).unwrap();
+        this.apply_ops(operations, cx);
         this
     }
@@ -756,7 +756,7 @@ impl Context {
         &mut self,
         ops: impl IntoIterator<Item = ContextOperation>,
         cx: &mut ModelContext<Self>,
-    ) -> Result<()> {
+    ) {
         let mut buffer_ops = Vec::new();
         for op in ops {
             match op {
@@ -765,10 +765,8 @@ impl Context {
             }
         }
         self.buffer
-            .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?;
+            .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx));
         self.flush_ops(cx);
-
-        Ok(())
     }

     fn flush_ops(&mut self, cx: &mut ModelContext<Context>) {
@@ -1008,9 +1006,12 @@ impl Context {
         cx: &mut ModelContext<Self>,
     ) {
         match event {
-            language::BufferEvent::Operation(operation) => cx.emit(ContextEvent::Operation(
-                ContextOperation::BufferOperation(operation.clone()),
-            )),
+            language::BufferEvent::Operation {
+                operation,
+                is_local: true,
+            } => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation(
+                operation.clone(),
+            ))),
             language::BufferEvent::Edited => {
                 self.count_remaining_tokens(cx);
                 self.reparse(cx);
@@ -1969,8 +1970,9 @@ impl Context {
     }

     pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
-        let provider = LanguageModelRegistry::read_global(cx).active_provider()?;
-        let model = LanguageModelRegistry::read_global(cx).active_model()?;
+        let model_registry = LanguageModelRegistry::read_global(cx);
+        let provider = model_registry.active_provider()?;
+        let model = model_registry.active_model()?;
         let last_message_id = self.get_last_valid_message_id(cx)?;

         if !provider.is_authenticated(cx) {
@@ -2182,7 +2184,7 @@ impl Context {
             messages: Vec::new(),
             tools: Vec::new(),
             stop: Vec::new(),
-            temperature: 1.0,
+            temperature: None,
         };
         for message in self.messages(cx) {
             if message.status != MessageStatus::Done {

@@ -1166,9 +1166,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: StdRng)
     );

     network.lock().broadcast(replica_id, ops_to_send);
-    context
-        .update(cx, |context, cx| context.apply_ops(ops_to_receive, cx))
-        .unwrap();
+    context.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx));
 } else if rng.gen_bool(0.1) && replica_id != 0 {
     log::info!("Context {}: disconnecting", context_index);
     network.lock().disconnect_peer(replica_id);
@@ -1180,9 +1178,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: StdRng)
         .map(ContextOperation::from_proto)
         .collect::<Result<Vec<_>>>()
         .unwrap();
-    context
-        .update(cx, |context, cx| context.apply_ops(ops, cx))
-        .unwrap();
+    context.update(cx, |context, cx| context.apply_ops(ops, cx));
 }
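The reworked `BufferEvent::Operation` match above is worth a note: the event became a struct variant carrying an `is_local` flag, and the arm binds `operation` while pattern-matching `is_local: true` as a literal, so remotely-replayed operations are no longer re-broadcast. A self-contained sketch of that matching style, with stand-in types:

```rust
// Stand-ins for the buffer event and operation types.
struct Operation(u32);

enum BufferEvent {
    Operation { operation: Operation, is_local: bool },
    Edited,
}

// Only locally-generated operations are forwarded; remote ones were already
// broadcast by their originating replica.
fn forward(event: BufferEvent) -> Option<Operation> {
    match event {
        BufferEvent::Operation {
            operation,
            is_local: true,
        } => Some(operation),
        _ => None,
    }
}

fn main() {
    assert!(forward(BufferEvent::Operation { operation: Operation(1), is_local: true }).is_some());
    assert!(forward(BufferEvent::Operation { operation: Operation(2), is_local: false }).is_none());
    assert!(forward(BufferEvent::Edited).is_none());
}
```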
||||
@@ -223,7 +223,7 @@ impl ContextStore {
 if let Some(context) = this.loaded_context_for_id(&context_id, cx) {
 let operation_proto = envelope.payload.operation.context("invalid operation")?;
 let operation = ContextOperation::from_proto(operation_proto)?;
-context.update(cx, |context, cx| context.apply_ops([operation], cx))?;
+context.update(cx, |context, cx| context.apply_ops([operation], cx));
 }
 Ok(())
 })?
@@ -357,9 +357,6 @@ impl ContextStore {
 let Some(project_id) = project.remote_id() else {
 return Task::ready(Err(anyhow!("project was not remote")));
 };
-if project.is_local_or_ssh() {
-return Task::ready(Err(anyhow!("cannot create remote contexts as the host")));
-}

 let replica_id = project.replica_id();
 let capability = project.capability();
@@ -394,7 +391,7 @@ impl ContextStore {
 .collect::<Result<Vec<_>>>()
 })
 .await?;
-context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??;
+context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
 this.update(&mut cx, |this, cx| {
 if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
 existing_context
@@ -488,9 +485,6 @@ impl ContextStore {
 let Some(project_id) = project.remote_id() else {
 return Task::ready(Err(anyhow!("project was not remote")));
 };
-if project.is_local_or_ssh() {
-return Task::ready(Err(anyhow!("cannot open remote contexts as the host")));
-}

 if let Some(context) = self.loaded_context_for_id(&context_id, cx) {
 return Task::ready(Ok(context));
@@ -531,7 +525,7 @@ impl ContextStore {
 .collect::<Result<Vec<_>>>()
 })
 .await?;
-context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??;
+context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
 this.update(&mut cx, |this, cx| {
 if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
 existing_context
(File diff suppressed because it is too large.)
@@ -796,7 +796,7 @@ impl PromptLibrary {
 }],
 tools: Vec::new(),
 stop: Vec::new(),
-temperature: 1.,
+temperature: None,
 },
 cx,
 )
@@ -4,13 +4,20 @@ use fs::Fs;
 use futures::StreamExt;
 use gpui::AssetSource;
 use handlebars::{Handlebars, RenderError};
-use language::{BufferSnapshot, LanguageName};
+use language::{BufferSnapshot, LanguageName, Point};
 use parking_lot::Mutex;
 use serde::Serialize;
 use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration};
 use text::LineEnding;
 use util::ResultExt;

+#[derive(Serialize)]
+pub struct ContentPromptDiagnosticContext {
+pub line_number: usize,
+pub error_message: String,
+pub code_content: String,
+}
+
 #[derive(Serialize)]
 pub struct ContentPromptContext {
 pub content_type: String,
@@ -20,6 +27,7 @@ pub struct ContentPromptContext {
 pub document_content: String,
 pub user_prompt: String,
 pub rewrite_section: Option<String>,
+pub diagnostic_errors: Vec<ContentPromptDiagnosticContext>,
 }

 #[derive(Serialize)]
@@ -32,6 +40,11 @@ pub struct TerminalAssistantPromptContext {
 pub user_prompt: String,
 }

+#[derive(Serialize)]
+pub struct ProjectSlashCommandPromptContext {
+pub context_buffer: String,
+}
+
 /// Context required to generate a workflow step resolution prompt.
 #[derive(Debug, Serialize)]
 pub struct StepResolutionContext {
@@ -82,10 +95,9 @@ impl PromptBuilder {
 /// and application context.
 /// * `handlebars` - An `Arc<Mutex<Handlebars>>` for registering and updating templates.
 fn watch_fs_for_template_overrides(
-mut params: PromptLoadingParams,
+params: PromptLoadingParams,
 handlebars: Arc<Mutex<Handlebars<'static>>>,
 ) {
-params.repo_path = None;
 let templates_dir = paths::prompt_overrides_dir(params.repo_path.as_deref());
 params.cx.background_executor()
 .spawn(async move {
@@ -261,6 +273,17 @@ impl PromptBuilder {
 } else {
 None
 };
+let diagnostics = buffer.diagnostics_in_range::<_, Point>(range, false);
+let diagnostic_errors: Vec<ContentPromptDiagnosticContext> = diagnostics
+.map(|entry| {
+let start = entry.range.start;
+ContentPromptDiagnosticContext {
+line_number: (start.row + 1) as usize,
+error_message: entry.diagnostic.message.clone(),
+code_content: buffer.text_for_range(entry.range.clone()).collect(),
+}
+})
+.collect();

 let context = ContentPromptContext {
 content_type: content_type.to_string(),
@@ -270,8 +293,8 @@ impl PromptBuilder {
 document_content,
 user_prompt,
 rewrite_section,
+diagnostic_errors,
 };

 self.handlebars.lock().render("content_prompt", &context)
 }
@@ -299,4 +322,14 @@ impl PromptBuilder {
 pub fn generate_workflow_prompt(&self) -> Result<String, RenderError> {
 self.handlebars.lock().render("edit_workflow", &())
 }
+
+pub fn generate_project_slash_command_prompt(
+&self,
+context_buffer: String,
+) -> Result<String, RenderError> {
+self.handlebars.lock().render(
+"project_slash_command",
+&ProjectSlashCommandPromptContext { context_buffer },
+)
+}
 }
@@ -18,8 +18,8 @@ use std::{
 };
 use ui::ActiveTheme;
 use workspace::Workspace;

 pub mod auto_command;
+pub mod cargo_workspace_command;
 pub mod context_server_command;
 pub mod default_command;
 pub mod delta_command;
@@ -216,7 +216,7 @@ async fn commands_for_summaries(
 }],
 tools: Vec::new(),
 stop: Vec::new(),
-temperature: 1.0,
+temperature: None,
 };

 while let Some(current_summaries) = stack.pop() {
crates/assistant/src/slash_command/cargo_workspace_command.rs (new file, 153 lines)
@@ -0,0 +1,153 @@
use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context, Result};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use fs::Fs;
use gpui::{AppContext, Model, Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use project::{Project, ProjectPath};
use std::{
    fmt::Write,
    path::Path,
    sync::{atomic::AtomicBool, Arc},
};
use ui::prelude::*;
use workspace::Workspace;

pub(crate) struct CargoWorkspaceSlashCommand;

impl CargoWorkspaceSlashCommand {
    async fn build_message(fs: Arc<dyn Fs>, path_to_cargo_toml: &Path) -> Result<String> {
        let buffer = fs.load(path_to_cargo_toml).await?;
        let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?;

        let mut message = String::new();
        writeln!(message, "You are in a Rust project.")?;

        if let Some(workspace) = cargo_toml.workspace {
            writeln!(
                message,
                "The project is a Cargo workspace with the following members:"
            )?;
            for member in workspace.members {
                writeln!(message, "- {member}")?;
            }

            if !workspace.default_members.is_empty() {
                writeln!(message, "The default members are:")?;
                for member in workspace.default_members {
                    writeln!(message, "- {member}")?;
                }
            }

            if !workspace.dependencies.is_empty() {
                writeln!(
                    message,
                    "The following workspace dependencies are installed:"
                )?;
                for dependency in workspace.dependencies.keys() {
                    writeln!(message, "- {dependency}")?;
                }
            }
        } else if let Some(package) = cargo_toml.package {
            writeln!(
                message,
                "The project name is \"{name}\".",
                name = package.name
            )?;

            let description = package
                .description
                .as_ref()
                .and_then(|description| description.get().ok().cloned());
            if let Some(description) = description.as_ref() {
                writeln!(message, "It describes itself as \"{description}\".")?;
            }

            if !cargo_toml.dependencies.is_empty() {
                writeln!(message, "The following dependencies are installed:")?;
                for dependency in cargo_toml.dependencies.keys() {
                    writeln!(message, "- {dependency}")?;
                }
            }
        }

        Ok(message)
    }

    fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
        let worktree = project.read(cx).worktrees(cx).next()?;
        let worktree = worktree.read(cx);
        let entry = worktree.entry_for_path("Cargo.toml")?;
        let path = ProjectPath {
            worktree_id: worktree.id(),
            path: entry.path.clone(),
        };
        Some(Arc::from(
            project.read(cx).absolute_path(&path, cx)?.as_path(),
        ))
    }
}

impl SlashCommand for CargoWorkspaceSlashCommand {
    fn name(&self) -> String {
        "cargo-workspace".into()
    }

    fn description(&self) -> String {
        "insert project workspace metadata".into()
    }

    fn menu_text(&self) -> String {
        "Insert Project Workspace Metadata".into()
    }

    fn complete_argument(
        self: Arc<Self>,
        _arguments: &[String],
        _cancel: Arc<AtomicBool>,
        _workspace: Option<WeakView<Workspace>>,
        _cx: &mut WindowContext,
    ) -> Task<Result<Vec<ArgumentCompletion>>> {
        Task::ready(Err(anyhow!("this command does not require argument")))
    }

    fn requires_argument(&self) -> bool {
        false
    }

    fn run(
        self: Arc<Self>,
        _arguments: &[String],
        _context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
        _context_buffer: BufferSnapshot,
        workspace: WeakView<Workspace>,
        _delegate: Option<Arc<dyn LspAdapterDelegate>>,
        cx: &mut WindowContext,
    ) -> Task<Result<SlashCommandOutput>> {
        let output = workspace.update(cx, |workspace, cx| {
            let project = workspace.project().clone();
            let fs = workspace.project().read(cx).fs().clone();
            let path = Self::path_to_cargo_toml(project, cx);
            let output = cx.background_executor().spawn(async move {
                let path = path.with_context(|| "Cargo.toml not found")?;
                Self::build_message(fs, &path).await
            });

            cx.foreground_executor().spawn(async move {
                let text = output.await?;
                let range = 0..text.len();
                Ok(SlashCommandOutput {
                    text,
                    sections: vec![SlashCommandOutputSection {
                        range,
                        icon: IconName::FileTree,
                        label: "Project".into(),
                        metadata: None,
                    }],
                    run_commands_in_text: false,
                })
            })
        });
        output.unwrap_or_else(|error| Task::ready(Err(error)))
    }
}
@@ -1,90 +1,39 @@
-use super::{SlashCommand, SlashCommandOutput};
-use anyhow::{anyhow, Context, Result};
+use super::{
+create_label_for_command, search_command::add_search_result_section, SlashCommand,
+SlashCommandOutput,
+};
+use crate::PromptBuilder;
+use anyhow::{anyhow, Result};
 use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
-use fs::Fs;
-use gpui::{AppContext, Model, Task, WeakView};
-use language::{BufferSnapshot, LspAdapterDelegate};
-use project::{Project, ProjectPath};
+use feature_flags::FeatureFlag;
+use gpui::{AppContext, Task, WeakView, WindowContext};
+use language::{Anchor, CodeLabel, LspAdapterDelegate};
+use language_model::{LanguageModelRegistry, LanguageModelTool};
+use schemars::JsonSchema;
+use semantic_index::SemanticDb;
+use serde::Deserialize;
+
+pub struct ProjectSlashCommandFeatureFlag;
+
+impl FeatureFlag for ProjectSlashCommandFeatureFlag {
+const NAME: &'static str = "project-slash-command";
+}
+
 use std::{
-fmt::Write,
-path::Path,
+fmt::Write as _,
+ops::DerefMut,
 sync::{atomic::AtomicBool, Arc},
 };
-use ui::prelude::*;
+use ui::{BorrowAppContext as _, IconName};
 use workspace::Workspace;

-pub(crate) struct ProjectSlashCommand;
+pub struct ProjectSlashCommand {
+prompt_builder: Arc<PromptBuilder>,
+}

 impl ProjectSlashCommand {
-async fn build_message(fs: Arc<dyn Fs>, path_to_cargo_toml: &Path) -> Result<String> {
-let buffer = fs.load(path_to_cargo_toml).await?;
-let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?;
-
-let mut message = String::new();
-writeln!(message, "You are in a Rust project.")?;
-
-if let Some(workspace) = cargo_toml.workspace {
-writeln!(
-message,
-"The project is a Cargo workspace with the following members:"
-)?;
-for member in workspace.members {
-writeln!(message, "- {member}")?;
-}
-
-if !workspace.default_members.is_empty() {
-writeln!(message, "The default members are:")?;
-for member in workspace.default_members {
-writeln!(message, "- {member}")?;
-}
-}
-
-if !workspace.dependencies.is_empty() {
-writeln!(
-message,
-"The following workspace dependencies are installed:"
-)?;
-for dependency in workspace.dependencies.keys() {
-writeln!(message, "- {dependency}")?;
-}
-}
-} else if let Some(package) = cargo_toml.package {
-writeln!(
-message,
-"The project name is \"{name}\".",
-name = package.name
-)?;
-
-let description = package
-.description
-.as_ref()
-.and_then(|description| description.get().ok().cloned());
-if let Some(description) = description.as_ref() {
-writeln!(message, "It describes itself as \"{description}\".")?;
-}
-
-if !cargo_toml.dependencies.is_empty() {
-writeln!(message, "The following dependencies are installed:")?;
-for dependency in cargo_toml.dependencies.keys() {
-writeln!(message, "- {dependency}")?;
-}
-}
-}
-
-Ok(message)
-}
-
-fn path_to_cargo_toml(project: Model<Project>, cx: &mut AppContext) -> Option<Arc<Path>> {
-let worktree = project.read(cx).worktrees(cx).next()?;
-let worktree = worktree.read(cx);
-let entry = worktree.entry_for_path("Cargo.toml")?;
-let path = ProjectPath {
-worktree_id: worktree.id(),
-path: entry.path.clone(),
-};
-Some(Arc::from(
-project.read(cx).absolute_path(&path, cx)?.as_path(),
-))
+pub fn new(prompt_builder: Arc<PromptBuilder>) -> Self {
+Self { prompt_builder }
 }
 }
@@ -93,12 +42,20 @@ impl SlashCommand for ProjectSlashCommand {
 "project".into()
 }

+fn label(&self, cx: &AppContext) -> CodeLabel {
+create_label_for_command("project", &[], cx)
+}
+
 fn description(&self) -> String {
-"insert project metadata".into()
+"Generate semantic searches based on the current context".into()
 }

 fn menu_text(&self) -> String {
-"Insert Project Metadata".into()
+"Project Context".into()
 }

+fn requires_argument(&self) -> bool {
+false
+}
+
 fn complete_argument(
@@ -108,46 +65,126 @@ impl SlashCommand for ProjectSlashCommand {
 _workspace: Option<WeakView<Workspace>>,
 _cx: &mut WindowContext,
 ) -> Task<Result<Vec<ArgumentCompletion>>> {
-Task::ready(Err(anyhow!("this command does not require argument")))
-}
-
-fn requires_argument(&self) -> bool {
-false
+Task::ready(Ok(Vec::new()))
 }

 fn run(
 self: Arc<Self>,
 _arguments: &[String],
-_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
-_context_buffer: BufferSnapshot,
+_context_slash_command_output_sections: &[SlashCommandOutputSection<Anchor>],
+context_buffer: language::BufferSnapshot,
 workspace: WeakView<Workspace>,
 _delegate: Option<Arc<dyn LspAdapterDelegate>>,
 cx: &mut WindowContext,
 ) -> Task<Result<SlashCommandOutput>> {
-let output = workspace.update(cx, |workspace, cx| {
-let project = workspace.project().clone();
-let fs = workspace.project().read(cx).fs().clone();
-let path = Self::path_to_cargo_toml(project, cx);
-let output = cx.background_executor().spawn(async move {
-let path = path.with_context(|| "Cargo.toml not found")?;
-Self::build_message(fs, &path).await
-});
+let model_registry = LanguageModelRegistry::read_global(cx);
+let current_model = model_registry.active_model();
+let prompt_builder = self.prompt_builder.clone();

-cx.foreground_executor().spawn(async move {
-let text = output.await?;
-let range = 0..text.len();
-Ok(SlashCommandOutput {
-text,
-sections: vec![SlashCommandOutputSection {
-range,
-icon: IconName::FileTree,
-label: "Project".into(),
+let Some(workspace) = workspace.upgrade() else {
+return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
+};
+let project = workspace.read(cx).project().clone();
+let fs = project.read(cx).fs().clone();
+let Some(project_index) =
+cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
+else {
+return Task::ready(Err(anyhow::anyhow!("no project indexer")));
+};
+
+cx.spawn(|mut cx| async move {
+let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;
+
+let prompt =
+prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?;
+
+let search_queries = current_model
+.use_tool::<SearchQueries>(
+language_model::LanguageModelRequest {
+messages: vec![language_model::LanguageModelRequestMessage {
+role: language_model::Role::User,
+content: vec![language_model::MessageContent::Text(prompt)],
+cache: false,
+}],
+tools: vec![],
+stop: vec![],
+temperature: None,
+},
+cx.deref_mut(),
+)
+.await?
+.search_queries;
+
+let results = project_index
+.read_with(&cx, |project_index, cx| {
+project_index.search(search_queries.clone(), 25, cx)
+})?
+.await?;
+
+let results = SemanticDb::load_results(results, &fs, &cx).await?;
+
+cx.background_executor()
+.spawn(async move {
+let mut output = "Project context:\n".to_string();
+let mut sections = Vec::new();
+
+for (ix, query) in search_queries.into_iter().enumerate() {
+let start_ix = output.len();
+writeln!(&mut output, "Results for {query}:").unwrap();
+let mut has_results = false;
+for result in &results {
+if result.query_index == ix {
+add_search_result_section(result, &mut output, &mut sections);
+has_results = true;
+}
+}
+if has_results {
+sections.push(SlashCommandOutputSection {
+range: start_ix..output.len(),
+icon: IconName::MagnifyingGlass,
+label: query.into(),
+metadata: None,
+});
+output.push('\n');
+} else {
+output.truncate(start_ix);
+}
+}
+
+sections.push(SlashCommandOutputSection {
+range: 0..output.len(),
+icon: IconName::Book,
+label: "Project context".into(),
 metadata: None,
-}],
-run_commands_in_text: false,
-});
+});

+Ok(SlashCommandOutput {
+text: output,
+sections,
+run_commands_in_text: true,
+})
 })
-})
-});
-output.unwrap_or_else(|error| Task::ready(Err(error)))
+.await
+})
 }
 }

+#[derive(JsonSchema, Deserialize)]
+struct SearchQueries {
+/// An array of semantic search queries.
+///
+/// These queries will be used to search the user's codebase.
+/// The function can only accept 4 queries, otherwise it will error.
+/// As such, it's important that you limit the length of the search_queries array to 5 queries or less.
+search_queries: Vec<String>,
+}
+
+impl LanguageModelTool for SearchQueries {
+fn name() -> String {
+"search_queries".to_string()
+}
+
+fn description() -> String {
+"Generate semantic search queries based on context".to_string()
+}
+}
@@ -7,7 +7,7 @@ use anyhow::Result;
 use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
 use feature_flags::FeatureFlag;
 use gpui::{AppContext, Task, WeakView};
-use language::{CodeLabel, LineEnding, LspAdapterDelegate};
+use language::{CodeLabel, LspAdapterDelegate};
 use semantic_index::{LoadedSearchResult, SemanticDb};
 use std::{
 fmt::Write,
@@ -101,7 +101,7 @@ impl SlashCommand for SearchSlashCommand {
 cx.spawn(|cx| async move {
 let results = project_index
 .read_with(&cx, |project_index, cx| {
-project_index.search(query.clone(), limit.unwrap_or(5), cx)
+project_index.search(vec![query.clone()], limit.unwrap_or(5), cx)
 })?
 .await?;
@@ -112,31 +112,8 @@ impl SlashCommand for SearchSlashCommand {
 .spawn(async move {
 let mut text = format!("Search results for {query}:\n");
 let mut sections = Vec::new();
-for LoadedSearchResult {
-path,
-range,
-full_path,
-file_content,
-row_range,
-} in loaded_results
-{
-let section_start_ix = text.len();
-text.push_str(&codeblock_fence_for_path(
-Some(&path),
-Some(row_range.clone()),
-));
-
-let mut excerpt = file_content[range].to_string();
-LineEnding::normalize(&mut excerpt);
-text.push_str(&excerpt);
-writeln!(text, "\n```\n").unwrap();
-let section_end_ix = text.len() - 1;
-sections.push(build_entry_output_section(
-section_start_ix..section_end_ix,
-Some(&full_path),
-false,
-Some(row_range.start() + 1..row_range.end() + 1),
-));
+for loaded_result in &loaded_results {
+add_search_result_section(loaded_result, &mut text, &mut sections);
 }

 let query = SharedString::from(query);
@@ -159,3 +136,35 @@ impl SlashCommand for SearchSlashCommand {
 })
 }
 }
+
+pub fn add_search_result_section(
+loaded_result: &LoadedSearchResult,
+text: &mut String,
+sections: &mut Vec<SlashCommandOutputSection<usize>>,
+) {
+let LoadedSearchResult {
+path,
+full_path,
+excerpt_content,
+row_range,
+..
+} = loaded_result;
+let section_start_ix = text.len();
+text.push_str(&codeblock_fence_for_path(
+Some(&path),
+Some(row_range.clone()),
+));
+
+text.push_str(&excerpt_content);
+if !text.ends_with('\n') {
+text.push('\n');
+}
+writeln!(text, "```\n").unwrap();
+let section_end_ix = text.len() - 1;
+sections.push(build_entry_output_section(
+section_start_ix..section_end_ix,
+Some(&full_path),
+false,
+Some(row_range.start() + 1..row_range.end() + 1),
+));
+}
@@ -10,9 +10,9 @@ pub struct SlashCommandSettings {
 /// Settings for the `/docs` slash command.
 #[serde(default)]
 pub docs: DocsCommandSettings,
-/// Settings for the `/project` slash command.
+/// Settings for the `/cargo-workspace` slash command.
 #[serde(default)]
-pub project: ProjectCommandSettings,
+pub cargo_workspace: CargoWorkspaceCommandSettings,
 }

 /// Settings for the `/docs` slash command.
@@ -23,10 +23,10 @@ pub struct DocsCommandSettings {
 pub enabled: bool,
 }

-/// Settings for the `/project` slash command.
+/// Settings for the `/cargo-workspace` slash command.
 #[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
-pub struct ProjectCommandSettings {
-/// Whether `/project` is enabled.
+pub struct CargoWorkspaceCommandSettings {
+/// Whether `/cargo-workspace` is enabled.
 #[serde(default)]
 pub enabled: bool,
 }
@@ -284,7 +284,7 @@ impl TerminalInlineAssistant {
 messages,
 tools: Vec::new(),
 stop: Vec::new(),
-temperature: 1.0,
+temperature: None,
 })
 }
@@ -187,6 +187,7 @@ impl WorkflowSuggestion {
 suggestion_range,
 initial_prompt,
 initial_transaction_id,
+false,
 Some(workspace.clone()),
 Some(assistant_panel),
 cx,
@@ -264,6 +264,18 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<(

 fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) {
 let release_channel = ReleaseChannel::global(cx);

+let url = match release_channel {
+ReleaseChannel::Nightly => Some("https://github.com/zed-industries/zed/commits/nightly/"),
+ReleaseChannel::Dev => Some("https://github.com/zed-industries/zed/commits/main/"),
+_ => None,
+};
+
+if let Some(url) = url {
+cx.open_url(url);
+return;
+}
+
 let version = AppVersion::global(cx).to_string();

 let client = client::Client::global(cx).http_client();
@@ -345,15 +357,17 @@ pub fn notify_of_any_new_update(cx: &mut ViewContext<Workspace>) -> Option<()> {
 let should_show_notification = should_show_notification.await?;
 if should_show_notification {
 workspace.update(&mut cx, |workspace, cx| {
+let workspace_handle = workspace.weak_handle();
 workspace.show_notification(
 NotificationId::unique::<UpdateNotification>(),
 cx,
-|cx| cx.new_view(|_| UpdateNotification::new(version)),
+|cx| cx.new_view(|_| UpdateNotification::new(version, workspace_handle)),
 );
-updater
-.read(cx)
-.set_should_show_update_notification(false, cx)
-.detach_and_log_err(cx);
+updater.update(cx, |updater, cx| {
+updater
+.set_should_show_update_notification(false, cx)
+.detach_and_log_err(cx);
+});
 })?;
 }
 anyhow::Ok(())
@@ -1,13 +1,18 @@
 use gpui::{
 div, DismissEvent, EventEmitter, InteractiveElement, IntoElement, ParentElement, Render,
-SemanticVersion, StatefulInteractiveElement, Styled, ViewContext,
+SemanticVersion, StatefulInteractiveElement, Styled, ViewContext, WeakView,
 };
 use menu::Cancel;
 use release_channel::ReleaseChannel;
-use workspace::ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt};
+use util::ResultExt;
+use workspace::{
+ui::{h_flex, v_flex, Icon, IconName, Label, StyledExt},
+Workspace,
+};

 pub struct UpdateNotification {
 version: SemanticVersion,
+workspace: WeakView<Workspace>,
 }

 impl EventEmitter<DismissEvent> for UpdateNotification {}
@@ -41,7 +46,11 @@ impl Render for UpdateNotification {
 .child(Label::new("View the release notes"))
 .cursor_pointer()
 .on_click(cx.listener(|this, _, cx| {
-crate::view_release_notes(&Default::default(), cx);
+this.workspace
+.update(cx, |workspace, cx| {
+crate::view_release_notes_locally(workspace, cx);
+})
+.log_err();
 this.dismiss(&menu::Cancel, cx)
 })),
 )
@@ -49,8 +58,8 @@
 }

 impl UpdateNotification {
-pub fn new(version: SemanticVersion) -> Self {
-Self { version }
+pub fn new(version: SemanticVersion, workspace: WeakView<Workspace>) -> Self {
+Self { version, workspace }
 }

 pub fn dismiss(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
@@ -1,7 +1,7 @@
 use editor::Editor;
 use gpui::{
-Element, EventEmitter, IntoElement, ParentElement, Render, StyledText, Subscription,
-ViewContext,
+Element, EventEmitter, FocusableView, IntoElement, ParentElement, Render, StyledText,
+Subscription, ViewContext,
 };
 use itertools::Itertools;
 use std::cmp;
@@ -90,17 +90,30 @@ impl Render for Breadcrumbs {
 ButtonLike::new("toggle outline view")
 .child(breadcrumbs_stack)
 .style(ButtonStyle::Transparent)
-.on_click(move |_, cx| {
-if let Some(editor) = editor.upgrade() {
-outline::toggle(editor, &editor::actions::ToggleOutline, cx)
+.on_click({
+let editor = editor.clone();
+move |_, cx| {
+if let Some(editor) = editor.upgrade() {
+outline::toggle(editor, &editor::actions::ToggleOutline, cx)
+}
 }
 })
-.tooltip(|cx| {
-Tooltip::for_action(
-"Show symbol outline",
-&editor::actions::ToggleOutline,
-cx,
-)
+.tooltip(move |cx| {
+if let Some(editor) = editor.upgrade() {
+let focus_handle = editor.read(cx).focus_handle(cx);
+Tooltip::for_action_in(
+"Show symbol outline",
+&editor::actions::ToggleOutline,
+&focus_handle,
+cx,
+)
+} else {
+Tooltip::for_action(
+"Show symbol outline",
+&editor::actions::ToggleOutline,
+cx,
+)
+}
 }),
 ),
 None => element
@@ -66,7 +66,7 @@ impl ChannelBuffer {
 let capability = channel_store.read(cx).channel_capability(channel.id);
 language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text)
 })?;
-buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??;
+buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;

 let subscription = client.subscribe_to_entity(channel.id.0)?;
@@ -151,7 +151,7 @@ impl ChannelBuffer {
 cx.notify();
 this.buffer
 .update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
-})??;
+})?;

 Ok(())
 }
@@ -175,7 +175,10 @@ impl ChannelBuffer {
 cx: &mut ModelContext<Self>,
 ) {
 match event {
-language::BufferEvent::Operation(operation) => {
+language::BufferEvent::Operation {
+operation,
+is_local: true,
+} => {
 if *ZED_ALWAYS_ACTIVE {
 if let language::Operation::UpdateSelections { selections, .. } = operation {
 if selections.is_empty() {
@@ -808,7 +808,7 @@ pub fn mentions_to_proto(mentions: &[(Range<usize>, UserId)]) -> Vec<proto::Chat
 impl sum_tree::Item for ChannelMessage {
 type Summary = ChannelMessageSummary;

-fn summary(&self) -> Self::Summary {
+fn summary(&self, _cx: &()) -> Self::Summary {
 ChannelMessageSummary {
 max_id: self.id,
 count: 1,
@@ -1007,7 +1007,7 @@ impl ChannelStore {
 .into_iter()
 .map(language::proto::deserialize_operation)
 .collect::<Result<Vec<_>>>()?;
-buffer.apply_ops(incoming_operations, cx)?;
+buffer.apply_ops(incoming_operations, cx);
 anyhow::Ok(outgoing_operations)
 })
 .log_err();
@@ -1621,6 +1621,10 @@ impl ProtoClient for Client {
 fn message_handler_set(&self) -> &parking_lot::Mutex<ProtoMessageHandlerSet> {
 &self.handler_set
 }
+
+fn is_via_collab(&self) -> bool {
+true
+}
 }

 #[derive(Serialize, Deserialize)]
@@ -9,6 +9,8 @@ use std::{

 pub use system_clock::*;

+pub const LOCAL_BRANCH_REPLICA_ID: u16 = u16::MAX;
+
 /// A unique identifier for each distributed node.
 pub type ReplicaId = u16;
@@ -25,7 +27,10 @@ pub struct Lamport {

 /// A [vector clock](https://en.wikipedia.org/wiki/Vector_clock).
 #[derive(Clone, Default, Hash, Eq, PartialEq)]
-pub struct Global(SmallVec<[u32; 8]>);
+pub struct Global {
+values: SmallVec<[u32; 8]>,
+local_branch_value: u32,
+}

 impl Global {
 pub fn new() -> Self {
@@ -33,41 +38,51 @@ impl Global {
 }

 pub fn get(&self, replica_id: ReplicaId) -> Seq {
-self.0.get(replica_id as usize).copied().unwrap_or(0) as Seq
+if replica_id == LOCAL_BRANCH_REPLICA_ID {
+self.local_branch_value
+} else {
+self.values.get(replica_id as usize).copied().unwrap_or(0) as Seq
+}
 }

 pub fn observe(&mut self, timestamp: Lamport) {
 if timestamp.value > 0 {
-let new_len = timestamp.replica_id as usize + 1;
-if new_len > self.0.len() {
-self.0.resize(new_len, 0);
-}
+if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID {
+self.local_branch_value = cmp::max(self.local_branch_value, timestamp.value);
+} else {
+let new_len = timestamp.replica_id as usize + 1;
+if new_len > self.values.len() {
+self.values.resize(new_len, 0);
+}

-let entry = &mut self.0[timestamp.replica_id as usize];
-*entry = cmp::max(*entry, timestamp.value);
+let entry = &mut self.values[timestamp.replica_id as usize];
+*entry = cmp::max(*entry, timestamp.value);
+}
 }
 }

 pub fn join(&mut self, other: &Self) {
-if other.0.len() > self.0.len() {
-self.0.resize(other.0.len(), 0);
+if other.values.len() > self.values.len() {
+self.values.resize(other.values.len(), 0);
 }

-for (left, right) in self.0.iter_mut().zip(&other.0) {
+for (left, right) in self.values.iter_mut().zip(&other.values) {
 *left = cmp::max(*left, *right);
 }

+self.local_branch_value = cmp::max(self.local_branch_value, other.local_branch_value);
 }

 pub fn meet(&mut self, other: &Self) {
-if other.0.len() > self.0.len() {
-self.0.resize(other.0.len(), 0);
+if other.values.len() > self.values.len() {
+self.values.resize(other.values.len(), 0);
 }

 let mut new_len = 0;
 for (ix, (left, right)) in self
-.0
+.values
 .iter_mut()
-.zip(other.0.iter().chain(iter::repeat(&0)))
+.zip(other.values.iter().chain(iter::repeat(&0)))
 .enumerate()
 {
 if *left == 0 {
@@ -80,7 +95,8 @@ impl Global {
 new_len = ix + 1;
 }
 }
-self.0.resize(new_len, 0);
+self.values.resize(new_len, 0);
+self.local_branch_value = cmp::min(self.local_branch_value, other.local_branch_value);
 }

 pub fn observed(&self, timestamp: Lamport) -> bool {
pub fn observed(&self, timestamp: Lamport) -> bool {
|
||||
@@ -88,34 +104,44 @@ impl Global {
|
||||
}
|
||||
|
||||
pub fn observed_any(&self, other: &Self) -> bool {
|
||||
self.0
|
||||
self.values
|
||||
.iter()
|
||||
.zip(other.0.iter())
|
||||
.zip(other.values.iter())
|
||||
.any(|(left, right)| *right > 0 && left >= right)
|
||||
|| (other.local_branch_value > 0 && self.local_branch_value >= other.local_branch_value)
|
||||
}
|
||||
|
||||
pub fn observed_all(&self, other: &Self) -> bool {
|
||||
let mut rhs = other.0.iter();
|
||||
self.0.iter().all(|left| match rhs.next() {
|
||||
let mut rhs = other.values.iter();
|
||||
self.values.iter().all(|left| match rhs.next() {
|
||||
Some(right) => left >= right,
|
||||
None => true,
|
||||
}) && rhs.next().is_none()
|
||||
&& self.local_branch_value >= other.local_branch_value
|
||||
}
|
||||
|
||||
pub fn changed_since(&self, other: &Self) -> bool {
|
||||
self.0.len() > other.0.len()
|
||||
self.values.len() > other.values.len()
|
||||
|| self
|
||||
.0
|
||||
.values
|
||||
.iter()
|
||||
.zip(other.0.iter())
|
||||
.zip(other.values.iter())
|
||||
.any(|(left, right)| left > right)
|
||||
|| self.local_branch_value > other.local_branch_value
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> impl Iterator<Item = Lamport> + '_ {
|
||||
self.0.iter().enumerate().map(|(replica_id, seq)| Lamport {
|
||||
replica_id: replica_id as ReplicaId,
|
||||
value: *seq,
|
||||
})
|
||||
self.values
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(replica_id, seq)| Lamport {
|
||||
replica_id: replica_id as ReplicaId,
|
||||
value: *seq,
|
||||
})
|
||||
.chain((self.local_branch_value > 0).then_some(Lamport {
|
||||
replica_id: LOCAL_BRANCH_REPLICA_ID,
|
||||
value: self.local_branch_value,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -192,6 +218,9 @@ impl fmt::Debug for Global {
 }
 write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?;
 }
+if self.local_branch_value > 0 {
+write!(f, "<branch>: {}", self.local_branch_value)?;
+}
 write!(f, "}}")
 }
 }
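The `Global` vector clock now stores the branch's own sequence number out of band: `LOCAL_BRANCH_REPLICA_ID` is `u16::MAX`, so tracking it inside `values` would mean resizing the SmallVec to 65,536 slots (that motivation is inferred from the code, not stated in the diff). A self-contained usage sketch built only from the API shown above:

```rust
let mut clock = Global::new();
clock.observe(Lamport { replica_id: 0, value: 3 });
// Routed to the dedicated local_branch_value slot, not into `values`:
clock.observe(Lamport { replica_id: LOCAL_BRANCH_REPLICA_ID, value: 1 });
assert_eq!(clock.get(0), 3);
assert_eq!(clock.get(LOCAL_BRANCH_REPLICA_ID), 1);
```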
@@ -23,8 +23,7 @@ To use a different set of admin users, create `crates/collab/seed.json`.
 ```json
 {
 "admins": ["yourgithubhere"],
-"channels": ["zed"],
-"number_of_users": 20
+"channels": ["zed"]
 }
 ```
@@ -149,18 +149,6 @@ spec:
 secretKeyRef:
 name: google-ai
 key: api_key
-- name: RUNPOD_API_KEY
-valueFrom:
-secretKeyRef:
-name: runpod
-key: api_key
-optional: true
-- name: RUNPOD_API_SUMMARY_URL
-valueFrom:
-secretKeyRef:
-name: runpod
-key: summary
-optional: true
 - name: BLOB_STORE_ACCESS_KEY
 valueFrom:
 secretKeyRef:
@@ -8,6 +8,5 @@
 "JosephTLyons",
 "rgbkrk"
 ],
-"channels": ["zed"],
-"number_of_users": 100
+"channels": ["zed"]
 }
crates/collab/seed/github_users.json (new file, 602 lines)
@@ -0,0 +1,602 @@
[
  { "id": 1, "login": "mojombo", "email": "tom@mojombo.com", "created_at": "2007-10-20T05:24:19Z" },
  { "id": 2, "login": "defunkt", "email": null, "created_at": "2007-10-20T05:24:19Z" },
  { "id": 3, "login": "pjhyett", "email": "pj@hyett.com", "created_at": "2008-01-07T17:54:22Z" },
  { "id": 4, "login": "wycats", "email": "wycats@gmail.com", "created_at": "2008-01-12T05:38:33Z" },
  { "id": 5, "login": "ezmobius", "email": null, "created_at": "2008-01-12T07:51:46Z" },
  { "id": 6, "login": "ivey", "email": "ivey@gweezlebur.com", "created_at": "2008-01-12T15:15:00Z" },
  { "id": 7, "login": "evanphx", "email": "evan@phx.io", "created_at": "2008-01-12T16:46:24Z" },
  { "id": 17, "login": "vanpelt", "email": "vanpelt@wandb.com", "created_at": "2008-01-13T05:57:18Z" },
  { "id": 18, "login": "wayneeseguin", "email": "wayneeseguin@gmail.com", "created_at": "2008-01-13T06:02:21Z" },
  { "id": 19, "login": "brynary", "email": null, "created_at": "2008-01-13T10:19:47Z" },
  { "id": 20, "login": "kevinclark", "email": "kevin.clark@gmail.com", "created_at": "2008-01-13T18:33:26Z" },
  { "id": 21, "login": "technoweenie", "email": "technoweenie@hey.com", "created_at": "2008-01-14T04:33:35Z" },
  { "id": 22, "login": "macournoyer", "email": "macournoyer@gmail.com", "created_at": "2008-01-14T10:49:35Z" },
  { "id": 23, "login": "takeo", "email": "toby@takeo.email", "created_at": "2008-01-14T11:25:49Z" },
  { "id": 25, "login": "caged", "email": "encytemedia@gmail.com", "created_at": "2008-01-15T04:47:24Z" },
  { "id": 26, "login": "topfunky", "email": null, "created_at": "2008-01-15T05:40:05Z" },
  { "id": 27, "login": "anotherjesse", "email": "anotherjesse@gmail.com", "created_at": "2008-01-15T07:49:30Z" },
  { "id": 28, "login": "roland", "email": null, "created_at": "2008-01-15T08:12:51Z" },
  { "id": 29, "login": "lukas", "email": "lukas@wandb.com", "created_at": "2008-01-15T12:50:02Z" },
  { "id": 30, "login": "fanvsfan", "email": null, "created_at": "2008-01-15T14:15:23Z" },
  { "id": 31, "login": "tomtt", "email": null, "created_at": "2008-01-15T15:44:31Z" },
  { "id": 32, "login": "railsjitsu", "email": null, "created_at": "2008-01-16T04:57:23Z" },
  { "id": 34, "login": "nitay", "email": null, "created_at": "2008-01-18T14:09:11Z" },
  { "id": 35, "login": "kevwil", "email": null, "created_at": "2008-01-19T05:50:12Z" },
  { "id": 36, "login": "KirinDave", "email": null, "created_at": "2008-01-19T08:01:02Z" },
  { "id": 37, "login": "jamesgolick", "email": "jamesgolick@gmail.com", "created_at": "2008-01-19T22:52:30Z" },
  { "id": 38, "login": "atmos", "email": "atmos@atmos.org", "created_at": "2008-01-22T09:14:11Z" },
  { "id": 44, "login": "errfree", "email": null, "created_at": "2008-01-24T02:08:37Z" },
  { "id": 45, "login": "mojodna", "email": null, "created_at": "2008-01-24T04:40:22Z" },
  { "id": 46, "login": "bmizerany", "email": "blake.mizerany@gmail.com", "created_at": "2008-01-24T04:44:30Z" },
  { "id": 47, "login": "jnewland", "email": "jesse@jnewland.com", "created_at": "2008-01-25T02:28:12Z" },
  { "id": 48, "login": "joshknowles", "email": "joshknowles@gmail.com", "created_at": "2008-01-25T21:30:42Z" },
  { "id": 49, "login": "hornbeck", "email": "hornbeck@gmail.com", "created_at": "2008-01-25T21:49:23Z" },
  { "id": 50, "login": "jwhitmire", "email": "jeff@jwhitmire.com", "created_at": "2008-01-25T22:07:48Z" },
  { "id": 51, "login": "elbowdonkey", "email": null, "created_at": "2008-01-25T22:08:20Z" },
  { "id": 52, "login": "reinh", "email": null, "created_at": "2008-01-25T22:16:29Z" },
  { "id": 53, "login": "knzai", "email": "git@knz.ai", "created_at": "2008-01-25T22:33:10Z" },
  { "id": 68, "login": "bs", "email": "yap@bri.tt", "created_at": "2008-01-27T01:46:29Z" },
  { "id": 69, "login": "rsanheim", "email": null, "created_at": "2008-01-27T07:09:47Z" },
  { "id": 70, "login": "schacon", "email": "schacon@gmail.com", "created_at": "2008-01-27T17:19:28Z" },
  { "id": 71, "login": "uggedal", "email": null, "created_at": "2008-01-27T22:18:57Z" },
  { "id": 72, "login": "bruce", "email": "brwcodes@gmail.com", "created_at": "2008-01-28T07:16:45Z" },
  { "id": 73, "login": "sam", "email": "ssmoot@gmail.com", "created_at": "2008-01-28T19:01:26Z" },
  { "id": 74, "login": "mmower", "email": "self@mattmower.com", "created_at": "2008-01-28T19:47:50Z" },
  { "id": 75, "login": "abhay", "email": null, "created_at": "2008-01-28T21:08:23Z" },
  { "id": 76, "login": "rabble", "email": "evan@protest.net", "created_at": "2008-01-28T23:27:02Z" },
  { "id": 77, "login": "benburkert", "email": "ben@benburkert.com", "created_at": "2008-01-28T23:44:14Z" },
  { "id": 78, "login": "indirect", "email": "andre@arko.net", "created_at": "2008-01-29T07:59:27Z" },
  { "id": 79, "login": "fearoffish", "email": "me@fearof.fish", "created_at": "2008-01-29T08:43:10Z" },
  { "id": 80, "login": "ry", "email": "ry@tinyclouds.org", "created_at": "2008-01-29T08:50:34Z" },
  { "id": 81, "login": "engineyard", "email": null, "created_at": "2008-01-29T09:51:30Z" },
  { "id": 82, "login": "jsierles", "email": null, "created_at": "2008-01-29T11:10:25Z" },
  { "id": 83, "login": "tweibley", "email": null, "created_at": "2008-01-29T13:52:07Z" },
  { "id": 84, "login": "peimei", "email": "james@railsjitsu.com", "created_at": "2008-01-29T15:44:11Z" },
  { "id": 85, "login": "brixen", "email": "brixen@gmail.com", "created_at": "2008-01-29T16:47:55Z" },
  { "id": 87, "login": "tmornini", "email": null, "created_at": "2008-01-29T18:43:39Z" },
  { "id": 88, "login": "outerim", "email": "lee@outerim.com", "created_at": "2008-01-29T18:48:32Z" },
  { "id": 89, "login": "daksis", "email": null, "created_at": "2008-01-29T19:18:16Z" },
  { "id": 90, "login": "sr", "email": "me@simonrozet.com", "created_at": "2008-01-29T20:37:53Z" },
  { "id": 91, "login": "lifo", "email": null, "created_at": "2008-01-29T23:09:30Z" },
  { "id": 92, "login": "rsl", "email": "sconds@gmail.com", "created_at": "2008-01-29T23:13:36Z" },
  { "id": 93, "login": "imownbey", "email": null, "created_at": "2008-01-29T23:13:44Z" },
  { "id": 94, "login": "dylanegan", "email": null, "created_at": "2008-01-29T23:15:18Z" },
  { "id": 95, "login": "jm", "email": "jeremymcanally@gmail.com", "created_at": "2008-01-29T23:15:32Z" },
  { "id": 100, "login": "kmarsh", "email": "kevin.marsh@gmail.com", "created_at": "2008-01-29T23:48:24Z" },
  { "id": 101, "login": "jvantuyl", "email": "jayson@aggressive.ly", "created_at": "2008-01-30T01:11:50Z" },
  { "id": 102, "login": "BrianTheCoder", "email": "wbsmith83@gmail.com", "created_at": "2008-01-30T02:22:32Z" },
  { "id": 103, "login": "freeformz", "email": "freeformz@gmail.com", "created_at": "2008-01-30T06:19:57Z" },
  { "id": 104, "login": "hassox", "email": "dneighman@gmail.com", "created_at": "2008-01-30T06:31:06Z" },
  { "id": 105, "login": "automatthew", "email": "automatthew@gmail.com", "created_at": "2008-01-30T19:00:58Z" },
  { "id": 106, "login": "queso", "email": "Joshua.owens@gmail.com", "created_at": "2008-01-30T19:48:45Z" },
  { "id": 107, "login": "lancecarlson", "email": null, "created_at": "2008-01-30T19:53:29Z" },
  { "id": 108, "login": "drnic", "email": "drnicwilliams@gmail.com", "created_at": "2008-01-30T23:19:18Z" },
  { "id": 109, "login": "lukesutton", "email": null, "created_at": "2008-01-31T04:01:02Z" },
  { "id": 110, "login": "danwrong", "email": null, "created_at": "2008-01-31T08:51:31Z" },
  { "id": 111, "login": "HamptonMakes", "email": "hampton@hamptoncatlin.com", "created_at": "2008-01-31T17:03:51Z" },
  { "id": 112, "login": "jfrost", "email": null, "created_at": "2008-01-31T22:14:27Z" },
  { "id": 113, "login": "mattetti", "email": null, "created_at": "2008-01-31T22:56:31Z" },
  { "id": 114, "login": "ctennis", "email": "c@leb.tennis", "created_at": "2008-01-31T23:43:14Z" },
  { "id": 115, "login": "lawrencepit", "email": "lawrence.pit@gmail.com", "created_at": "2008-01-31T23:57:16Z" },
  { "id": 116, "login": "marcjeanson", "email": "github@marcjeanson.com", "created_at": "2008-02-01T01:27:19Z" },
  { "id": 117, "login": "grempe", "email": null, "created_at": "2008-02-01T04:12:42Z" },
  { "id": 118, "login": "peterc", "email": "git@peterc.org", "created_at": "2008-02-02T01:00:36Z" },
  { "id": 119, "login": "ministrycentered", "email": null, "created_at": "2008-02-02T03:50:26Z" },
  { "id": 120, "login": "afarnham", "email": null, "created_at": "2008-02-02T05:11:03Z" },
  { "id": 121, "login": "up_the_irons", "email": null, "created_at": "2008-02-02T10:59:51Z" },
  { "id": 122, "login": "cristibalan", "email": "cristibalan@gmail.com", "created_at": "2008-02-02T11:29:45Z" },
  { "id": 123, "login": "heavysixer", "email": null, "created_at": "2008-02-02T15:06:53Z" },
  { "id": 124, "login": "brosner", "email": "brosner@gmail.com", "created_at": "2008-02-02T19:03:54Z" },
  { "id": 125, "login": "danielmorrison", "email": "daniel@collectiveidea.com", "created_at": "2008-02-02T19:46:35Z" },
  { "id": 126, "login": "danielharan", "email": "chebuctonian@gmail.com", "created_at": "2008-02-02T21:42:21Z" },
  { "id": 127, "login": "kvnsmth", "email": null, "created_at": "2008-02-02T22:00:03Z" },
  { "id": 128, "login": "collectiveidea", "email": "info@collectiveidea.com", "created_at": "2008-02-02T22:34:46Z" },
  { "id": 129, "login": "canadaduane", "email": "duane.johnson@gmail.com", "created_at": "2008-02-02T23:25:39Z" },
  { "id": 130, "login": "corasaurus-hex", "email": "cora@sutton.me", "created_at": "2008-02-03T04:20:22Z" },
  { "id": 131, "login": "dstrelau", "email": null, "created_at": "2008-02-03T14:59:12Z" },
  { "id": 132, "login": "sunny", "email": "sunny@sunfox.org", "created_at": "2008-02-03T15:43:43Z" },
  { "id": 133, "login": "dkubb", "email": "github@dan.kubb.ca", "created_at": "2008-02-03T20:40:13Z" },
  { "id": 134, "login": "jnicklas", "email": "jonas@jnicklas.com", "created_at": "2008-02-03T20:43:50Z" },
  { "id": 135, "login": "richcollins", "email": "richcollins@gmail.com", "created_at": "2008-02-03T21:11:25Z" }
]
@@ -18,8 +18,8 @@ use sha2::{Digest, Sha256};
 use std::sync::{Arc, OnceLock};
 use telemetry_events::{
 ActionEvent, AppEvent, AssistantEvent, CallEvent, CpuEvent, EditEvent, EditorEvent, Event,
-EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent,
-SettingEvent,
+EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, Panic,
+ReplEvent, SettingEvent,
 };
 use uuid::Uuid;
@@ -296,10 +296,11 @@ pub async fn post_panic(
 version = %panic.app_version,
 os_name = %panic.os_name,
 os_version = %panic.os_version.clone().unwrap_or_default(),
-installation_id = %panic.installation_id.unwrap_or_default(),
+installation_id = %panic.installation_id.clone().unwrap_or_default(),
 description = %panic.payload,
 backtrace = %panic.backtrace.join("\n"),
-"panic report");
+"panic report"
+);

 let backtrace = if panic.backtrace.len() > 25 {
 let total = panic.backtrace.len();
@@ -317,6 +318,11 @@ pub async fn post_panic(
 } else {
 panic.backtrace.join("\n")
 };
+
+if !report_to_slack(&panic) {
+return Ok(());
+}
+
 let backtrace_with_summary = panic.payload + "\n" + &backtrace;

 if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() {
@@ -357,6 +363,25 @@ pub async fn post_panic(
 Ok(())
 }

+fn report_to_slack(panic: &Panic) -> bool {
+if panic.payload.contains("ERROR_SURFACE_LOST_KHR") {
+return false;
+}
+
+if panic.payload.contains("ERROR_INITIALIZATION_FAILED") {
+return false;
+}
+
+if panic
+.payload
+.contains("GPU has crashed, and no debug information is available")
+{
+return false;
+}
+
+true
+}
+
 pub async fn post_events(
 Extension(app): Extension<Arc<AppState>>,
 TypedHeader(ZedChecksumHeader(checksum)): TypedHeader<ZedChecksumHeader>,
@@ -882,7 +907,6 @@ impl AssistantEventRow {

 #[derive(Debug, clickhouse::Row, Serialize)]
 pub struct CpuEventRow {
-system_id: Option<String>,
 installation_id: Option<String>,
 session_id: Option<String>,
 is_staff: Option<bool>,
@@ -921,7 +945,6 @@ impl CpuEventRow {
 release_channel: body.release_channel.clone().unwrap_or_default(),
 os_name: body.os_name.clone(),
 os_version: body.os_version.clone().unwrap_or_default(),
-system_id: body.system_id.clone(),
 installation_id: body.installation_id.clone(),
 session_id: body.session_id.clone(),
 is_staff: body.is_staff,
@@ -945,7 +968,6 @@ pub struct MemoryEventRow {
 os_version: String,

 // ClientEventBase
-system_id: Option<String>,
 installation_id: Option<String>,
 session_id: Option<String>,
 is_staff: Option<bool>,
@@ -977,7 +999,6 @@ impl MemoryEventRow {
 release_channel: body.release_channel.clone().unwrap_or_default(),
 os_name: body.os_name.clone(),
 os_version: body.os_version.clone().unwrap_or_default(),
-system_id: body.system_id.clone(),
 installation_id: body.installation_id.clone(),
 session_id: body.session_id.clone(),
 is_staff: body.is_staff,
@@ -1001,7 +1022,6 @@ pub struct AppEventRow {
 os_version: String,

 // ClientEventBase
-system_id: Option<String>,
 installation_id: Option<String>,
 session_id: Option<String>,
 is_staff: Option<bool>,
@@ -1032,7 +1052,6 @@ impl AppEventRow {
 release_channel: body.release_channel.clone().unwrap_or_default(),
 os_name: body.os_name.clone(),
 os_version: body.os_version.clone().unwrap_or_default(),
-system_id: body.system_id.clone(),
 installation_id: body.installation_id.clone(),
 session_id: body.session_id.clone(),
 is_staff: body.is_staff,
@@ -1055,7 +1074,6 @@ pub struct SettingEventRow {
|
||||
os_version: String,
|
||||
|
||||
// ClientEventBase
|
||||
system_id: Option<String>,
|
||||
installation_id: Option<String>,
|
||||
session_id: Option<String>,
|
||||
is_staff: Option<bool>,
|
||||
@@ -1086,7 +1104,6 @@ impl SettingEventRow {
|
||||
release_channel: body.release_channel.clone().unwrap_or_default(),
|
||||
os_name: body.os_name.clone(),
|
||||
os_version: body.os_version.clone().unwrap_or_default(),
|
||||
system_id: body.system_id.clone(),
|
||||
installation_id: body.installation_id.clone(),
|
||||
session_id: body.session_id.clone(),
|
||||
is_staff: body.is_staff,
|
||||
@@ -1110,7 +1127,6 @@ pub struct ExtensionEventRow {
|
||||
os_version: String,
|
||||
|
||||
// ClientEventBase
|
||||
system_id: Option<String>,
|
||||
installation_id: Option<String>,
|
||||
session_id: Option<String>,
|
||||
is_staff: Option<bool>,
|
||||
@@ -1146,7 +1162,6 @@ impl ExtensionEventRow {
|
||||
release_channel: body.release_channel.clone().unwrap_or_default(),
|
||||
os_name: body.os_name.clone(),
|
||||
os_version: body.os_version.clone().unwrap_or_default(),
|
||||
system_id: body.system_id.clone(),
|
||||
installation_id: body.installation_id.clone(),
|
||||
session_id: body.session_id.clone(),
|
||||
is_staff: body.is_staff,
|
||||
@@ -1237,7 +1252,6 @@ pub struct EditEventRow {
|
||||
os_version: String,
|
||||
|
||||
// ClientEventBase
|
||||
system_id: Option<String>,
|
||||
installation_id: Option<String>,
|
||||
// Note: This column name has a typo in the ClickHouse table.
|
||||
#[serde(rename = "sesssion_id")]
|
||||
@@ -1275,7 +1289,6 @@ impl EditEventRow {
|
||||
release_channel: body.release_channel.clone().unwrap_or_default(),
|
||||
os_name: body.os_name.clone(),
|
||||
os_version: body.os_version.clone().unwrap_or_default(),
|
||||
system_id: body.system_id.clone(),
|
||||
installation_id: body.installation_id.clone(),
|
||||
session_id: body.session_id.clone(),
|
||||
is_staff: body.is_staff,
|
||||
|
||||
@@ -32,6 +32,7 @@ macro_rules! id_type {
|
||||
#[allow(unused)]
|
||||
#[allow(missing_docs)]
|
||||
pub fn from_proto(value: u64) -> Self {
|
||||
debug_assert!(value != 0);
|
||||
Self(value as i32)
|
||||
}
|
||||
|
||||
|
||||
@@ -689,9 +689,7 @@ impl Database {
|
||||
}
|
||||
|
||||
let mut text_buffer = text::Buffer::new(0, text::BufferId::new(1).unwrap(), base_text);
|
||||
text_buffer
|
||||
.apply_ops(operations.into_iter().filter_map(operation_from_wire))
|
||||
.unwrap();
|
||||
text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire));
|
||||
|
||||
let base_text = text_buffer.text();
|
||||
let epoch = buffer.epoch + 1;
|
||||
|
||||
@@ -285,7 +285,7 @@ impl Database {
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
.ok_or_else(|| anyhow!("no such project: {project_id}"))?;
|
||||
|
||||
// Update metadata.
|
||||
worktree::Entity::update(worktree::ActiveModel {
|
||||
|
||||
@@ -298,6 +298,12 @@ impl Database {
|
||||
result
|
||||
}
|
||||
|
||||
/// Returns all feature flags.
|
||||
pub async fn list_feature_flags(&self) -> Result<Vec<feature_flag::Model>> {
|
||||
self.transaction(|tx| async move { Ok(feature_flag::Entity::find().all(&*tx).await?) })
|
||||
.await
|
||||
}
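A sketch of how the new accessor pairs with the existing create_user_flag to make flag creation idempotent (the helper itself is hypothetical; `Database` and both methods appear in the hunks above, and the anyhow error type is an assumption):

async fn ensure_flag_exists(db: &Database, name: &str) -> anyhow::Result<()> {
    // list_feature_flags returns every row, so an existence check is a linear scan.
    let existing = db.list_feature_flags().await?;
    if !existing.iter().any(|flag| flag.flag == name) {
        db.create_user_flag(name, false).await?;
    }
    Ok(())
}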

     /// Creates a new feature flag.
     pub async fn create_user_flag(&self, flag: &str, enabled_for_all: bool) -> Result<FlagId> {
         self.transaction(|tx| async move {

@@ -96,16 +96,14 @@ async fn test_channel_buffers(db: &Arc<Database>) {
         text::BufferId::new(1).unwrap(),
         buffer_response_b.base_text,
     );
-    buffer_b
-        .apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
-            let operation = proto::deserialize_operation(operation).unwrap();
-            if let language::Operation::Buffer(operation) = operation {
-                operation
-            } else {
-                unreachable!()
-            }
-        }))
-        .unwrap();
+    buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
+        let operation = proto::deserialize_operation(operation).unwrap();
+        if let language::Operation::Buffer(operation) = operation {
+            operation
+        } else {
+            unreachable!()
+        }
+    }));

     assert_eq!(buffer_b.text(), "hello, cruel world");

@@ -170,8 +170,6 @@ pub struct Config {
     pub anthropic_api_key: Option<Arc<str>>,
     pub anthropic_staff_api_key: Option<Arc<str>>,
     pub llm_closed_beta_model_name: Option<Arc<str>>,
-    pub runpod_api_key: Option<Arc<str>>,
-    pub runpod_api_summary_url: Option<Arc<str>>,
     pub zed_client_checksum_seed: Option<String>,
     pub slack_panics_webhook: Option<String>,
     pub auto_join_channel_id: Option<ChannelId>,
@@ -235,8 +233,6 @@ impl Config {
             stripe_api_key: None,
             stripe_price_id: None,
             supermaven_admin_api_key: None,
-            runpod_api_key: None,
-            runpod_api_summary_url: None,
             user_backfiller_github_access_token: None,
         }
     }

@@ -400,42 +400,6 @@ async fn perform_completion(
                 })
                 .boxed()
         }
-        LanguageModelProvider::Zed => {
-            let api_key = state
-                .config
-                .runpod_api_key
-                .as_ref()
-                .context("no Qwen2-7B API key configured on the server")?;
-            let api_url = state
-                .config
-                .runpod_api_summary_url
-                .as_ref()
-                .context("no Qwen2-7B URL configured on the server")?;
-            let chunks = open_ai::stream_completion(
-                &state.http_client,
-                api_url,
-                api_key,
-                serde_json::from_str(params.provider_request.get())?,
-                None,
-            )
-            .await?;
-
-            chunks
-                .map(|event| {
-                    event.map(|chunk| {
-                        let input_tokens =
-                            chunk.usage.as_ref().map_or(0, |u| u.prompt_tokens) as usize;
-                        let output_tokens =
-                            chunk.usage.as_ref().map_or(0, |u| u.completion_tokens) as usize;
-                        (
-                            serde_json::to_vec(&chunk).unwrap(),
-                            input_tokens,
-                            output_tokens,
-                        )
-                    })
-                })
-                .boxed()
-        }
     };

     Ok(Response::new(Body::wrap_stream(TokenCountingStream {

@@ -77,7 +77,6 @@ fn authorize_access_for_country(
         LanguageModelProvider::Anthropic => anthropic::is_supported_country(country_code),
         LanguageModelProvider::OpenAi => open_ai::is_supported_country(country_code),
         LanguageModelProvider::Google => google_ai::is_supported_country(country_code),
-        LanguageModelProvider::Zed => true,
     };
     if !is_country_supported_by_provider {
         Err(Error::http(
@@ -213,7 +212,6 @@ mod tests {
         (LanguageModelProvider::Anthropic, "T1"), // Tor
         (LanguageModelProvider::OpenAi, "T1"),    // Tor
         (LanguageModelProvider::Google, "T1"),    // Tor
-        (LanguageModelProvider::Zed, "T1"),       // Tor
     ];

     for (provider, country_code) in cases {

@@ -40,15 +40,6 @@ pub async fn seed_database(_config: &Config, db: &mut LlmDatabase, _force: bool)
             price_per_million_input_tokens: 25,   // $0.25/MTok
             price_per_million_output_tokens: 125, // $1.25/MTok
         },
-        ModelParams {
-            provider: LanguageModelProvider::Zed,
-            name: "Qwen/Qwen2-7B-Instruct".into(),
-            max_requests_per_minute: 5,
-            max_tokens_per_minute: 25_000, // These are arbitrary limits we've set to cap costs; we control this number
-            max_tokens_per_day: 300_000,
-            price_per_million_input_tokens: 25,
-            price_per_million_output_tokens: 125,
-        },
     ])
     .await
 }
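Units check for the pricing fields above: the stored values are cents per million tokens, matching the $0.25/MTok and $1.25/MTok comments. A worked example (illustrative only):

fn main() {
    let price_per_million_input_tokens_cents = 25u64;
    let dollars_per_million_tokens = price_per_million_input_tokens_cents as f64 / 100.0;
    assert_eq!(dollars_per_million_tokens, 0.25); // 25 cents => $0.25/MTok
}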

@@ -26,7 +26,6 @@ async fn test_initialize_providers(db: &mut LlmDatabase) {
             LanguageModelProvider::Anthropic,
             LanguageModelProvider::Google,
             LanguageModelProvider::OpenAi,
-            LanguageModelProvider::Zed
         ]
     )
 }

@@ -474,9 +474,6 @@ impl Server {
         .add_request_handler(user_handler(
             forward_read_only_project_request::<proto::GetReferences>,
         ))
-        .add_request_handler(user_handler(
-            forward_read_only_project_request::<proto::SearchProject>,
-        ))
         .add_request_handler(user_handler(forward_find_search_candidates_request))
         .add_request_handler(user_handler(
             forward_read_only_project_request::<proto::GetDocumentHighlights>,
@@ -2298,7 +2295,7 @@ async fn list_remote_directory(
     let dev_server_connection_id = session
         .connection_pool()
         .await
-        .dev_server_connection_id_supporting(dev_server_id, ZedVersion::with_list_directory())?;
+        .online_dev_server_connection_id(dev_server_id)?;

     session
         .db()
@@ -2337,10 +2334,7 @@ async fn update_dev_server_project(
     let dev_server_connection_id = session
         .connection_pool()
         .await
-        .dev_server_connection_id_supporting(
-            dev_server_project.dev_server_id,
-            ZedVersion::with_list_directory(),
-        )?;
+        .online_dev_server_connection_id(dev_server_project.dev_server_id)?;

     session.peer.send(
         dev_server_connection_id,
@@ -2950,40 +2944,6 @@ async fn forward_find_search_candidates_request(
         .await
         .host_for_read_only_project_request(project_id, session.connection_id, session.user_id())
         .await?;

-    let host_version = session
-        .connection_pool()
-        .await
-        .connection(host_connection_id)
-        .map(|c| c.zed_version);
-
-    if host_version.is_some_and(|host_version| host_version < ZedVersion::with_search_candidates())
-    {
-        let query = request.query.ok_or_else(|| anyhow!("missing query"))?;
-        let search = proto::SearchProject {
-            project_id: project_id.to_proto(),
-            query: query.query,
-            regex: query.regex,
-            whole_word: query.whole_word,
-            case_sensitive: query.case_sensitive,
-            files_to_include: query.files_to_include,
-            files_to_exclude: query.files_to_exclude,
-            include_ignored: query.include_ignored,
-        };
-
-        let payload = session
-            .peer
-            .forward_request(session.connection_id, host_connection_id, search)
-            .await?;
-        return response.send(proto::FindSearchCandidatesResponse {
-            buffer_ids: payload
-                .locations
-                .into_iter()
-                .map(|loc| loc.buffer_id)
-                .collect(),
-        });
-    }

     let payload = session
         .peer
         .forward_request(session.connection_id, host_connection_id, request)

@@ -32,15 +32,7 @@ impl fmt::Display for ZedVersion {

 impl ZedVersion {
     pub fn can_collaborate(&self) -> bool {
-        self.0 >= SemanticVersion::new(0, 134, 0)
+        self.0 >= SemanticVersion::new(0, 151, 0)
     }
-
-    pub fn with_list_directory() -> ZedVersion {
-        ZedVersion(SemanticVersion::new(0, 145, 0))
-    }
-
-    pub fn with_search_candidates() -> ZedVersion {
-        ZedVersion(SemanticVersion::new(0, 151, 0))
-    }
 }
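The two per-feature version probes collapse into the single can_collaborate floor, raised from 0.134.0 to 0.151.0 (the old with_search_candidates threshold). A self-contained restatement of the gate, assuming nothing beyond a (major, minor, patch) triple:

fn can_collaborate(version: (u64, u64, u64)) -> bool {
    // Tuples compare lexicographically, mirroring SemanticVersion ordering.
    version >= (0, 151, 0)
}

fn main() {
    assert!(!can_collaborate((0, 145, 0))); // the old with_list_directory floor no longer suffices
    assert!(can_collaborate((0, 151, 0)));
}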

@@ -169,6 +161,16 @@ impl ConnectionPool {
         self.connected_dev_servers.get(&dev_server_id).copied()
     }

+    pub fn online_dev_server_connection_id(
+        &self,
+        dev_server_id: DevServerId,
+    ) -> Result<ConnectionId> {
+        match self.connected_dev_servers.get(&dev_server_id) {
+            Some(cid) => Ok(*cid),
+            None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
+        }
+    }
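Returning a typed proto::ErrorCode::DevServerOffline, rather than a plain anyhow message, lets callers and clients distinguish "offline" from other failures. A standalone sketch over plain types (every name below is illustrative, not from the codebase):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct DevServerId(u64);
#[derive(Clone, Copy, Debug, PartialEq)]
struct ConnectionId(u32);

fn online_connection_id(
    pool: &HashMap<DevServerId, ConnectionId>,
    id: DevServerId,
) -> Result<ConnectionId, &'static str> {
    // Mirrors the match above: present => Ok(connection), absent => typed offline error.
    pool.get(&id).copied().ok_or("DevServerOffline")
}

fn main() {
    let pool = HashMap::from([(DevServerId(1), ConnectionId(7))]);
    assert_eq!(online_connection_id(&pool, DevServerId(1)), Ok(ConnectionId(7)));
    assert!(online_connection_id(&pool, DevServerId(2)).is_err());
}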

     pub fn dev_server_connection_id_supporting(
         &self,
         dev_server_id: DevServerId,

@@ -4,10 +4,13 @@ use anyhow::Context;
 use chrono::{DateTime, Utc};
 use db::Database;
 use serde::{de::DeserializeOwned, Deserialize};
-use std::{fmt::Write, fs, path::Path};
+use std::{fs, path::Path};

 use crate::Config;

+/// A GitHub user.
+///
+/// This representation corresponds to the entries in the `seed/github_users.json` file.
 #[derive(Debug, Deserialize)]
 struct GithubUser {
     id: i32,
@@ -18,12 +21,10 @@ struct GithubUser {

 #[derive(Deserialize)]
 struct SeedConfig {
-    // Which users to create as admins.
+    /// Which users to create as admins.
     admins: Vec<String>,
-    // Which channels to create (all admins are invited to all channels)
+    /// Which channels to create (all admins are invited to all channels).
     channels: Vec<String>,
-    // Number of random users to create from the Github API
     number_of_users: Option<usize>,
 }

 pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result<()> {
@@ -47,11 +48,21 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result
     let flag_names = ["remoting", "language-models"];
     let mut flags = Vec::new();

+    let existing_feature_flags = db.list_feature_flags().await?;
+
     for flag_name in flag_names {
+        if existing_feature_flags
+            .iter()
+            .any(|flag| flag.flag == flag_name)
+        {
+            log::info!("Flag {flag_name:?} already exists");
+            continue;
+        }
+
         let flag = db
             .create_user_flag(flag_name, false)
             .await
-            .unwrap_or_else(|_| panic!("failed to create flag: '{flag_name}'"));
+            .unwrap_or_else(|err| panic!("failed to create flag: '{flag_name}': {err}"));
         flags.push(flag);
     }

@@ -106,44 +117,29 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result
         }
     }

+    // TODO: Fix this later
     if let Some(number_of_users) = seed_config.number_of_users {
-        // Fetch 100 other random users from GitHub and insert them into the database
-        // (for testing autocompleters, etc.)
-        let mut user_count = db
-            .get_all_users(0, 200)
-            .await
-            .expect("failed to load users from db")
-            .len();
-        let mut last_user_id = None;
-        while user_count < number_of_users {
-            let mut uri = "https://api.github.com/users?per_page=100".to_string();
-            if let Some(last_user_id) = last_user_id {
-                write!(&mut uri, "&since={}", last_user_id).unwrap();
-            }
-            let users = fetch_github::<Vec<GithubUser>>(&client, &uri).await;
-
-            for github_user in users {
-                last_user_id = Some(github_user.id);
-                user_count += 1;
-                let user = db
-                    .get_or_create_user_by_github_account(
-                        &github_user.login,
-                        github_user.id,
-                        github_user.email.as_deref(),
-                        github_user.created_at,
-                        None,
-                    )
-                    .await
-                    .expect("failed to insert user");
-
-                for flag in &flags {
-                    db.add_user_flag(user.id, *flag).await.context(format!(
-                        "Unable to enable flag '{}' for user '{}'",
-                        flag, user.id
-                    ))?;
-                }
-            }
-        }
+        let github_users_filepath = seed_path.parent().unwrap().join("seed/github_users.json");
+        let github_users: Vec<GithubUser> =
+            serde_json::from_str(&fs::read_to_string(github_users_filepath)?)?;
+
+        for github_user in github_users {
+            log::info!("Seeding {:?} from GitHub", github_user.login);
+
+            let user = db
+                .get_or_create_user_by_github_account(
+                    &github_user.login,
+                    github_user.id,
+                    github_user.email.as_deref(),
+                    github_user.created_at,
+                    None,
+                )
+                .await
+                .expect("failed to insert user");
+
+            for flag in &flags {
+                db.add_user_flag(user.id, *flag).await.context(format!(
+                    "Unable to enable flag '{}' for user '{}'",
+                    flag, user.id
+                ))?;
+            }
+        }
     }
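The rewritten seeding loop reads users from seed/github_users.json instead of paging the GitHub API. A sketch of the shape one entry must deserialize into, based on the GithubUser fields referenced above (the concrete values are made up):

fn parse_example() -> serde_json::Result<()> {
    // Field types inferred from the diff: `id: i32` is declared in the struct,
    // and `email`/`created_at` usage implies Option<String> and DateTime<Utc>.
    #[derive(serde::Deserialize)]
    struct GithubUser {
        id: i32,
        login: String,
        email: Option<String>,
        created_at: chrono::DateTime<chrono::Utc>,
    }

    let example = r#"[{
        "id": 1,
        "login": "octocat",
        "email": null,
        "created_at": "2011-01-25T18:44:36Z"
    }]"#;
    let users: Vec<GithubUser> = serde_json::from_str(example)?;
    assert_eq!(users[0].login, "octocat");
    Ok(())
}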

@@ -246,7 +246,7 @@ async fn test_channel_notes_participant_indices(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);

     // Clients A and B open the same file.

@@ -7,18 +7,12 @@ use collections::HashMap;
 use editor::{
     actions::{
         ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, Redo, Rename,
-        RevertSelectedHunks, ToggleCodeActions, Undo,
+        ToggleCodeActions, Undo,
     },
-    display_map::DisplayRow,
-    test::{
-        editor_hunks,
-        editor_test_context::{AssertionContextManager, EditorTestContext},
-        expanded_hunks, expanded_hunks_background_highlights,
-    },
+    test::editor_test_context::{AssertionContextManager, EditorTestContext},
     Editor,
 };
 use futures::StreamExt;
-use git::diff::DiffHunkStatus;
 use gpui::{TestAppContext, UpdateGlobal, VisualContext, VisualTestContext};
 use indoc::indoc;
 use language::{
@@ -82,7 +76,7 @@ async fn test_host_disconnect(
     .await
     .unwrap();

-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     cx_a.background_executor.run_until_parked();

     assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer()));
@@ -198,7 +192,7 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor(
     .await
     .unwrap();

-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open a buffer as client A
     let buffer_a = project_a
@@ -314,7 +308,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open a file in an editor as the guest.
     let buffer_b = project_b
@@ -571,7 +565,7 @@ async fn test_collaborating_with_code_actions(
         .unwrap();

     // Join the project as client B.
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
     let editor_b = workspace_b
         .update(cx_b, |workspace, cx| {
@@ -786,7 +780,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
     let editor_b = workspace_b
@@ -1036,7 +1030,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
     .await
     .unwrap();
     executor.run_until_parked();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     project_b.read_with(cx_b, |project, cx| {
         let status = project.language_server_statuses(cx).next().unwrap().1;
@@ -1132,9 +1126,7 @@ async fn test_share_project(
         .await
         .unwrap();
     let client_b_peer_id = client_b.peer_id().unwrap();
-    let project_b = client_b
-        .build_dev_server_project(initial_project.id, cx_b)
-        .await;
+    let project_b = client_b.join_remote_project(initial_project.id, cx_b).await;

     let replica_id_b = project_b.read_with(cx_b, |project, _| project.replica_id());

@@ -1236,9 +1228,7 @@ async fn test_share_project(
         .update(cx_c, |call, cx| call.accept_incoming(cx))
         .await
         .unwrap();
-    let _project_c = client_c
-        .build_dev_server_project(initial_project.id, cx_c)
-        .await;
+    let _project_c = client_c.join_remote_project(initial_project.id, cx_c).await;

     // Client B closes the editor, and client A sees client B's selections removed.
     cx_b.update(move |_| drop(editor_b));
@@ -1297,7 +1287,7 @@ async fn test_on_input_format_from_host_to_guest(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open a file in an editor as the host.
     let buffer_a = project_a
@@ -1417,7 +1407,7 @@ async fn test_on_input_format_from_guest_to_host(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open a file in an editor as the guest.
     let buffer_b = project_b
@@ -1580,7 +1570,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
     .unwrap();

     // Client B joins the project
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     active_call_b
         .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
         .await
@@ -1842,7 +1832,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
     .await
     .unwrap();

-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     active_call_b
         .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
         .await
@@ -1970,288 +1960,6 @@ async fn test_inlay_hint_refresh_is_forwarded(
     });
 }

-#[gpui::test]
-async fn test_multiple_hunk_types_revert(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
-    let mut server = TestServer::start(cx_a.executor()).await;
-    let client_a = server.create_client(cx_a, "user_a").await;
-    let client_b = server.create_client(cx_b, "user_b").await;
-    server
-        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
-        .await;
-    let active_call_a = cx_a.read(ActiveCall::global);
-    let active_call_b = cx_b.read(ActiveCall::global);
-
-    cx_a.update(editor::init);
-    cx_b.update(editor::init);
-
-    client_a.language_registry().add(rust_lang());
-    client_b.language_registry().add(rust_lang());
-
-    let base_text = indoc! {r#"struct Row;
-        struct Row1;
-        struct Row2;
-
-        struct Row4;
-        struct Row5;
-        struct Row6;
-
-        struct Row8;
-        struct Row9;
-        struct Row10;"#};
-
-    client_a
-        .fs()
-        .insert_tree(
-            "/a",
-            json!({
-                "main.rs": base_text,
-            }),
-        )
-        .await;
-    let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
-    active_call_a
-        .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
-        .await
-        .unwrap();
-    let project_id = active_call_a
-        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
-        .await
-        .unwrap();
-
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
-    active_call_b
-        .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
-        .await
-        .unwrap();
-
-    let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
-    let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
-
-    let editor_a = workspace_a
-        .update(cx_a, |workspace, cx| {
-            workspace.open_path((worktree_id, "main.rs"), None, true, cx)
-        })
-        .await
-        .unwrap()
-        .downcast::<Editor>()
-        .unwrap();
-
-    let editor_b = workspace_b
-        .update(cx_b, |workspace, cx| {
-            workspace.open_path((worktree_id, "main.rs"), None, true, cx)
-        })
-        .await
-        .unwrap()
-        .downcast::<Editor>()
-        .unwrap();
-
-    let mut editor_cx_a = EditorTestContext {
-        cx: cx_a.clone(),
-        window: cx_a.handle(),
-        editor: editor_a,
-        assertion_cx: AssertionContextManager::new(),
-    };
-    let mut editor_cx_b = EditorTestContext {
-        cx: cx_b.clone(),
-        window: cx_b.handle(),
-        editor: editor_b,
-        assertion_cx: AssertionContextManager::new(),
-    };
-
-    // host edits the file, that differs from the base text, producing diff hunks
-    editor_cx_a.set_state(indoc! {r#"struct Row;
-        struct Row0.1;
-        struct Row0.2;
-        struct Row1;
-
-        struct Row4;
-        struct Row5444;
-        struct Row6;
-
-        struct Row9;
-        struct Row1220;ˇ"#});
-    editor_cx_a.update_editor(|editor, cx| {
-        editor
-            .buffer()
-            .read(cx)
-            .as_singleton()
-            .unwrap()
-            .update(cx, |buffer, cx| {
-                buffer.set_diff_base(Some(base_text.into()), cx);
-            });
-    });
-    editor_cx_b.update_editor(|editor, cx| {
-        editor
-            .buffer()
-            .read(cx)
-            .as_singleton()
-            .unwrap()
-            .update(cx, |buffer, cx| {
-                buffer.set_diff_base(Some(base_text.into()), cx);
-            });
-    });
-    cx_a.executor().run_until_parked();
-    cx_b.executor().run_until_parked();
-
-    // the client selects a range in the updated buffer, expands it to see the diff for each hunk in the selection
-    // the host does not see the diffs toggled
-    editor_cx_b.set_selections_state(indoc! {r#"«ˇstruct Row;
-        struct Row0.1;
-        struct Row0.2;
-        struct Row1;
-
-        struct Row4;
-        struct Row5444;
-        struct Row6;
-
-        struct R»ow9;
-        struct Row1220;"#});
-    editor_cx_b
-        .update_editor(|editor, cx| editor.toggle_hunk_diff(&editor::actions::ToggleHunkDiff, cx));
-    cx_a.executor().run_until_parked();
-    cx_b.executor().run_until_parked();
-    editor_cx_a.update_editor(|editor, cx| {
-        let snapshot = editor.snapshot(cx);
-        let all_hunks = editor_hunks(editor, &snapshot, cx);
-        let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
-        assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
-        assert_eq!(
-            all_hunks,
-            vec![
-                (
-                    "".to_string(),
-                    DiffHunkStatus::Added,
-                    DisplayRow(1)..DisplayRow(3)
-                ),
-                (
-                    "struct Row2;\n".to_string(),
-                    DiffHunkStatus::Removed,
-                    DisplayRow(4)..DisplayRow(4)
-                ),
-                (
-                    "struct Row5;\n".to_string(),
-                    DiffHunkStatus::Modified,
-                    DisplayRow(6)..DisplayRow(7)
-                ),
-                (
-                    "struct Row8;\n".to_string(),
-                    DiffHunkStatus::Removed,
-                    DisplayRow(9)..DisplayRow(9)
-                ),
-                (
-                    "struct Row10;".to_string(),
-                    DiffHunkStatus::Modified,
-                    DisplayRow(10)..DisplayRow(10),
-                ),
-            ]
-        );
-        assert_eq!(all_expanded_hunks, Vec::new());
-    });
-    editor_cx_b.update_editor(|editor, cx| {
-        let snapshot = editor.snapshot(cx);
-        let all_hunks = editor_hunks(editor, &snapshot, cx);
-        let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
-        assert_eq!(
-            expanded_hunks_background_highlights(editor, cx),
-            vec![DisplayRow(1)..=DisplayRow(2), DisplayRow(8)..=DisplayRow(8)],
-        );
-        assert_eq!(
-            all_hunks,
-            vec![
-                (
-                    "".to_string(),
-                    DiffHunkStatus::Added,
-                    DisplayRow(1)..DisplayRow(3)
-                ),
-                (
-                    "struct Row2;\n".to_string(),
-                    DiffHunkStatus::Removed,
-                    DisplayRow(5)..DisplayRow(5)
-                ),
-                (
-                    "struct Row5;\n".to_string(),
-                    DiffHunkStatus::Modified,
-                    DisplayRow(8)..DisplayRow(9)
-                ),
-                (
-                    "struct Row8;\n".to_string(),
-                    DiffHunkStatus::Removed,
-                    DisplayRow(12)..DisplayRow(12)
-                ),
-                (
-                    "struct Row10;".to_string(),
-                    DiffHunkStatus::Modified,
-                    DisplayRow(13)..DisplayRow(13),
-                ),
-            ]
-        );
-        assert_eq!(all_expanded_hunks, &all_hunks[..all_hunks.len() - 1]);
-    });
-
-    // the client reverts the hunks, removing the expanded diffs too
-    // both host and the client observe the reverted state (with one hunk left, not covered by client's selection)
-    editor_cx_b.update_editor(|editor, cx| {
-        editor.revert_selected_hunks(&RevertSelectedHunks, cx);
-    });
-    cx_a.executor().run_until_parked();
-    cx_b.executor().run_until_parked();
-    editor_cx_a.update_editor(|editor, cx| {
-        let snapshot = editor.snapshot(cx);
-        let all_hunks = editor_hunks(editor, &snapshot, cx);
-        let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
-        assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
-        assert_eq!(
-            all_hunks,
-            vec![(
-                "struct Row10;".to_string(),
-                DiffHunkStatus::Modified,
-                DisplayRow(10)..DisplayRow(10),
-            )]
-        );
-        assert_eq!(all_expanded_hunks, Vec::new());
-    });
-    editor_cx_b.update_editor(|editor, cx| {
-        let snapshot = editor.snapshot(cx);
-        let all_hunks = editor_hunks(editor, &snapshot, cx);
-        let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
-        assert_eq!(
-            expanded_hunks_background_highlights(editor, cx),
-            vec![DisplayRow(5)..=DisplayRow(5)]
-        );
-        assert_eq!(
-            all_hunks,
-            vec![(
-                "struct Row10;".to_string(),
-                DiffHunkStatus::Modified,
-                DisplayRow(10)..DisplayRow(10),
-            )]
-        );
-        assert_eq!(all_expanded_hunks, Vec::new());
-    });
-    editor_cx_a.assert_editor_state(indoc! {r#"struct Row;
-        struct Row1;
-        struct Row2;
-
-        struct Row4;
-        struct Row5;
-        struct Row6;
-
-        struct Row8;
-        struct Row9;
-        struct Row1220;ˇ"#});
-    editor_cx_b.assert_editor_state(indoc! {r#"«ˇstruct Row;
-        struct Row1;
-        struct Row2;
-
-        struct Row4;
-        struct Row5;
-        struct Row6;
-
-        struct Row8;
-        struct R»ow9;
-        struct Row1220;"#});
-}

 #[gpui::test(iterations = 10)]
 async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
     let mut server = TestServer::start(cx_a.executor()).await;
@@ -2338,7 +2046,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
     .unwrap();

     // Join the project as client B.
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
     let editor_b = workspace_b
         .update(cx_b, |workspace, cx| {

@@ -74,7 +74,7 @@ async fn test_basic_following(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     active_call_b
         .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
         .await
@@ -162,7 +162,7 @@ async fn test_basic_following(

     executor.run_until_parked();
     let active_call_c = cx_c.read(ActiveCall::global);
-    let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
+    let project_c = client_c.join_remote_project(project_id, cx_c).await;
     let (workspace_c, cx_c) = client_c.build_workspace(&project_c, cx_c);
     active_call_c
         .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx))
@@ -175,7 +175,7 @@ async fn test_basic_following(

     cx_d.executor().run_until_parked();
     let active_call_d = cx_d.read(ActiveCall::global);
-    let project_d = client_d.build_dev_server_project(project_id, cx_d).await;
+    let project_d = client_d.join_remote_project(project_id, cx_d).await;
     let (workspace_d, cx_d) = client_d.build_workspace(&project_d, cx_d);
     active_call_d
         .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx))
@@ -289,7 +289,7 @@ async fn test_basic_following(
             .get_open_buffer(&(worktree_id, "2.txt").into(), cx)
             .unwrap()
     });
-    let mut result = MultiBuffer::new(0, Capability::ReadWrite);
+    let mut result = MultiBuffer::new(Capability::ReadWrite);
     result.push_excerpts(
         buffer_a1,
         [ExcerptRange {
@@ -569,7 +569,7 @@ async fn test_following_tab_order(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     active_call_b
         .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
         .await
@@ -686,7 +686,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
     .unwrap();

     // Client B joins the project.
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     active_call_b
         .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
         .await
@@ -1199,7 +1199,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     active_call_b
         .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
         .await
@@ -1335,7 +1335,7 @@ async fn test_peers_simultaneously_following_each_other(
     .await
     .unwrap();

-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);

     executor.run_until_parked();
@@ -1685,7 +1685,7 @@ async fn test_following_into_excluded_file(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     active_call_b
         .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
         .await

@@ -28,8 +28,8 @@ use live_kit_client::MacOSDisplay;
 use lsp::LanguageServerId;
 use parking_lot::Mutex;
 use project::{
-    search::SearchQuery, search::SearchResult, DiagnosticSummary, FormatTrigger, HoverBlockKind,
-    Project, ProjectPath,
+    lsp_store::FormatTrigger, search::SearchQuery, search::SearchResult, DiagnosticSummary,
+    HoverBlockKind, Project, ProjectPath,
 };
 use rand::prelude::*;
 use serde_json::json;
@@ -1372,7 +1372,7 @@ async fn test_unshare_project(
     .unwrap();

     let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     executor.run_until_parked();

     assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer()));
@@ -1392,7 +1392,7 @@ async fn test_unshare_project(
     assert!(project_b.read_with(cx_b, |project, _| project.is_disconnected()));

     // Client C opens the project.
-    let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
+    let project_c = client_c.join_remote_project(project_id, cx_c).await;

     // When client A unshares the project, client C's project becomes read-only.
     project_a
@@ -1409,7 +1409,7 @@ async fn test_unshare_project(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_c2 = client_c.build_dev_server_project(project_id, cx_c).await;
+    let project_c2 = client_c.join_remote_project(project_id, cx_c).await;
     executor.run_until_parked();

     assert!(worktree_a.read_with(cx_a, |tree, _| tree.has_update_observer()));
@@ -1514,9 +1514,9 @@ async fn test_project_reconnect(
     .await
     .unwrap();

-    let project_b1 = client_b.build_dev_server_project(project1_id, cx_b).await;
-    let project_b2 = client_b.build_dev_server_project(project2_id, cx_b).await;
-    let project_b3 = client_b.build_dev_server_project(project3_id, cx_b).await;
+    let project_b1 = client_b.join_remote_project(project1_id, cx_b).await;
+    let project_b2 = client_b.join_remote_project(project2_id, cx_b).await;
+    let project_b3 = client_b.join_remote_project(project3_id, cx_b).await;
     executor.run_until_parked();

     let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| {
@@ -2310,8 +2310,8 @@ async fn test_propagate_saves_and_fs_changes(
     .unwrap();

     // Join that worktree as clients B and C.
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
-    let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
+    let project_c = client_c.join_remote_project(project_id, cx_c).await;

     let worktree_b = project_b.read_with(cx_b, |p, cx| p.worktrees(cx).next().unwrap());

@@ -2535,7 +2535,7 @@ async fn test_git_diff_base_change(
     .await
     .unwrap();

-    let project_remote = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_remote = client_b.join_remote_project(project_id, cx_b).await;

     let diff_base = "
         one
@@ -2791,7 +2791,7 @@ async fn test_git_branch_name(
     .await
     .unwrap();

-    let project_remote = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_remote = client_b.join_remote_project(project_id, cx_b).await;
     client_a
         .fs()
         .set_branch_name(Path::new("/dir/.git"), Some("branch-1"));
@@ -2836,7 +2836,7 @@ async fn test_git_branch_name(
         assert_branch(Some("branch-2"), project, cx)
     });

-    let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await;
+    let project_remote_c = client_c.join_remote_project(project_id, cx_c).await;
     executor.run_until_parked();

     project_remote_c.read_with(cx_c, |project, cx| {
@@ -2891,7 +2891,7 @@ async fn test_git_status_sync(
     .await
     .unwrap();

-    let project_remote = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_remote = client_b.join_remote_project(project_id, cx_b).await;

     // Wait for it to catch up to the new status
     executor.run_until_parked();
@@ -2967,7 +2967,7 @@ async fn test_git_status_sync(
     });

     // And synchronization while joining
-    let project_remote_c = client_c.build_dev_server_project(project_id, cx_c).await;
+    let project_remote_c = client_c.join_remote_project(project_id, cx_c).await;
     executor.run_until_parked();

     project_remote_c.read_with(cx_c, |project, cx| {
@@ -3015,7 +3015,7 @@ async fn test_fs_operations(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
     let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap());
@@ -3316,7 +3316,7 @@ async fn test_local_settings(
     executor.run_until_parked();

     // As client B, join that project and observe the local settings.
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     let worktree_b = project_b.read_with(cx_b, |project, cx| project.worktrees(cx).next().unwrap());
     executor.run_until_parked();
@@ -3439,7 +3439,7 @@ async fn test_buffer_conflict_after_save(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open a buffer as client B
     let buffer_b = project_b
@@ -3503,7 +3503,7 @@ async fn test_buffer_reloading(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open a buffer as client B
     let buffer_b = project_b
@@ -3557,7 +3557,7 @@ async fn test_editing_while_guest_opens_buffer(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open a buffer as client A
     let buffer_a = project_a
@@ -3605,7 +3605,7 @@ async fn test_leaving_worktree_while_opening_buffer(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // See that a guest has joined as client A.
     executor.run_until_parked();
@@ -3652,7 +3652,7 @@ async fn test_canceling_buffer_opening(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     let buffer_a = project_a
         .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
@@ -3709,8 +3709,8 @@ async fn test_leaving_project(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b1 = client_b.build_dev_server_project(project_id, cx_b).await;
-    let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
+    let project_b1 = client_b.join_remote_project(project_id, cx_b).await;
+    let project_c = client_c.join_remote_project(project_id, cx_c).await;

     // Client A sees that a guest has joined.
     executor.run_until_parked();
@@ -3751,7 +3751,7 @@ async fn test_leaving_project(
     });

     // Client B re-joins the project and can open buffers as before.
-    let project_b2 = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b2 = client_b.join_remote_project(project_id, cx_b).await;
     executor.run_until_parked();

     project_a.read_with(cx_a, |project, _| {
@@ -3927,7 +3927,7 @@ async fn test_collaborating_with_diagnostics(
     );

     // Join the worktree as client B.
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Wait for server to see the diagnostics update.
     executor.run_until_parked();
@@ -3952,7 +3952,7 @@ async fn test_collaborating_with_diagnostics(
     });

     // Join project as client C and observe the diagnostics.
-    let project_c = client_c.build_dev_server_project(project_id, cx_c).await;
+    let project_c = client_c.join_remote_project(project_id, cx_c).await;
     executor.run_until_parked();
     let project_c_diagnostic_summaries =
         Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| {
@@ -4160,7 +4160,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
     .unwrap();

     // Join the project as client B and open all three files.
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     let guest_buffers = futures::future::try_join_all(file_names.iter().map(|file_name| {
         project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, file_name), cx))
     }))
@@ -4266,7 +4266,7 @@ async fn test_reloading_buffer_manually(
     .await
     .unwrap();

-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
     let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
@@ -4364,7 +4364,7 @@ async fn test_formatting_buffer(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
     let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
@@ -4486,7 +4486,7 @@ async fn test_prettier_formatting_buffer(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx));
     let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();

@@ -4599,7 +4599,7 @@ async fn test_definition(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open the file on client B.
     let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
@@ -4744,7 +4744,7 @@ async fn test_references(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open the file on client B.
     let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx));
@@ -4901,7 +4901,7 @@ async fn test_project_search(
     .await
     .unwrap();

-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Perform a search as the guest.
     let mut results = HashMap::default();
@@ -4991,7 +4991,7 @@ async fn test_document_highlights(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open the file on client B.
     let open_b = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
@@ -5109,7 +5109,7 @@ async fn test_lsp_hover(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Open the file as the guest
     let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
@@ -5286,7 +5286,7 @@ async fn test_project_symbols(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Cause the language server to start.
     let open_buffer_task =
@@ -5381,7 +5381,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
        .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     let open_buffer_task = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
     let buffer_b1 = cx_b.executor().spawn(open_buffer_task).await.unwrap();
@@ -6470,7 +6470,7 @@ async fn test_context_collaboration_with_reconnect(
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
         .await
         .unwrap();
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;

     // Client A sees that a guest has joined.
     executor.run_until_parked();

@@ -298,8 +298,7 @@ impl RandomizedTest for ProjectCollaborationTest {
                     continue;
                 };
                 let project_root_name = root_name_for_project(&project, cx);
-                let is_local =
-                    project.read_with(cx, |project, _| project.is_local_or_ssh());
+                let is_local = project.read_with(cx, |project, _| project.is_local());
                 let worktree = project.read_with(cx, |project, cx| {
                     project
                         .worktrees(cx)
@@ -335,7 +334,7 @@ impl RandomizedTest for ProjectCollaborationTest {
                     continue;
                 };
                 let project_root_name = root_name_for_project(&project, cx);
-                let is_local = project.read_with(cx, |project, _| project.is_local_or_ssh());
+                let is_local = project.read_with(cx, |project, _| project.is_local());

                 match rng.gen_range(0..100_u32) {
                     // Manipulate an existing buffer
@@ -1256,7 +1255,7 @@ impl RandomizedTest for ProjectCollaborationTest {
         let buffers = client.buffers().clone();
         for (guest_project, guest_buffers) in &buffers {
             let project_id = if guest_project.read_with(client_cx, |project, _| {
-                project.is_local_or_ssh() || project.is_disconnected()
+                project.is_local() || project.is_disconnected()
             }) {
                 continue;
             } else {
@@ -1560,9 +1559,7 @@ async fn ensure_project_shared(
     let first_root_name = root_name_for_project(project, cx);
     let active_call = cx.read(ActiveCall::global);
     if active_call.read_with(cx, |call, _| call.room().is_some())
-        && project.read_with(cx, |project, _| {
-            project.is_local_or_ssh() && !project.is_shared()
-        })
+        && project.read_with(cx, |project, _| project.is_local() && !project.is_shared())
     {
         match active_call
             .update(cx, |call, cx| call.share_project(project.clone(), cx))

@@ -3,12 +3,13 @@ use call::ActiveCall;
 use fs::{FakeFs, Fs as _};
 use gpui::{Context as _, TestAppContext};
 use language::language_settings::all_language_settings;
+use project::ProjectPath;
 use remote::SshSession;
 use remote_server::HeadlessProject;
 use serde_json::json;
 use std::{path::Path, sync::Arc};

-#[gpui::test]
+#[gpui::test(iterations = 10)]
 async fn test_sharing_an_ssh_remote_project(
     cx_a: &mut TestAppContext,
     cx_b: &mut TestAppContext,
@@ -54,7 +55,7 @@ async fn test_sharing_an_ssh_remote_project(
         .build_ssh_project("/code/project1", client_ssh, cx_a)
         .await;

-    // User A shares the remote project.
+    // While the SSH worktree is being scanned, user A shares the remote project.
     let active_call_a = cx_a.read(ActiveCall::global);
     let project_id = active_call_a
         .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
@@ -62,12 +63,30 @@ async fn test_sharing_an_ssh_remote_project(
         .unwrap();

     // User B joins the project.
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
+    let project_b = client_b.join_remote_project(project_id, cx_b).await;
     let worktree_b = project_b
         .update(cx_b, |project, cx| project.worktree_for_id(worktree_id, cx))
         .unwrap();

+    let worktree_a = project_a
+        .update(cx_a, |project, cx| project.worktree_for_id(worktree_id, cx))
+        .unwrap();
+
+    executor.run_until_parked();
+
+    worktree_a.update(cx_a, |worktree, _cx| {
+        assert_eq!(
+            worktree.paths().map(Arc::as_ref).collect::<Vec<_>>(),
+            vec![
+                Path::new(".zed"),
+                Path::new(".zed/settings.json"),
+                Path::new("README.md"),
+                Path::new("src"),
+                Path::new("src/lib.rs"),
+            ]
+        );
+    });
+
     worktree_b.update(cx_b, |worktree, _cx| {
         assert_eq!(
             worktree.paths().map(Arc::as_ref).collect::<Vec<_>>(),
@@ -102,19 +121,41 @@ async fn test_sharing_an_ssh_remote_project(
             all_language_settings(file, cx)
                 .language(Some(&("Rust".into())))
                 .language_servers,
-            ["override-rust-analyzer".into()]
+            ["override-rust-analyzer".to_string()]
         )
     });

     project_b
-        .update(cx_b, |project, cx| project.save_buffer(buffer_b, cx))
+        .update(cx_b, |project, cx| {
+            project.save_buffer_as(
+                buffer_b.clone(),
+                ProjectPath {
+                    worktree_id: worktree_id.to_owned(),
+                    path: Arc::from(Path::new("src/renamed.rs")),
+                },
+                cx,
+            )
+        })
         .await
         .unwrap();
     assert_eq!(
         remote_fs
-            .load("/code/project1/src/lib.rs".as_ref())
+            .load("/code/project1/src/renamed.rs".as_ref())
            .await
            .unwrap(),
        "fn one() -> usize { 100 }"
    );
+    cx_b.run_until_parked();
+    cx_b.update(|cx| {
+        assert_eq!(
+            buffer_b
+                .read(cx)
+                .file()
+                .unwrap()
+                .path()
+                .to_string_lossy()
+                .to_string(),
+            "src/renamed.rs".to_string()
+        );
+    });
 }
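The added assertions hinge on save_buffer_as rebinding the buffer to its new ProjectPath. A standalone sketch of just the path expectation (values copied from the test; no Zed types involved):

use std::path::Path;

fn main() {
    // After "save as", the on-disk path and the buffer's worktree-relative
    // path should both reflect the new name.
    let on_disk = Path::new("/code/project1/src/renamed.rs");
    let in_worktree = on_disk.strip_prefix("/code/project1").unwrap();
    assert_eq!(in_worktree, Path::new("src/renamed.rs"));
}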
|
||||
|
||||
@@ -21,7 +21,7 @@ use git::GitHostingProviderRegistry;
|
||||
use gpui::{BackgroundExecutor, Context, Model, Task, TestAppContext, View, VisualTestContext};
|
||||
use http_client::FakeHttpClient;
|
||||
use language::LanguageRegistry;
|
||||
use node_runtime::FakeNodeRuntime;
|
||||
use node_runtime::NodeRuntime;
|
||||
use notifications::NotificationStore;
|
||||
use parking_lot::Mutex;
|
||||
use project::{Project, WorktreeId};
|
||||
@@ -278,7 +278,7 @@ impl TestServer {
|
||||
languages: language_registry,
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _| Default::default(),
|
||||
node_runtime: FakeNodeRuntime::new(),
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
session,
|
||||
});
|
||||
|
||||
@@ -408,7 +408,7 @@ impl TestServer {
|
||||
languages: language_registry,
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _| Default::default(),
|
||||
node_runtime: FakeNodeRuntime::new(),
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
session,
|
||||
});
|
||||
|
||||
@@ -679,8 +679,6 @@ impl TestServer {
|
||||
stripe_api_key: None,
|
||||
stripe_price_id: None,
|
||||
supermaven_admin_api_key: None,
|
||||
runpod_api_key: None,
|
||||
runpod_api_summary_url: None,
|
||||
user_backfiller_github_access_token: None,
|
||||
},
|
||||
})
|
||||
@@ -921,7 +919,7 @@ impl TestClient {
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn build_dev_server_project(
|
||||
pub async fn join_remote_project(
|
||||
&self,
|
||||
host_project_id: u64,
|
||||
guest_cx: &mut TestAppContext,
|
||||
|
||||
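Editorial note on the pattern above: the test setup swaps `FakeNodeRuntime::new()` (an `Arc<dyn NodeRuntime>` trait object) for `NodeRuntime::unavailable()`, a concrete handle that fails fast when no runtime exists. The following is an illustrative sketch only, with made-up type internals — not Zed's actual definitions — of how such a handle can be modeled:

    use std::sync::Arc;

    trait NodeRuntimeTrait {
        fn binary_path(&self) -> Result<String, String>;
    }

    struct RealNodeRuntime;

    impl NodeRuntimeTrait for RealNodeRuntime {
        fn binary_path(&self) -> Result<String, String> {
            Ok("/usr/bin/node".to_string()) // hypothetical path
        }
    }

    struct UnavailableNodeRuntime;

    impl NodeRuntimeTrait for UnavailableNodeRuntime {
        fn binary_path(&self) -> Result<String, String> {
            Err("node runtime is unavailable".to_string())
        }
    }

    // The concrete handle wraps the trait object, so callers pass `NodeRuntime`
    // by value instead of threading `Arc<dyn NodeRuntime>` everywhere.
    #[derive(Clone)]
    struct NodeRuntime(Arc<dyn NodeRuntimeTrait + Send + Sync>);

    impl NodeRuntime {
        fn unavailable() -> Self {
            NodeRuntime(Arc::new(UnavailableNodeRuntime))
        }
    }

    fn main() {
        let real = NodeRuntime(Arc::new(RealNodeRuntime));
        assert!(real.0.binary_path().is_ok());
        // Tests construct a runtime that errors fast instead of faking behavior.
        let runtime = NodeRuntime::unavailable();
        assert!(runtime.0.binary_path().is_err());
    }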
@@ -239,7 +239,6 @@ pub struct Resource {
pub struct ResourceContent {
pub uri: Url,
pub mime_type: Option<String>,
pub content_type: String,
pub text: Option<String>,
pub data: Option<String>,
}

@@ -37,7 +37,6 @@ fs.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
isahc.workspace = true
language.workspace = true
lsp.workspace = true
menu.workspace = true

@@ -57,7 +57,7 @@ pub fn init(
new_server_id: LanguageServerId,
fs: Arc<dyn Fs>,
http: Arc<dyn HttpClient>,
node_runtime: Arc<dyn NodeRuntime>,
node_runtime: NodeRuntime,
cx: &mut AppContext,
) {
copilot_chat::init(fs, http.clone(), cx);
@@ -302,7 +302,7 @@ pub struct Completion {

pub struct Copilot {
http: Arc<dyn HttpClient>,
node_runtime: Arc<dyn NodeRuntime>,
node_runtime: NodeRuntime,
server: CopilotServer,
buffers: HashSet<WeakModel<Buffer>>,
server_id: LanguageServerId,
@@ -334,7 +334,7 @@ impl Copilot {
fn start(
new_server_id: LanguageServerId,
http: Arc<dyn HttpClient>,
node_runtime: Arc<dyn NodeRuntime>,
node_runtime: NodeRuntime,
cx: &mut ModelContext<Self>,
) -> Self {
let mut this = Self {
@@ -392,7 +392,7 @@ impl Copilot {
#[cfg(any(test, feature = "test-support"))]
pub fn fake(cx: &mut gpui::TestAppContext) -> (Model<Self>, lsp::FakeLanguageServer) {
use lsp::FakeLanguageServer;
use node_runtime::FakeNodeRuntime;
use node_runtime::NodeRuntime;

let (server, fake_server) = FakeLanguageServer::new(
LanguageServerId(0),
@@ -406,7 +406,7 @@ impl Copilot {
cx.to_async(),
);
let http = http_client::FakeHttpClient::create(|_| async { unreachable!() });
let node_runtime = FakeNodeRuntime::new();
let node_runtime = NodeRuntime::unavailable();
let this = cx.new_model(|cx| Self {
server_id: LanguageServerId(0),
http: http.clone(),
@@ -425,7 +425,7 @@ impl Copilot {
async fn start_language_server(
new_server_id: LanguageServerId,
http: Arc<dyn HttpClient>,
node_runtime: Arc<dyn NodeRuntime>,
node_runtime: NodeRuntime,
this: WeakModel<Self>,
mut cx: AsyncAppContext,
) {

@@ -7,8 +7,7 @@ use chrono::DateTime;
use fs::Fs;
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
use gpui::{AppContext, AsyncAppContext, Global};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use isahc::config::Configurable;
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
use paths::home_dir;
use serde::{Deserialize, Serialize};
use settings::watch_config_file;
@@ -275,7 +274,7 @@ async fn request_api_token(
.header("Accept", "application/json");

if let Some(low_speed_timeout) = low_speed_timeout {
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
request_builder = request_builder.read_timeout(low_speed_timeout);
}

let request = request_builder.body(AsyncBody::empty())?;
@@ -332,7 +331,7 @@ async fn stream_completion(
.header("Copilot-Integration-Id", "vscode-chat");

if let Some(low_speed_timeout) = low_speed_timeout {
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
request_builder = request_builder.read_timeout(low_speed_timeout);
}
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;

@@ -767,7 +767,7 @@ mod tests {
let buffer_1 = cx.new_model(|cx| Buffer::local("a = 1\nb = 2\n", cx));
let buffer_2 = cx.new_model(|cx| Buffer::local("c = 3\nd = 4\n", cx));
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite);
let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
multibuffer.push_excerpts(
buffer_1.clone(),
[ExcerptRange {
@@ -1018,7 +1018,7 @@ mod tests {
.unwrap();

let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite);
let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
multibuffer.push_excerpts(
private_buffer.clone(),
[ExcerptRange {

@@ -156,12 +156,7 @@ impl ProjectDiagnosticsEditor {
cx.on_focus_out(&focus_handle, |this, _event, cx| this.focus_out(cx))
.detach();

let excerpts = cx.new_model(|cx| {
MultiBuffer::new(
project_handle.read(cx).replica_id(),
project_handle.read(cx).capability(),
)
});
let excerpts = cx.new_model(|cx| MultiBuffer::new(project_handle.read(cx).capability()));
let editor = cx.new_view(|cx| {
let mut editor =
Editor::for_multibuffer(excerpts.clone(), Some(project_handle.clone()), false, cx);

@@ -24,7 +24,8 @@ test-support = [
"workspace/test-support",
"tree-sitter-rust",
"tree-sitter-typescript",
"tree-sitter-html"
"tree-sitter-html",
"unindent",
]

[dependencies]
@@ -54,6 +55,7 @@ markdown.workspace = true
multi_buffer.workspace = true
ordered-float.workspace = true
parking_lot.workspace = true
pretty_assertions.workspace = true
project.workspace = true
rand.workspace = true
rpc.workspace = true
@@ -74,6 +76,7 @@ theme.workspace = true
tree-sitter-html = { workspace = true, optional = true }
tree-sitter-rust = { workspace = true, optional = true }
tree-sitter-typescript = { workspace = true, optional = true }
unindent = { workspace = true, optional = true }
ui.workspace = true
url.workspace = true
util.workspace = true

@@ -230,7 +230,11 @@ gpui::actions!(
ExpandMacroRecursively,
FindAllReferences,
Fold,
FoldAll,
FoldRecursive,
FoldSelectedRanges,
ToggleFold,
ToggleFoldRecursive,
Format,
GoToDeclaration,
GoToDeclarationSplit,
@@ -273,6 +277,7 @@ gpui::actions!(
NextScreen,
OpenExcerpts,
OpenExcerptsSplit,
OpenProposedChangesEditor,
OpenFile,
OpenPermalinkToLine,
OpenUrl,
@@ -339,7 +344,9 @@ gpui::actions!(
Transpose,
Undo,
UndoSelection,
UnfoldAll,
UnfoldLines,
UnfoldRecursive,
UniqueLinesCaseInsensitive,
UniqueLinesCaseSensitive,
]

@@ -1360,7 +1360,7 @@ impl<'a> Iterator for BlockBufferRows<'a> {
impl sum_tree::Item for Transform {
type Summary = TransformSummary;

fn summary(&self) -> Self::Summary {
fn summary(&self, _cx: &()) -> Self::Summary {
self.summary.clone()
}
}
@@ -1671,7 +1671,7 @@ mod tests {

let mut excerpt_ids = Vec::new();
let multi_buffer = cx.new_model(|cx| {
let mut multi_buffer = MultiBuffer::new(0, Capability::ReadWrite);
let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite);
excerpt_ids.extend(multi_buffer.push_excerpts(
buffer1.clone(),
[ExcerptRange {

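Editorial note: the recurring `fn summary(&self)` → `fn summary(&self, _cx: &...)` edits in this branch thread a context argument through `sum_tree::Item`. A minimal sketch of the assumed trait shape (simplified; not the real `sum_tree` API), showing why a context-free item just takes `&()`:

    // Items whose summaries depend on external state (e.g. a snapshot) receive it
    // via the context parameter; items that already store their summary take `&()`.
    trait Item {
        type Summary;
        type Context;
        fn summary(&self, cx: &Self::Context) -> Self::Summary;
    }

    struct Transform {
        len: usize,
    }

    impl Item for Transform {
        type Summary = usize;
        type Context = (); // context-free: the summary is stored on the item itself
        fn summary(&self, _cx: &()) -> usize {
            self.len
        }
    }

    fn main() {
        let t = Transform { len: 42 };
        assert_eq!(t.summary(&()), 42);
    }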
@@ -69,7 +69,7 @@ impl CreaseSnapshot {
&'a self,
range: Range<MultiBufferRow>,
snapshot: &'a MultiBufferSnapshot,
) -> impl '_ + Iterator<Item = &'a Crease> {
) -> impl 'a + Iterator<Item = &'a Crease> {
let start = snapshot.anchor_before(Point::new(range.start.0, 0));
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
cursor.seek(&start, Bias::Left, snapshot);
@@ -291,7 +291,7 @@ impl sum_tree::Summary for ItemSummary {
impl sum_tree::Item for CreaseItem {
type Summary = ItemSummary;

fn summary(&self) -> Self::Summary {
fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary {
ItemSummary {
range: self.crease.range.clone(),
}

@@ -944,7 +944,7 @@ struct TransformSummary {
impl sum_tree::Item for Transform {
type Summary = TransformSummary;

fn summary(&self) -> Self::Summary {
fn summary(&self, _cx: &()) -> Self::Summary {
self.summary.clone()
}
}
@@ -1004,7 +1004,7 @@ impl Default for FoldRange {
impl sum_tree::Item for Fold {
type Summary = FoldSummary;

fn summary(&self) -> Self::Summary {
fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary {
FoldSummary {
start: self.range.start,
end: self.range.end,

@@ -74,7 +74,7 @@ impl Inlay {
impl sum_tree::Item for Transform {
type Summary = TransformSummary;

fn summary(&self) -> Self::Summary {
fn summary(&self, _cx: &()) -> Self::Summary {
match self {
Transform::Isomorphic(summary) => TransformSummary {
input: summary.clone(),

@@ -917,7 +917,7 @@ impl Transform {
impl sum_tree::Item for Transform {
type Summary = TransformSummary;

fn summary(&self) -> Self::Summary {
fn summary(&self, _cx: &()) -> Self::Summary {
self.summary.clone()
}
}

File diff suppressed because it is too large
@@ -13,7 +13,6 @@ pub struct EditorSettings {
pub show_completions_on_input: bool,
pub show_completion_documentation: bool,
pub completion_documentation_secondary_query_debounce: u64,
pub use_on_type_format: bool,
pub toolbar: Toolbar,
pub scrollbar: Scrollbar,
pub gutter: Gutter,
@@ -209,11 +208,6 @@ pub struct EditorSettingsContent {
///
/// Default: 300 ms
pub completion_documentation_secondary_query_debounce: Option<u64>,
/// Whether to use additional LSP queries to format (and amend) the code after
/// every "trigger" symbol input, defined by LSP server capabilities.
///
/// Default: true
pub use_on_type_format: Option<bool>,
/// Toolbar related settings
pub toolbar: Option<ToolbarContent>,
/// Scrollbar related settings

File diff suppressed because it is too large
@@ -7,14 +7,11 @@ use crate::{
CurrentLineHighlight, DoubleClickInMultibuffer, MultiCursorModifier, ScrollBeyondLastLine,
ShowScrollbar,
},
git::{
blame::{CommitDetails, GitBlame},
diff_hunk_to_display, DisplayDiffHunk,
},
git::blame::{CommitDetails, GitBlame},
hover_popover::{
self, hover_at, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
},
hunk_diff::ExpandedHunk,
hunk_diff::{diff_hunk_to_display, DisplayDiffHunk},
hunk_status,
items::BufferSearchHighlights,
mouse_context_menu::{self, MenuPosition, MouseContextMenu},
@@ -23,8 +20,8 @@ use crate::{
DocumentHighlightRead, DocumentHighlightWrite, Editor, EditorMode, EditorSettings,
EditorSnapshot, EditorStyle, ExpandExcerpts, FocusedBlock, GutterDimensions, HalfPageDown,
HalfPageUp, HandleInput, HoveredCursor, HoveredHunk, LineDown, LineUp, OpenExcerpts, PageDown,
PageUp, Point, RangeToAnchorExt, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap,
ToPoint, CURSORS_VISIBLE_FOR, MAX_LINE_LEN,
PageUp, Point, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, ToPoint,
CURSORS_VISIBLE_FOR, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED, MAX_LINE_LEN,
};
use client::ParticipantIndex;
use collections::{BTreeMap, HashMap};
@@ -305,7 +302,7 @@ impl EditorElement {
}
register_action(view, cx, Editor::go_to_diagnostic);
register_action(view, cx, Editor::go_to_prev_diagnostic);
register_action(view, cx, Editor::go_to_hunk);
register_action(view, cx, Editor::go_to_next_hunk);
register_action(view, cx, Editor::go_to_prev_hunk);
register_action(view, cx, |editor, a, cx| {
editor.go_to_definition(a, cx).detach_and_log_err(cx);
@@ -338,14 +335,21 @@ impl EditorElement {
register_action(view, cx, Editor::open_url);
register_action(view, cx, Editor::open_file);
register_action(view, cx, Editor::fold);
register_action(view, cx, Editor::fold_all);
register_action(view, cx, Editor::fold_at);
register_action(view, cx, Editor::fold_recursive);
register_action(view, cx, Editor::toggle_fold);
register_action(view, cx, Editor::toggle_fold_recursive);
register_action(view, cx, Editor::unfold_lines);
register_action(view, cx, Editor::unfold_recursive);
register_action(view, cx, Editor::unfold_all);
register_action(view, cx, Editor::unfold_at);
register_action(view, cx, Editor::fold_selected_ranges);
register_action(view, cx, Editor::show_completions);
register_action(view, cx, Editor::toggle_code_actions);
register_action(view, cx, Editor::open_excerpts);
register_action(view, cx, Editor::open_excerpts_in_split);
register_action(view, cx, Editor::open_proposed_changes_editor);
register_action(view, cx, Editor::toggle_soft_wrap);
register_action(view, cx, Editor::toggle_tab_bar);
register_action(view, cx, Editor::toggle_line_numbers);
@@ -491,28 +495,7 @@ impl EditorElement {
let mut modifiers = event.modifiers;

if let Some(hovered_hunk) = hovered_hunk {
if modifiers.control || modifiers.platform {
editor.toggle_hovered_hunk(&hovered_hunk, cx);
} else {
let display_range = hovered_hunk
.multi_buffer_range
.clone()
.to_display_points(&position_map.snapshot);
let hunk_bounds = Self::diff_hunk_bounds(
&position_map.snapshot,
position_map.line_height,
gutter_hitbox.bounds,
&DisplayDiffHunk::Unfolded {
diff_base_byte_range: hovered_hunk.diff_base_byte_range.clone(),
display_row_range: display_range.start.row()..display_range.end.row(),
multi_buffer_range: hovered_hunk.multi_buffer_range.clone(),
status: hovered_hunk.status,
},
);
if hunk_bounds.contains(&event.position) {
editor.open_hunk_context_menu(hovered_hunk, event.position, cx);
}
}
editor.toggle_hovered_hunk(&hovered_hunk, cx);
cx.notify();
return;
} else if gutter_hitbox.is_hovered(cx) {
@@ -983,7 +966,6 @@ impl EditorElement {
text_hitbox: &Hitbox,
content_origin: gpui::Point<Pixels>,
scroll_position: gpui::Point<f32>,
scroll_pixel_position: gpui::Point<Pixels>,
line_height: Pixels,
em_width: Pixels,
autoscroll_containing_element: bool,
@@ -1048,10 +1030,8 @@ impl EditorElement {
None
};

let x = cursor_character_x - scroll_pixel_position.x;
let y = (cursor_position.row().as_f32()
- scroll_pixel_position.y / line_height)
* line_height;
let x = cursor_character_x - scroll_position.x * em_width;
let y = (cursor_position.row().as_f32() - scroll_position.y) * line_height;
if selection.is_newest {
editor.pixel_position_of_newest_cursor = Some(point(
text_hitbox.origin.x + x + block_width / 2.,
@@ -1191,7 +1171,7 @@ impl EditorElement {
line_height: Pixels,
gutter_dimensions: &GutterDimensions,
gutter_settings: crate::editor_settings::Gutter,
scroll_pixel_position: gpui::Point<Pixels>,
scroll_position: gpui::Point<f32>,
gutter_hitbox: &Hitbox,
cx: &mut WindowContext,
) {
@@ -1268,6 +1248,7 @@ impl EditorElement {
line_height: Pixels,
gutter_hitbox: &Hitbox,
display_rows: Range<DisplayRow>,
anchor_range: Range<Anchor>,
snapshot: &EditorSnapshot,
cx: &mut WindowContext,
) -> Vec<(DisplayDiffHunk, Option<Hitbox>)> {
@@ -1288,30 +1269,84 @@ impl EditorElement {
.git
.git_gutter
.unwrap_or_default();
let display_hunks = buffer_snapshot
.git_diff_hunks_in_range(buffer_start_row..buffer_end_row)
.map(|hunk| diff_hunk_to_display(&hunk, snapshot))
.dedup()
.map(|hunk| match git_gutter_setting {
GitGutterSetting::TrackedFiles => {
let hitbox = match hunk {
DisplayDiffHunk::Unfolded { .. } => {
let hunk_bounds = Self::diff_hunk_bounds(
snapshot,
line_height,
gutter_hitbox.bounds,
&hunk,
);
Some(cx.insert_hitbox(hunk_bounds, true))

self.editor.update(cx, |editor, cx| {
let expanded_hunks = &editor.expanded_hunks.hunks;
let expanded_hunks_start_ix = expanded_hunks
.binary_search_by(|hunk| {
hunk.hunk_range
.end
.cmp(&anchor_range.start, &buffer_snapshot)
.then(Ordering::Less)
})
.unwrap_err();
let mut expanded_hunks = expanded_hunks[expanded_hunks_start_ix..].iter().peekable();

let display_hunks = buffer_snapshot
.git_diff_hunks_in_range(buffer_start_row..buffer_end_row)
.filter_map(|hunk| {
let display_hunk = diff_hunk_to_display(&hunk, snapshot);

if let DisplayDiffHunk::Unfolded {
multi_buffer_range,
status,
..
} = &display_hunk
{
let mut is_expanded = false;
while let Some(expanded_hunk) = expanded_hunks.peek() {
match expanded_hunk
.hunk_range
.start
.cmp(&multi_buffer_range.start, &buffer_snapshot)
{
Ordering::Less => {
expanded_hunks.next();
}
Ordering::Equal => {
is_expanded = true;
break;
}
Ordering::Greater => {
break;
}
}
}
DisplayDiffHunk::Folded { .. } => None,
};
(hunk, hitbox)
}
GitGutterSetting::Hide => (hunk, None),
})
.collect();
display_hunks
match status {
DiffHunkStatus::Added => {}
DiffHunkStatus::Modified => {}
DiffHunkStatus::Removed => {
if is_expanded {
return None;
}
}
}
}

Some(display_hunk)
})
.dedup()
.map(|hunk| match git_gutter_setting {
GitGutterSetting::TrackedFiles => {
let hitbox = match hunk {
DisplayDiffHunk::Unfolded { .. } => {
let hunk_bounds = Self::diff_hunk_bounds(
snapshot,
line_height,
gutter_hitbox.bounds,
&hunk,
);
Some(cx.insert_hitbox(hunk_bounds, true))
}
DisplayDiffHunk::Folded { .. } => None,
};
(hunk, hitbox)
}
GitGutterSetting::Hide => (hunk, None),
})
.collect();
display_hunks
})
}

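A note on the search trick used in the hunk-layout code above: forcing the comparator to never return `Equal` makes `binary_search_by` always "fail", and `unwrap_err()` then yields the partition point. A self-contained sketch of the same idiom on plain integers (not Zed code):

    use std::cmp::Ordering;

    fn main() {
        let ends = [3, 5, 5, 9, 12]; // sorted hunk-range ends
        let target = 5;

        // `.then(Ordering::Less)` maps Equal to Less, so the comparator never
        // returns Equal and `unwrap_err` gives the index of the first element
        // strictly greater than `target` (use `.then(Ordering::Greater)` for >=).
        let start_ix = ends
            .binary_search_by(|end| end.cmp(&target).then(Ordering::Less))
            .unwrap_err();

        assert_eq!(start_ix, 3);
        assert!(ends[start_ix..].iter().all(|end| *end > target));
    }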
#[allow(clippy::too_many_arguments)]
@@ -1413,7 +1448,7 @@ impl EditorElement {
AvailableSpace::MaxContent
};
let scroll_top = scroll_position.y * line_height;
let start_x = em_width * 1;
let start_x = em_width;

let mut last_used_color: Option<(PlayerColor, Oid)> = None;

@@ -1608,7 +1643,16 @@ impl EditorElement {
return None;
}
if snapshot.is_line_folded(multibuffer_row) {
return None;
// Skip folded indicators, unless it's the starting line of a fold.
if multibuffer_row
.0
.checked_sub(1)
.map_or(false, |previous_row| {
snapshot.is_line_folded(MultiBufferRow(previous_row))
})
{
return None;
}
}
let button = editor.render_run_indicator(
&self.style,
@@ -3186,7 +3230,7 @@ impl EditorElement {
Some((
hunk_bounds,
cx.theme().status().modified,
Corners::all(1. * line_height),
Corners::all(px(0.)),
))
}
DisplayDiffHunk::Unfolded { status, .. } => {
@@ -3194,12 +3238,12 @@ impl EditorElement {
DiffHunkStatus::Added => (
hunk_hitbox.bounds,
cx.theme().status().created,
Corners::all(0.05 * line_height),
Corners::all(px(0.)),
),
DiffHunkStatus::Modified => (
hunk_hitbox.bounds,
cx.theme().status().modified,
Corners::all(0.05 * line_height),
Corners::all(px(0.)),
),
DiffHunkStatus::Removed => (
Bounds::new(
@@ -3243,7 +3287,7 @@ impl EditorElement {
let start_y = display_row.as_f32() * line_height - scroll_top;
let end_y = start_y + line_height;

let width = 0.275 * line_height;
let width = Self::diff_hunk_strip_width(line_height);
let highlight_origin = gutter_bounds.origin + point(px(0.), start_y);
let highlight_size = size(width, end_y - start_y);
Bounds::new(highlight_origin, highlight_size)
@@ -3276,7 +3320,7 @@ impl EditorElement {
let start_y = start_row.as_f32() * line_height - scroll_top;
let end_y = end_row_in_current_excerpt.as_f32() * line_height - scroll_top;

let width = 0.275 * line_height;
let width = Self::diff_hunk_strip_width(line_height);
let highlight_origin = gutter_bounds.origin + point(px(0.), start_y);
let highlight_size = size(width, end_y - start_y);
Bounds::new(highlight_origin, highlight_size)
@@ -3288,7 +3332,7 @@ impl EditorElement {
let start_y = row.as_f32() * line_height - offset - scroll_top;
let end_y = start_y + line_height;

let width = 0.35 * line_height;
let width = (0.35 * line_height).floor();
let highlight_origin = gutter_bounds.origin + point(px(0.), start_y);
let highlight_size = size(width, end_y - start_y);
Bounds::new(highlight_origin, highlight_size)
@@ -3297,6 +3341,12 @@ impl EditorElement {
}
}

/// Returns the width of the diff strip that will be displayed in the gutter.
pub(super) fn diff_hunk_strip_width(line_height: Pixels) -> Pixels {
// We floor the value to prevent pixel rounding.
(0.275 * line_height).floor()
}

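For context on the flooring in `diff_hunk_strip_width` above: a fractional width can rasterize to different device-pixel widths depending on where it lands, so flooring gives every caller the same stable width. A quick sketch with plain `f32` stand-ins (the real `Pixels` type is assumed to behave similarly):

    fn diff_hunk_strip_width(line_height: f32) -> f32 {
        (0.275 * line_height).floor()
    }

    fn main() {
        assert_eq!(diff_hunk_strip_width(16.0), 4.0); // 4.4 -> 4.0
        assert_eq!(diff_hunk_strip_width(20.0), 5.0); // 5.5 -> 5.0
        // All call sites now share one consistent width per line height,
        // instead of each rounding 4.4px (etc.) independently.
    }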
fn paint_gutter_indicators(&self, layout: &mut EditorLayout, cx: &mut WindowContext) {
cx.paint_layer(layout.gutter_hitbox.bounds, |cx| {
cx.with_element_namespace("gutter_fold_toggles", |cx| {
@@ -3308,9 +3358,6 @@ impl EditorElement {
for test_indicator in layout.test_indicators.iter_mut() {
test_indicator.paint(cx);
}
for close_indicator in layout.close_indicators.iter_mut() {
close_indicator.paint(cx);
}

if let Some(indicator) = layout.code_actions_indicator.as_mut() {
indicator.paint(cx);
@@ -3355,11 +3402,11 @@ impl EditorElement {
};

let start_y = layout.gutter_hitbox.top()
+ start_row.0 as f32 * layout.position_map.line_height
- layout.position_map.scroll_pixel_position.y;
+ (start_row.0 as f32 - layout.position_map.scroll_position.y)
* layout.position_map.line_height;
let end_y = layout.gutter_hitbox.top()
+ (end_row.0 + 1) as f32 * layout.position_map.line_height
- layout.position_map.scroll_pixel_position.y;
+ ((end_row.0 + 1) as f32 - layout.position_map.scroll_position.y)
* layout.position_map.line_height;
let bounds = Bounds::from_corners(
point(layout.gutter_hitbox.left(), start_y),
point(layout.gutter_hitbox.left() + highlight_width, end_y),
@@ -3710,11 +3757,11 @@ impl EditorElement {
)
.map(|hunk| {
let start_display_row =
MultiBufferPoint::new(hunk.associated_range.start.0, 0)
MultiBufferPoint::new(hunk.row_range.start.0, 0)
.to_display_point(&snapshot.display_snapshot)
.row();
let mut end_display_row =
MultiBufferPoint::new(hunk.associated_range.end.0, 0)
MultiBufferPoint::new(hunk.row_range.end.0, 0)
.to_display_point(&snapshot.display_snapshot)
.row();
if end_display_row != start_display_row {
@@ -3853,8 +3900,9 @@ impl EditorElement {
line_height: layout.position_map.line_height,
corner_radius,
start_y: layout.content_origin.y
+ row_range.start.as_f32() * layout.position_map.line_height
- layout.position_map.scroll_pixel_position.y,
+ (row_range.start.as_f32() - layout.position_map.scroll_position.y)
* layout.position_map.line_height,

lines: row_range
.iter_rows()
.map(|row| {
@@ -3864,18 +3912,17 @@ impl EditorElement {
start_x: if row == range.start.row() {
layout.content_origin.x
+ line_layout.x_for_index(range.start.column() as usize)
- layout.position_map.scroll_pixel_position.x
- layout.position_map.scroll_x_offset()
} else {
layout.content_origin.x
- layout.position_map.scroll_pixel_position.x
layout.content_origin.x - layout.position_map.scroll_x_offset()
},
end_x: if row == range.end.row() {
layout.content_origin.x
+ line_layout.x_for_index(range.end.column() as usize)
- layout.position_map.scroll_pixel_position.x
- layout.position_map.scroll_x_offset()
} else {
layout.content_origin.x + line_layout.width + line_end_overshoot
- layout.position_map.scroll_pixel_position.x
- layout.position_map.scroll_x_offset()
},
}
})
@@ -4096,46 +4143,6 @@ impl EditorElement {
+ 1;
self.column_pixels(digit_count, cx)
}

#[allow(clippy::too_many_arguments)]
fn layout_hunk_diff_close_indicators(
&self,
line_height: Pixels,
scroll_pixel_position: gpui::Point<Pixels>,
gutter_dimensions: &GutterDimensions,
gutter_hitbox: &Hitbox,
rows_with_hunk_bounds: &HashMap<DisplayRow, Bounds<Pixels>>,
expanded_hunks_by_rows: HashMap<DisplayRow, ExpandedHunk>,
cx: &mut WindowContext,
) -> Vec<AnyElement> {
self.editor.update(cx, |editor, cx| {
expanded_hunks_by_rows
.into_iter()
.map(|(display_row, hunk)| {
let button = editor.close_hunk_diff_button(
HoveredHunk {
multi_buffer_range: hunk.hunk_range,
status: hunk.status,
diff_base_byte_range: hunk.diff_base_byte_range,
},
display_row,
cx,
);

prepaint_gutter_button(
button,
display_row,
line_height,
gutter_dimensions,
scroll_pixel_position,
gutter_hitbox,
rows_with_hunk_bounds,
cx,
)
})
.collect()
})
}
}

#[allow(clippy::too_many_arguments)]
@@ -4233,7 +4240,7 @@ fn render_blame_entry(
let short_commit_id = blame_entry.sha.display_short();

let author_name = blame_entry.author.as_deref().unwrap_or("<no name>");
let name = util::truncate_and_trailoff(author_name, 20);
let name = util::truncate_and_trailoff(author_name, GIT_BLAME_MAX_AUTHOR_CHARS_DISPLAYED);

let details = blame.read(cx).details_for_entry(&blame_entry);

@@ -4245,22 +4252,21 @@ fn render_blame_entry(

h_flex()
.w_full()
.justify_between()
.font_family(style.text.font().family)
.line_height(style.text.line_height)
.id(("blame", ix))
.children([
div()
.text_color(sha_color.cursor)
.child(short_commit_id)
.mr_2(),
div()
.w_full()
.h_flex()
.justify_between()
.text_color(cx.theme().status().hint)
.child(name)
.child(relative_timestamp),
])
.text_color(cx.theme().status().hint)
.pr_2()
.gap_2()
.child(
h_flex()
.items_center()
.gap_2()
.child(div().text_color(sha_color.cursor).child(short_commit_id))
.child(name),
)
.child(relative_timestamp)
.on_mouse_down(MouseButton::Right, {
let blame_entry = blame_entry.clone();
let details = details.clone();
@@ -4569,11 +4575,10 @@ impl LineWithInvisibles {
cx: &mut WindowContext,
) {
let line_height = layout.position_map.line_height;
let line_y = line_height
* (row.as_f32() - layout.position_map.scroll_pixel_position.y / line_height);
let line_y = line_height * (row.as_f32() - layout.position_map.scroll_position.y);

let mut fragment_origin =
content_origin + gpui::point(-layout.position_map.scroll_pixel_position.x, line_y);
content_origin + gpui::point(-layout.position_map.scroll_x_offset(), line_y);

for fragment in &self.fragments {
match fragment {
@@ -4627,7 +4632,7 @@ impl LineWithInvisibles {
(layout.position_map.em_width - invisible_symbol.width).max(Pixels::ZERO) / 2.0;
let origin = content_origin
+ gpui::point(
x_offset + invisible_offset - layout.position_map.scroll_pixel_position.x,
x_offset + invisible_offset - layout.position_map.scroll_x_offset(),
line_y,
);

@@ -4975,6 +4980,7 @@ impl Element for EditorElement {
font_id,
font_size,
em_width,
em_advance,
self.max_line_number_width(&snapshot, cx),
cx,
);
@@ -4999,10 +5005,8 @@ impl Element for EditorElement {
snapshot
} else {
let wrap_width = match editor.soft_wrap_mode(cx) {
SoftWrap::None => None,
SoftWrap::PreferLine => {
Some((MAX_LINE_LEN / 2) as f32 * em_advance)
}
SoftWrap::GitDiff => None,
SoftWrap::None => Some((MAX_LINE_LEN / 2) as f32 * em_advance),
SoftWrap::EditorWidth => Some(editor_width),
SoftWrap::Column(column) => Some(column as f32 * em_advance),
SoftWrap::Bounded(column) => {
@@ -5157,6 +5161,7 @@ impl Element for EditorElement {
line_height,
&gutter_hitbox,
start_row..end_row,
start_anchor..end_anchor,
&snapshot,
cx,
);
@@ -5239,7 +5244,6 @@ impl Element for EditorElement {
scroll_position = snapshot.scroll_position();
}
});

let scroll_pixel_position = point(
scroll_position.x * em_width,
scroll_position.y * line_height,
@@ -5485,15 +5489,6 @@ impl Element for EditorElement {
} else {
Vec::new()
};
let close_indicators = self.layout_hunk_diff_close_indicators(
line_height,
scroll_pixel_position,
&gutter_dimensions,
&gutter_hitbox,
&rows_with_hunk_bounds,
expanded_add_hunks_by_rows,
cx,
);

self.layout_signature_help(
&hitbox,
@@ -5573,7 +5568,7 @@ impl Element for EditorElement {
mode: snapshot.mode,
position_map: Rc::new(PositionMap {
size: bounds.size,
scroll_pixel_position,
scroll_position,
scroll_max,
line_layouts,
line_height,
@@ -5606,7 +5601,6 @@ impl Element for EditorElement {
selections,
mouse_context_menu,
test_indicators,
close_indicators,
code_actions_indicator,
gutter_fold_toggles,
crease_trailers,
@@ -5748,7 +5742,6 @@ pub struct EditorLayout {
selections: Vec<(PlayerColor, Vec<SelectionLayout>)>,
code_actions_indicator: Option<AnyElement>,
test_indicators: Vec<AnyElement>,
close_indicators: Vec<AnyElement>,
gutter_fold_toggles: Vec<Option<AnyElement>>,
crease_trailers: Vec<Option<CreaseTrailerLayout>>,
mouse_context_menu: Option<AnyElement>,
@@ -5882,7 +5875,7 @@ struct CreaseTrailerLayout {
struct PositionMap {
size: Size<Pixels>,
line_height: Pixels,
scroll_pixel_position: gpui::Point<Pixels>,
scroll_position: gpui::Point<f32>,
scroll_max: gpui::Point<f32>,
em_width: Pixels,
em_advance: Pixels,
@@ -5946,6 +5939,9 @@ impl PositionMap {
column_overshoot_after_line_end,
}
}
fn scroll_x_offset(&self) -> Pixels {
self.em_width * self.scroll_position.x
}
}

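A quick numeric check of the scroll refactor above (plain `f32` stand-ins, not Zed types): since `scroll_pixel_position.y == scroll_position.y * line_height`, the old and new formulas are algebraically equal; the new form just subtracts in row units before scaling, matching the `scroll_x_offset()` style.

    fn main() {
        let line_height = 18.0_f32;
        let scroll_y_rows = 12.5_f32; // scroll position, in rows
        let scroll_pixel_y = scroll_y_rows * line_height;
        let row = 40.0_f32;

        let old_y = row * line_height - scroll_pixel_y;
        let new_y = (row - scroll_y_rows) * line_height;

        assert!((old_y - new_y).abs() < 1e-3);
    }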
struct BlockLayout {
@@ -6298,10 +6294,21 @@ fn compute_auto_height_layout(
.unwrap()
.size
.width;
let em_advance = cx
.text_system()
.advance(font_id, font_size, 'm')
.unwrap()
.width;

let mut snapshot = editor.snapshot(cx);
let gutter_dimensions =
snapshot.gutter_dimensions(font_id, font_size, em_width, max_line_number_width, cx);
let gutter_dimensions = snapshot.gutter_dimensions(
font_id,
font_size,
em_width,
em_advance,
max_line_number_width,
cx,
);

editor.gutter_dimensions = gutter_dimensions;
let text_width = width - gutter_dimensions.width;

@@ -1,309 +1 @@
pub mod blame;

use std::ops::Range;

use git::diff::{DiffHunk, DiffHunkStatus};
use language::Point;
use multi_buffer::{Anchor, MultiBufferRow};

use crate::{
display_map::{DisplaySnapshot, ToDisplayPoint},
hunk_status, AnchorRangeExt, DisplayRow,
};

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DisplayDiffHunk {
Folded {
display_row: DisplayRow,
},

Unfolded {
diff_base_byte_range: Range<usize>,
display_row_range: Range<DisplayRow>,
multi_buffer_range: Range<Anchor>,
status: DiffHunkStatus,
},
}

impl DisplayDiffHunk {
pub fn start_display_row(&self) -> DisplayRow {
match self {
&DisplayDiffHunk::Folded { display_row } => display_row,
DisplayDiffHunk::Unfolded {
display_row_range, ..
} => display_row_range.start,
}
}

pub fn contains_display_row(&self, display_row: DisplayRow) -> bool {
let range = match self {
&DisplayDiffHunk::Folded { display_row } => display_row..=display_row,

DisplayDiffHunk::Unfolded {
display_row_range, ..
} => display_row_range.start..=display_row_range.end,
};

range.contains(&display_row)
}
}

pub fn diff_hunk_to_display(
hunk: &DiffHunk<MultiBufferRow>,
snapshot: &DisplaySnapshot,
) -> DisplayDiffHunk {
let hunk_start_point = Point::new(hunk.associated_range.start.0, 0);
let hunk_start_point_sub = Point::new(hunk.associated_range.start.0.saturating_sub(1), 0);
let hunk_end_point_sub = Point::new(
hunk.associated_range
.end
.0
.saturating_sub(1)
.max(hunk.associated_range.start.0),
0,
);

let status = hunk_status(hunk);
let is_removal = status == DiffHunkStatus::Removed;

let folds_start = Point::new(hunk.associated_range.start.0.saturating_sub(2), 0);
let folds_end = Point::new(hunk.associated_range.end.0 + 2, 0);
let folds_range = folds_start..folds_end;

let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| {
let fold_point_range = fold.range.to_point(&snapshot.buffer_snapshot);
let fold_point_range = fold_point_range.start..=fold_point_range.end;

let folded_start = fold_point_range.contains(&hunk_start_point);
let folded_end = fold_point_range.contains(&hunk_end_point_sub);
let folded_start_sub = fold_point_range.contains(&hunk_start_point_sub);

(folded_start && folded_end) || (is_removal && folded_start_sub)
});

if let Some(fold) = containing_fold {
let row = fold.range.start.to_display_point(snapshot).row();
DisplayDiffHunk::Folded { display_row: row }
} else {
let start = hunk_start_point.to_display_point(snapshot).row();

let hunk_end_row = hunk.associated_range.end.max(hunk.associated_range.start);
let hunk_end_point = Point::new(hunk_end_row.0, 0);

let multi_buffer_start = snapshot.buffer_snapshot.anchor_after(hunk_start_point);
let multi_buffer_end = snapshot.buffer_snapshot.anchor_before(hunk_end_point);
let end = hunk_end_point.to_display_point(snapshot).row();

DisplayDiffHunk::Unfolded {
display_row_range: start..end,
multi_buffer_range: multi_buffer_start..multi_buffer_end,
status,
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
}
}
}

#[cfg(test)]
mod tests {
use crate::Point;
use crate::{editor_tests::init_test, hunk_status};
use gpui::{Context, TestAppContext};
use language::Capability::ReadWrite;
use multi_buffer::{ExcerptRange, MultiBuffer, MultiBufferRow};
use project::{FakeFs, Project};
use unindent::Unindent;
#[gpui::test]
async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
use git::diff::DiffHunkStatus;
init_test(cx, |_| {});

let fs = FakeFs::new(cx.background_executor.clone());
let project = Project::test(fs, [], cx).await;

// buffer has two modified hunks with two rows each
let buffer_1 = project.update(cx, |project, cx| {
project.create_local_buffer(
"
1.zero
1.ONE
1.TWO
1.three
1.FOUR
1.FIVE
1.six
"
.unindent()
.as_str(),
None,
cx,
)
});
buffer_1.update(cx, |buffer, cx| {
buffer.set_diff_base(
Some(
"
1.zero
1.one
1.two
1.three
1.four
1.five
1.six
"
.unindent(),
),
cx,
);
});

// buffer has a deletion hunk and an insertion hunk
let buffer_2 = project.update(cx, |project, cx| {
project.create_local_buffer(
"
2.zero
2.one
2.two
2.three
2.four
2.five
2.six
"
.unindent()
.as_str(),
None,
cx,
)
});
buffer_2.update(cx, |buffer, cx| {
buffer.set_diff_base(
Some(
"
2.zero
2.one
2.one-and-a-half
2.two
2.three
2.four
2.six
"
.unindent(),
),
cx,
);
});

cx.background_executor.run_until_parked();

let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
multibuffer.push_excerpts(
buffer_1.clone(),
[
// excerpt ends in the middle of a modified hunk
ExcerptRange {
context: Point::new(0, 0)..Point::new(1, 5),
primary: Default::default(),
},
// excerpt begins in the middle of a modified hunk
ExcerptRange {
context: Point::new(5, 0)..Point::new(6, 5),
primary: Default::default(),
},
],
cx,
);
multibuffer.push_excerpts(
buffer_2.clone(),
[
// excerpt ends at a deletion
ExcerptRange {
context: Point::new(0, 0)..Point::new(1, 5),
primary: Default::default(),
},
// excerpt starts at a deletion
ExcerptRange {
context: Point::new(2, 0)..Point::new(2, 5),
primary: Default::default(),
},
// excerpt fully contains a deletion hunk
ExcerptRange {
context: Point::new(1, 0)..Point::new(2, 5),
primary: Default::default(),
},
// excerpt fully contains an insertion hunk
ExcerptRange {
context: Point::new(4, 0)..Point::new(6, 5),
primary: Default::default(),
},
],
cx,
);
multibuffer
});

let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));

assert_eq!(
snapshot.text(),
"
1.zero
1.ONE
1.FIVE
1.six
2.zero
2.one
2.two
2.one
2.two
2.four
2.five
2.six"
.unindent()
);

let expected = [
(
DiffHunkStatus::Modified,
MultiBufferRow(1)..MultiBufferRow(2),
),
(
DiffHunkStatus::Modified,
MultiBufferRow(2)..MultiBufferRow(3),
),
//TODO: Define better when and where removed hunks show up at range extremities
(
DiffHunkStatus::Removed,
MultiBufferRow(6)..MultiBufferRow(6),
),
(
DiffHunkStatus::Removed,
MultiBufferRow(8)..MultiBufferRow(8),
),
(
DiffHunkStatus::Added,
MultiBufferRow(10)..MultiBufferRow(11),
),
];

assert_eq!(
snapshot
.git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12))
.map(|hunk| (hunk_status(&hunk), hunk.associated_range))
.collect::<Vec<_>>(),
&expected,
);

assert_eq!(
snapshot
.git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12))
.map(|hunk| (hunk_status(&hunk), hunk.associated_range))
.collect::<Vec<_>>(),
expected
.iter()
.rev()
.cloned()
.collect::<Vec<_>>()
.as_slice(),
);
}
}

@@ -29,7 +29,7 @@ pub struct GitBlameEntrySummary {
impl sum_tree::Item for GitBlameEntry {
type Summary = GitBlameEntrySummary;

fn summary(&self) -> Self::Summary {
fn summary(&self, _cx: &()) -> Self::Summary {
GitBlameEntrySummary { rows: self.rows }
}
}
@@ -207,6 +207,27 @@ impl GitBlame {
})
}

pub fn max_author_length(&mut self, cx: &mut ModelContext<Self>) -> usize {
self.sync(cx);

let mut max_author_length = 0;

for entry in self.entries.iter() {
let author_len = entry
.blame
.as_ref()
.and_then(|entry| entry.author.as_ref())
.map(|author| author.len());
if let Some(author_len) = author_len {
if author_len > max_author_length {
max_author_length = author_len;
}
}
}

max_author_length
}

pub fn blur(&mut self, _: &mut ModelContext<Self>) {
self.focused = false;
}

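Aside on `max_author_length` above: the explicit loop could equivalently be written with iterator adapters. A standalone sketch with a stand-in entry type (not the actual `GitBlame` internals):

    struct AuthorEntry {
        author: Option<String>,
    }

    // Longest author name across entries; 0 when no entry has an author.
    fn max_author_length(entries: &[AuthorEntry]) -> usize {
        entries
            .iter()
            .filter_map(|entry| entry.author.as_ref().map(|author| author.len()))
            .max()
            .unwrap_or(0)
    }

    fn main() {
        let entries = vec![
            AuthorEntry { author: Some("Ada".into()) },
            AuthorEntry { author: None },
            AuthorEntry { author: Some("Grace Hopper".into()) },
        ];
        assert_eq!(max_author_length(&entries), 12);
    }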
@@ -32,7 +32,7 @@ pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewCon
opening_range.to_anchors(&snapshot.buffer_snapshot),
closing_range.to_anchors(&snapshot.buffer_snapshot),
],
|theme| theme.editor_document_highlight_read_background,
|theme| theme.editor_document_highlight_bracket_background,
cx,
)
}

File diff suppressed because it is too large
@@ -2607,7 +2607,7 @@ pub mod tests {
.await
.unwrap();
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite);
let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
multibuffer.push_excerpts(
buffer_1.clone(),
[
@@ -2957,7 +2957,7 @@ pub mod tests {
})
.await
.unwrap();
let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite));
let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite));
let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| {
let buffer_1_excerpts = multibuffer.push_excerpts(
buffer_1.clone(),

@@ -20,8 +20,8 @@ use language::{
};
use multi_buffer::AnchorRangeExt;
use project::{
project_settings::ProjectSettings, search::SearchQuery, FormatTrigger, Item as _, Project,
ProjectPath,
lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, Item as _,
Project, ProjectPath,
};
use rpc::proto::{self, update_view, PeerId};
use settings::Settings;
@@ -68,7 +68,6 @@ impl FollowableItem for Editor {
unreachable!()
};

let replica_id = project.read(cx).replica_id();
let buffer_ids = state
.excerpts
.iter()
@@ -92,7 +91,7 @@ impl FollowableItem for Editor {
if state.singleton && buffers.len() == 1 {
multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx)
} else {
multibuffer = MultiBuffer::new(replica_id, project.read(cx).capability());
multibuffer = MultiBuffer::new(project.read(cx).capability());
let mut excerpts = state.excerpts.into_iter().peekable();
while let Some(excerpt) = excerpts.peek() {
let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else {
@@ -1087,10 +1086,14 @@ impl SerializableItem for Editor {
let workspace_id = workspace.database_id()?;

let buffer = self.buffer().read(cx).as_singleton()?;
let path = buffer
.read(cx)
.file()
.map(|file| file.full_path(cx))
.and_then(|full_path| project.read(cx).find_project_path(&full_path, cx))
.and_then(|project_path| project.read(cx).absolute_path(&project_path, cx));

let is_dirty = buffer.read(cx).is_dirty();
let local_file = buffer.read(cx).file().and_then(|file| file.as_local());
let path = local_file.map(|file| file.abs_path(cx));
let mtime = buffer.read(cx).saved_mtime();

let snapshot = buffer.read(cx).snapshot();

@@ -928,7 +928,7 @@ mod tests {

let buffer = cx.new_model(|cx| Buffer::local("abc\ndefg\nhijkl\nmn", cx));
let multibuffer = cx.new_model(|cx| {
let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite);
let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
multibuffer.push_excerpts(
buffer.clone(),
[

crates/editor/src/proposed_changes_editor.rs (Normal file, 205 lines)
@@ -0,0 +1,205 @@
use crate::{Editor, EditorEvent};
use collections::HashSet;
use futures::{channel::mpsc, future::join_all};
use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View};
use language::{Buffer, BufferEvent, Capability};
use multi_buffer::{ExcerptRange, MultiBuffer};
use project::Project;
use smol::stream::StreamExt;
use std::{any::TypeId, ops::Range, time::Duration};
use text::ToOffset;
use ui::prelude::*;
use workspace::{
searchable::SearchableItemHandle, Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation,
ToolbarItemView,
};

pub struct ProposedChangesEditor {
editor: View<Editor>,
_subscriptions: Vec<Subscription>,
_recalculate_diffs_task: Task<Option<()>>,
recalculate_diffs_tx: mpsc::UnboundedSender<Model<Buffer>>,
}

pub struct ProposedChangesBuffer<T> {
pub buffer: Model<Buffer>,
pub ranges: Vec<Range<T>>,
}

pub struct ProposedChangesEditorToolbar {
current_editor: Option<View<ProposedChangesEditor>>,
}

impl ProposedChangesEditor {
pub fn new<T: ToOffset>(
buffers: Vec<ProposedChangesBuffer<T>>,
project: Option<Model<Project>>,
cx: &mut ViewContext<Self>,
) -> Self {
let mut subscriptions = Vec::new();
let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite));

for buffer in buffers {
let branch_buffer = buffer.buffer.update(cx, |buffer, cx| buffer.branch(cx));
subscriptions.push(cx.subscribe(&branch_buffer, Self::on_buffer_event));

multibuffer.update(cx, |multibuffer, cx| {
multibuffer.push_excerpts(
branch_buffer,
buffer.ranges.into_iter().map(|range| ExcerptRange {
context: range,
primary: None,
}),
cx,
);
});
}

let (recalculate_diffs_tx, mut recalculate_diffs_rx) = mpsc::unbounded();

Self {
editor: cx
.new_view(|cx| Editor::for_multibuffer(multibuffer.clone(), project, true, cx)),
recalculate_diffs_tx,
_recalculate_diffs_task: cx.spawn(|_, mut cx| async move {
let mut buffers_to_diff = HashSet::default();
while let Some(buffer) = recalculate_diffs_rx.next().await {
buffers_to_diff.insert(buffer);

loop {
cx.background_executor()
.timer(Duration::from_millis(250))
.await;
let mut had_further_changes = false;
while let Ok(next_buffer) = recalculate_diffs_rx.try_next() {
buffers_to_diff.insert(next_buffer?);
had_further_changes = true;
}
if !had_further_changes {
break;
}
}

join_all(buffers_to_diff.drain().filter_map(|buffer| {
buffer
.update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx))
.ok()?
}))
.await;
}
None
}),
_subscriptions: subscriptions,
}
}

fn on_buffer_event(
&mut self,
buffer: Model<Buffer>,
event: &BufferEvent,
_cx: &mut ViewContext<Self>,
) {
if let BufferEvent::Edited = event {
self.recalculate_diffs_tx.unbounded_send(buffer).ok();
}
}

fn apply_all_changes(&self, cx: &mut ViewContext<Self>) {
let buffers = self.editor.read(cx).buffer.read(cx).all_buffers();
for branch_buffer in buffers {
if let Some(base_buffer) = branch_buffer.read(cx).diff_base_buffer() {
base_buffer.update(cx, |base_buffer, cx| {
base_buffer.merge(&branch_buffer, None, cx)
});
}
}
}
}

impl Render for ProposedChangesEditor {
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
self.editor.clone()
}
}

impl FocusableView for ProposedChangesEditor {
fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle {
self.editor.focus_handle(cx)
}
}

impl EventEmitter<EditorEvent> for ProposedChangesEditor {}

impl Item for ProposedChangesEditor {
type Event = EditorEvent;

fn tab_icon(&self, _cx: &ui::WindowContext) -> Option<Icon> {
Some(Icon::new(IconName::Pencil))
}

fn tab_content_text(&self, _cx: &WindowContext) -> Option<SharedString> {
Some("Proposed changes".into())
}

fn as_searchable(&self, _: &View<Self>) -> Option<Box<dyn SearchableItemHandle>> {
Some(Box::new(self.editor.clone()))
}

fn act_as_type<'a>(
&'a self,
type_id: TypeId,
self_handle: &'a View<Self>,
_: &'a AppContext,
) -> Option<gpui::AnyView> {
if type_id == TypeId::of::<Self>() {
Some(self_handle.to_any())
} else if type_id == TypeId::of::<Editor>() {
Some(self.editor.to_any())
} else {
None
}
}
}

impl ProposedChangesEditorToolbar {
pub fn new() -> Self {
Self {
current_editor: None,
}
}

fn get_toolbar_item_location(&self) -> ToolbarItemLocation {
if self.current_editor.is_some() {
ToolbarItemLocation::PrimaryRight
} else {
ToolbarItemLocation::Hidden
}
}
}

impl Render for ProposedChangesEditorToolbar {
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
let editor = self.current_editor.clone();
Button::new("apply-changes", "Apply All").on_click(move |_, cx| {
if let Some(editor) = &editor {
editor.update(cx, |editor, cx| {
editor.apply_all_changes(cx);
});
}
})
}
}

impl EventEmitter<ToolbarItemEvent> for ProposedChangesEditorToolbar {}

impl ToolbarItemView for ProposedChangesEditorToolbar {
fn set_active_pane_item(
&mut self,
active_pane_item: Option<&dyn workspace::ItemHandle>,
_cx: &mut ViewContext<Self>,
) -> workspace::ToolbarItemLocation {
self.current_editor =
active_pane_item.and_then(|item| item.downcast::<ProposedChangesEditor>());
self.get_toolbar_item_location()
}
}
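The `_recalculate_diffs_task` above is a coalescing debounce: after the first event, it keeps extending a quiet period while more events arrive, then recalculates once for the whole batch. A synchronous sketch of the same pattern with std channels (timings and buffer ids are illustrative only):

    use std::collections::HashSet;
    use std::sync::mpsc;
    use std::thread;
    use std::time::Duration;

    fn main() {
        let (tx, rx) = mpsc::channel::<u32>();

        let worker = thread::spawn(move || {
            let mut batch = HashSet::new();
            while let Ok(first) = rx.recv() {
                batch.insert(first);
                loop {
                    thread::sleep(Duration::from_millis(50)); // quiet period
                    let mut had_more = false;
                    while let Ok(next) = rx.try_recv() {
                        batch.insert(next); // coalesce duplicates
                        had_more = true;
                    }
                    if !had_more {
                        break; // the burst has settled; flush once
                    }
                }
                println!("recalculating for {} buffers", batch.len());
                batch.clear();
            }
        });

        for id in [1, 2, 2, 3] {
            tx.send(id).unwrap();
        }
        drop(tx); // closing the channel lets the worker exit
        worker.join().unwrap();
    }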
@@ -109,7 +109,7 @@ impl SelectionsCollection {
|
||||
|
||||
pub fn all<'a, D>(&self, cx: &AppContext) -> Vec<Selection<D>>
|
||||
where
|
||||
D: 'a + TextDimension + Ord + Sub<D, Output = D> + std::fmt::Debug,
|
||||
D: 'a + TextDimension + Ord + Sub<D, Output = D>,
|
||||
{
|
||||
let disjoint_anchors = &self.disjoint;
|
||||
let mut disjoint =
|
||||
@@ -850,7 +850,7 @@ pub(crate) fn resolve_multiple<'a, D, I>(
|
||||
snapshot: &MultiBufferSnapshot,
|
||||
) -> impl 'a + Iterator<Item = Selection<D>>
|
||||
where
|
||||
D: TextDimension + Ord + Sub<D, Output = D> + std::fmt::Debug,
|
||||
D: TextDimension + Ord + Sub<D, Output = D>,
|
||||
I: 'a + IntoIterator<Item = &'a Selection<Anchor>>,
|
||||
{
|
||||
let (to_summarize, selections) = selections.into_iter().tee();
|
||||
|
||||
@@ -88,116 +88,3 @@ pub(crate) fn build_editor_with_project(
 ) -> Editor {
     Editor::new(EditorMode::Full, buffer, Some(project), true, cx)
 }
-
-#[cfg(any(test, feature = "test-support"))]
-pub fn editor_hunks(
-    editor: &Editor,
-    snapshot: &DisplaySnapshot,
-    cx: &mut ViewContext<'_, Editor>,
-) -> Vec<(
-    String,
-    git::diff::DiffHunkStatus,
-    std::ops::Range<crate::DisplayRow>,
-)> {
-    use multi_buffer::MultiBufferRow;
-    use text::Point;
-
-    use crate::hunk_status;
-
-    snapshot
-        .buffer_snapshot
-        .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX)
-        .map(|hunk| {
-            let display_range = Point::new(hunk.associated_range.start.0, 0)
-                .to_display_point(snapshot)
-                .row()
-                ..Point::new(hunk.associated_range.end.0, 0)
-                    .to_display_point(snapshot)
-                    .row();
-            let (_, buffer, _) = editor
-                .buffer()
-                .read(cx)
-                .excerpt_containing(Point::new(hunk.associated_range.start.0, 0), cx)
-                .expect("no excerpt for expanded buffer's hunk start");
-            let diff_base = buffer
-                .read(cx)
-                .diff_base()
-                .expect("should have a diff base for expanded hunk")
-                .slice(hunk.diff_base_byte_range.clone())
-                .to_string();
-            (diff_base, hunk_status(&hunk), display_range)
-        })
-        .collect()
-}
-
-#[cfg(any(test, feature = "test-support"))]
-pub fn expanded_hunks(
-    editor: &Editor,
-    snapshot: &DisplaySnapshot,
-    cx: &mut ViewContext<'_, Editor>,
-) -> Vec<(
-    String,
-    git::diff::DiffHunkStatus,
-    std::ops::Range<crate::DisplayRow>,
-)> {
-    editor
-        .expanded_hunks
-        .hunks(false)
-        .map(|expanded_hunk| {
-            let hunk_display_range = expanded_hunk
-                .hunk_range
-                .start
-                .to_display_point(snapshot)
-                .row()
-                ..expanded_hunk
-                    .hunk_range
-                    .end
-                    .to_display_point(snapshot)
-                    .row();
-            let (_, buffer, _) = editor
-                .buffer()
-                .read(cx)
-                .excerpt_containing(expanded_hunk.hunk_range.start, cx)
-                .expect("no excerpt for expanded buffer's hunk start");
-            let diff_base = buffer
-                .read(cx)
-                .diff_base()
-                .expect("should have a diff base for expanded hunk")
-                .slice(expanded_hunk.diff_base_byte_range.clone())
-                .to_string();
-            (diff_base, expanded_hunk.status, hunk_display_range)
-        })
-        .collect()
-}
-
-#[cfg(any(test, feature = "test-support"))]
-pub fn expanded_hunks_background_highlights(
-    editor: &mut Editor,
-    cx: &mut gpui::WindowContext,
-) -> Vec<std::ops::RangeInclusive<crate::DisplayRow>> {
-    use crate::DisplayRow;
-
-    let mut highlights = Vec::new();
-
-    let mut range_start = 0;
-    let mut previous_highlighted_row = None;
-    for (highlighted_row, _) in editor.highlighted_display_rows(cx) {
-        match previous_highlighted_row {
-            Some(previous_row) => {
-                if previous_row + 1 != highlighted_row.0 {
-                    highlights.push(DisplayRow(range_start)..=DisplayRow(previous_row));
-                    range_start = highlighted_row.0;
-                }
-            }
-            None => {
-                range_start = highlighted_row.0;
-            }
-        }
-        previous_highlighted_row = Some(highlighted_row.0);
-    }
-    if let Some(previous_row) = previous_highlighted_row {
-        highlights.push(DisplayRow(range_start)..=DisplayRow(previous_row));
-    }
-
-    highlights
-}
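// Standalone sketch (not part of the diff) of the run-grouping logic that
// `expanded_hunks_background_highlights` used above: consecutive row numbers
// collapse into inclusive ranges; any gap starts a new range.
fn group_consecutive_rows(rows: &[u32]) -> Vec<std::ops::RangeInclusive<u32>> {
    let mut ranges = Vec::new();
    let mut rows = rows.iter().copied();
    if let Some(first) = rows.next() {
        let (mut start, mut prev) = (first, first);
        for row in rows {
            if prev + 1 != row {
                // Gap found: close the current range and open a new one.
                ranges.push(start..=prev);
                start = row;
            }
            prev = row;
        }
        ranges.push(start..=prev);
    }
    ranges
}
// e.g. group_consecutive_rows(&[1, 2, 3, 7, 8]) == vec![1..=3, 7..=8]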
@@ -1,17 +1,17 @@
 use crate::{
-    display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DisplayPoint, Editor, MultiBuffer,
-    RowExt,
+    display_map::ToDisplayPoint, AnchorRangeExt, Autoscroll, DiffRowHighlight, DisplayPoint,
+    Editor, MultiBuffer, RowExt,
 };
 use collections::BTreeMap;
 use futures::Future;
 use git::diff::DiffHunkStatus;
 use gpui::{
     AnyWindowHandle, AppContext, Keystroke, ModelContext, Pixels, Point, View, ViewContext,
-    VisualTestContext,
+    VisualTestContext, WindowHandle,
 };
 use indoc::indoc;
 use itertools::Itertools;
 use language::{Buffer, BufferSnapshot, LanguageRegistry};
-use multi_buffer::ExcerptRange;
+use multi_buffer::{ExcerptRange, ToPoint};
 use parking_lot::RwLock;
 use project::{FakeFs, Project};
 use std::{
@@ -71,11 +71,21 @@ impl EditorTestContext {
         }
     }
 
+    pub async fn for_editor(editor: WindowHandle<Editor>, cx: &mut gpui::TestAppContext) -> Self {
+        let editor_view = editor.root_view(cx).unwrap();
+        Self {
+            cx: VisualTestContext::from_window(*editor.deref(), cx),
+            window: editor.into(),
+            editor: editor_view,
+            assertion_cx: AssertionContextManager::new(),
+        }
+    }
+
     pub fn new_multibuffer<const COUNT: usize>(
         cx: &mut gpui::TestAppContext,
         excerpts: [&str; COUNT],
     ) -> EditorTestContext {
-        let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite);
+        let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite);
         let buffer = cx.new_model(|cx| {
             for excerpt in excerpts.into_iter() {
                 let (text, ranges) = marked_text_ranges(excerpt, false);
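// Hypothetical usage of the new `for_editor` constructor (names assumed, not
// shown in this diff): wrap an already-open editor window in a test context
// instead of building one from marked text.
//
//     let window: WindowHandle<Editor> = cx.add_window(|cx| build_editor(buffer, cx));
//     let mut cx = EditorTestContext::for_editor(window, cx).await;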
@@ -297,19 +307,85 @@ impl EditorTestContext {
         state_context
     }
 
+    #[track_caller]
+    pub fn assert_diff_hunks(&mut self, expected_diff: String) {
+        // Normalize the expected diff. If it has no diff markers, then insert blank markers
+        // before each line. Strip any whitespace-only lines.
+        let has_diff_markers = expected_diff
+            .lines()
+            .any(|line| line.starts_with("+") || line.starts_with("-"));
+        let expected_diff_text = expected_diff
+            .split('\n')
+            .map(|line| {
+                let trimmed = line.trim();
+                if trimmed.is_empty() {
+                    String::new()
+                } else if has_diff_markers {
+                    line.to_string()
+                } else {
+                    format!("  {line}")
+                }
+            })
+            .join("\n");
+
+        // Read the actual diff from the editor's row highlights and block
+        // decorations.
+        let actual_diff = self.editor.update(&mut self.cx, |editor, cx| {
+            let snapshot = editor.snapshot(cx);
+            let text = editor.text(cx);
+            let insertions = editor
+                .highlighted_rows::<DiffRowHighlight>()
+                .map(|(range, _)| {
+                    let start = range.start.to_point(&snapshot.buffer_snapshot);
+                    let end = range.end.to_point(&snapshot.buffer_snapshot);
+                    start.row..end.row
+                })
+                .collect::<Vec<_>>();
+            let deletions = editor
+                .expanded_hunks
+                .hunks
+                .iter()
+                .filter_map(|hunk| {
+                    if hunk.blocks.is_empty() {
+                        return None;
+                    }
+                    let row = hunk
+                        .hunk_range
+                        .start
+                        .to_point(&snapshot.buffer_snapshot)
+                        .row;
+                    let (_, buffer, _) = editor
+                        .buffer()
+                        .read(cx)
+                        .excerpt_containing(hunk.hunk_range.start, cx)
+                        .expect("no excerpt for expanded buffer's hunk start");
+                    let deleted_text = buffer
+                        .read(cx)
+                        .diff_base()
+                        .expect("should have a diff base for expanded hunk")
+                        .slice(hunk.diff_base_byte_range.clone())
+                        .to_string();
+                    if let DiffHunkStatus::Modified | DiffHunkStatus::Removed = hunk.status {
+                        Some((row, deleted_text))
+                    } else {
+                        None
+                    }
+                })
+                .collect::<Vec<_>>();
+            format_diff(text, deletions, insertions)
+        });
+
+        pretty_assertions::assert_eq!(actual_diff, expected_diff_text, "unexpected diff state");
+    }
+
     /// Make an assertion about the editor's text and the ranges and directions
     /// of its selections using a string containing embedded range markers.
     ///
     /// See the `util::test::marked_text_ranges` function for more information.
     #[track_caller]
     pub fn assert_editor_state(&mut self, marked_text: &str) {
-        let (unmarked_text, expected_selections) = marked_text_ranges(marked_text, true);
-        let buffer_text = self.buffer_text();
-
-        if buffer_text != unmarked_text {
-            panic!("Unmarked text doesn't match buffer text\nBuffer text: {buffer_text:?}\nUnmarked text: {unmarked_text:?}\nRaw buffer text\n{buffer_text}\nRaw unmarked text\n{unmarked_text}");
-        }
-
+        let (expected_text, expected_selections) = marked_text_ranges(marked_text, true);
+        pretty_assertions::assert_eq!(self.buffer_text(), expected_text, "unexpected buffer text");
         self.assert_selections(expected_selections, marked_text.to_string())
     }
 
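// Hypothetical call of `assert_diff_hunks` (expected string invented for
// illustration). Lines may carry "+"/"-" markers; unmarked lines are
// normalized to a two-space prefix before comparison.
//
//     cx.assert_diff_hunks(
//         indoc! {"
//             - struct Row;
//             + struct Row(u32);
//               fn main() {}
//         "}
//         .to_string(),
//     );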
@@ -382,25 +458,56 @@ impl EditorTestContext {
         let actual_marked_text =
             generate_marked_text(&self.buffer_text(), &actual_selections, true);
-        if expected_selections != actual_selections {
-            panic!(
-                indoc! {"
-
-                    {}Editor has unexpected selections.
-
-                    Expected selections:
-                    {}
-
-                    Actual selections:
-                    {}
-                "},
-                self.assertion_context(),
-                expected_marked_text,
-                actual_marked_text,
-            );
-        }
+        pretty_assertions::assert_eq!(
+            actual_marked_text,
+            expected_marked_text,
+            "{}Editor has unexpected selections",
+            self.assertion_context(),
+        );
     }
 }
 
+fn format_diff(
+    text: String,
+    actual_deletions: Vec<(u32, String)>,
+    actual_insertions: Vec<Range<u32>>,
+) -> String {
+    let mut diff = String::new();
+    for (row, line) in text.split('\n').enumerate() {
+        let row = row as u32;
+        if row > 0 {
+            diff.push('\n');
+        }
+        if let Some(text) = actual_deletions
+            .iter()
+            .find_map(|(deletion_row, deleted_text)| {
+                if *deletion_row == row {
+                    Some(deleted_text)
+                } else {
+                    None
+                }
+            })
+        {
+            for line in text.lines() {
+                diff.push('-');
+                if !line.is_empty() {
+                    diff.push(' ');
+                    diff.push_str(line);
+                }
+                diff.push('\n');
+            }
+        }
+        let marker = if actual_insertions.iter().any(|range| range.contains(&row)) {
+            "+ "
+        } else {
+            "  "
+        };
+        diff.push_str(format!("{marker}{line}").trim_end());
+    }
+    diff
+}
+
 impl Deref for EditorTestContext {
     type Target = gpui::VisualTestContext;
 
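// Worked example for `format_diff` above (inputs invented for illustration):
// deleted text is printed with a "- " prefix above the row it was removed
// from, inserted rows get "+ ", and everything else gets two spaces.
//
//     let diff = format_diff(
//         "a\nb".to_string(),
//         vec![(1, "x".to_string())], // "x" was deleted just above row 1
//         vec![1..2],                 // row 1 ("b") is an insertion
//     );
//     assert_eq!(diff, "  a\n- x\n+ b");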
@@ -9,16 +9,19 @@ use git::GitHostingProviderRegistry;
 use gpui::{AsyncAppContext, BackgroundExecutor, Context, Model};
 use http_client::{HttpClient, Method};
 use language::LanguageRegistry;
-use node_runtime::FakeNodeRuntime;
+use node_runtime::NodeRuntime;
 use open_ai::OpenAiEmbeddingModel;
 use project::Project;
-use semantic_index::{OpenAiEmbeddingProvider, ProjectIndex, SemanticDb, Status};
+use semantic_index::{
+    EmbeddingProvider, OpenAiEmbeddingProvider, ProjectIndex, SemanticDb, Status,
+};
 use serde::{Deserialize, Serialize};
 use settings::SettingsStore;
 use smol::channel::bounded;
 use smol::io::AsyncReadExt;
 use smol::Timer;
 use std::ops::RangeInclusive;
+use std::path::PathBuf;
 use std::time::Duration;
 use std::{
     fs,
@@ -237,6 +240,14 @@ async fn fetch_code_search_net_resources(http_client: &dyn HttpClient) -> Result
     Ok(())
 }
 
+#[derive(Default, Debug)]
+struct Counts {
+    covered_results: usize,
+    overlapped_results: usize,
+    covered_files: usize,
+    total_results: usize,
+}
+
 async fn run_evaluation(
     only_repo: Option<String>,
     executor: &BackgroundExecutor,
@@ -281,7 +292,7 @@ async fn run_evaluation(
     let user_store = cx
         .new_model(|cx| UserStore::new(client.clone(), cx))
         .unwrap();
-    let node_runtime = Arc::new(FakeNodeRuntime {});
+    let node_runtime = NodeRuntime::unavailable();
 
     let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json");
     let evaluations: Vec<EvaluationProject> = serde_json::from_slice(&evaluations).unwrap();
@@ -297,12 +308,11 @@ async fn run_evaluation(
     cx.update(|cx| languages::init(language_registry.clone(), node_runtime.clone(), cx))
         .unwrap();
 
-    let mut covered_result_count = 0;
-    let mut overlapped_result_count = 0;
-    let mut covered_file_count = 0;
-    let mut total_result_count = 0;
+    let mut counts = Counts::default();
     eprint!("Running evals.");
 
+    let mut failures = Vec::new();
+
     for evaluation_project in evaluations {
         if only_repo
             .as_ref()
@@ -314,27 +324,24 @@ async fn run_evaluation(
         eprint!("\r\x1B[2K");
         eprint!(
             "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. Project: {}...",
-            covered_result_count,
-            total_result_count,
-            overlapped_result_count,
-            total_result_count,
-            covered_file_count,
-            total_result_count,
+            counts.covered_results,
+            counts.total_results,
+            counts.overlapped_results,
+            counts.total_results,
+            counts.covered_files,
+            counts.total_results,
             evaluation_project.repo
         );
 
-        let repo_db_path =
-            db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_")));
-        let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider.clone(), cx)
-            .await
-            .unwrap();
-
         let repo_dir = repos_dir.join(&evaluation_project.repo);
         if !repo_dir.exists() || repo_dir.join(SKIP_EVAL_PATH).exists() {
             eprintln!("Skipping {}: directory not found", evaluation_project.repo);
             continue;
         }
 
+        let repo_db_path =
+            db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_")));
+
         let project = cx
             .update(|cx| {
                 Project::local(
@@ -349,125 +356,193 @@ async fn run_evaluation(
             })
             .unwrap();
 
-        let (worktree, _) = project
-            .update(cx, |project, cx| {
-                project.find_or_create_worktree(repo_dir, true, cx)
-            })?
-            .await?;
-
-        worktree
-            .update(cx, |worktree, _| {
-                worktree.as_local().unwrap().scan_complete()
-            })
-            .unwrap()
-            .await;
-
-        let project_index = cx
-            .update(|cx| semantic_index.create_project_index(project.clone(), cx))
-            .unwrap();
-        wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await;
-
-        for query in evaluation_project.queries {
-            let results = cx
-                .update(|cx| {
-                    let project_index = project_index.read(cx);
-                    project_index.search(query.query.clone(), SEARCH_RESULT_LIMIT, cx)
-                })
-                .unwrap()
-                .await
-                .unwrap();
-
-            let results = SemanticDb::load_results(results, &fs.clone(), &cx)
-                .await
-                .unwrap();
-
-            let mut project_covered_result_count = 0;
-            let mut project_overlapped_result_count = 0;
-            let mut project_covered_file_count = 0;
-            let mut covered_result_indices = Vec::new();
-            for expected_result in &query.expected_results {
-                let mut file_matched = false;
-                let mut range_overlapped = false;
-                let mut range_covered = false;
-
-                for (ix, result) in results.iter().enumerate() {
-                    if result.path.as_ref() == Path::new(&expected_result.file) {
-                        file_matched = true;
-                        let start_matched =
-                            result.row_range.contains(&expected_result.lines.start());
-                        let end_matched = result.row_range.contains(&expected_result.lines.end());
-
-                        if start_matched || end_matched {
-                            range_overlapped = true;
-                        }
-
-                        if start_matched && end_matched {
-                            range_covered = true;
-                            covered_result_indices.push(ix);
-                            break;
-                        }
-                    }
-                }
-
-                if range_covered {
-                    project_covered_result_count += 1
-                };
-                if range_overlapped {
-                    project_overlapped_result_count += 1
-                };
-                if file_matched {
-                    project_covered_file_count += 1
-                };
-            }
-            let outcome_repo = evaluation_project.repo.clone();
-
-            let query_results = EvaluationQueryOutcome {
-                repo: outcome_repo,
-                query: query.query,
-                total_result_count: query.expected_results.len(),
-                covered_result_count: project_covered_result_count,
-                overlapped_result_count: project_overlapped_result_count,
-                covered_file_count: project_covered_file_count,
-                expected_results: query.expected_results,
-                actual_results: results
-                    .iter()
-                    .map(|result| EvaluationSearchResult {
-                        file: result.path.to_string_lossy().to_string(),
-                        lines: result.row_range.clone(),
-                    })
-                    .collect(),
-                covered_result_indices,
-            };
-
-            overlapped_result_count += query_results.overlapped_result_count;
-            covered_result_count += query_results.covered_result_count;
-            covered_file_count += query_results.covered_file_count;
-            total_result_count += query_results.total_result_count;
-
-            println!("{}", serde_json::to_string(&query_results).unwrap());
-        }
-
-        user_store
-            .update(cx, |_, _| {
-                drop(semantic_index);
-                drop(project);
-                drop(worktree);
-                drop(project_index);
-            })
-            .unwrap();
-    }
-
-    eprint!(
-        "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured.",
-        covered_result_count,
-        total_result_count,
-        overlapped_result_count,
-        total_result_count,
-        covered_file_count,
-        total_result_count,
-    );
-
-    Ok(())
+        let repo = evaluation_project.repo.clone();
+        if let Err(err) = run_eval_project(
+            evaluation_project,
+            &user_store,
+            repo_db_path,
+            &repo_dir,
+            &mut counts,
+            project,
+            embedding_provider.clone(),
+            fs.clone(),
+            cx,
+        )
+        .await
+        {
+            eprintln!("{repo} eval failed with error: {:?}", err);
+            failures.push((repo, err));
+        }
+    }
+
+    eprintln!(
+        "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. {} failed.",
+        counts.covered_results,
+        counts.total_results,
+        counts.overlapped_results,
+        counts.total_results,
+        counts.covered_files,
+        counts.total_results,
+        failures.len(),
+    );
+
+    if failures.is_empty() {
+        Ok(())
+    } else {
+        eprintln!("Failures:\n");
+
+        for (index, (repo, failure)) in failures.iter().enumerate() {
+            eprintln!("Failure #{} - {repo}\n{:?}", index + 1, failure);
+        }
+
+        Err(anyhow::anyhow!("Some evals failed."))
+    }
+}
+
+#[allow(clippy::too_many_arguments)]
+async fn run_eval_project(
+    evaluation_project: EvaluationProject,
+    user_store: &Model<UserStore>,
+    repo_db_path: PathBuf,
+    repo_dir: &Path,
+    counts: &mut Counts,
+    project: Model<Project>,
+    embedding_provider: Arc<dyn EmbeddingProvider>,
+    fs: Arc<dyn Fs>,
+    cx: &mut AsyncAppContext,
+) -> Result<(), anyhow::Error> {
+    let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider, cx).await?;
+
+    let (worktree, _) = project
+        .update(cx, |project, cx| {
+            project.find_or_create_worktree(repo_dir, true, cx)
+        })?
+        .await?;
+
+    worktree
+        .update(cx, |worktree, _| {
+            worktree.as_local().unwrap().scan_complete()
+        })?
+        .await;
+
+    let project_index = cx.update(|cx| semantic_index.create_project_index(project.clone(), cx))?;
+    wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await;
+
+    for query in evaluation_project.queries {
+        let results = {
+            // Retry search up to 3 times in case of timeout, network failure, etc.
+            let mut retries_remaining = 3;
+            let mut result;
+
+            loop {
+                match cx.update(|cx| {
+                    let project_index = project_index.read(cx);
+                    project_index.search(vec![query.query.clone()], SEARCH_RESULT_LIMIT, cx)
+                }) {
+                    Ok(task) => match task.await {
+                        Ok(answer) => {
+                            result = Ok(answer);
+                            break;
+                        }
+                        Err(err) => {
+                            result = Err(err);
+                        }
+                    },
+                    Err(err) => {
+                        result = Err(err);
+                    }
+                }
+
+                if retries_remaining > 0 {
+                    eprintln!(
+                        "Retrying search after it failed on query {:?} with {:?}",
+                        query, result
+                    );
+                    retries_remaining -= 1;
+                } else {
+                    eprintln!(
+                        "Ran out of retries; giving up on search which failed on query {:?} with {:?}",
+                        query, result
+                    );
+                    break;
+                }
+            }
+
+            SemanticDb::load_results(result?, &fs.clone(), &cx).await?
+        };
+
+        let mut project_covered_result_count = 0;
+        let mut project_overlapped_result_count = 0;
+        let mut project_covered_file_count = 0;
+        let mut covered_result_indices = Vec::new();
+        for expected_result in &query.expected_results {
+            let mut file_matched = false;
+            let mut range_overlapped = false;
+            let mut range_covered = false;
+
+            for (ix, result) in results.iter().enumerate() {
+                if result.path.as_ref() == Path::new(&expected_result.file) {
+                    file_matched = true;
+                    let start_matched = result.row_range.contains(&expected_result.lines.start());
+                    let end_matched = result.row_range.contains(&expected_result.lines.end());
+
+                    if start_matched || end_matched {
+                        range_overlapped = true;
+                    }
+
+                    if start_matched && end_matched {
+                        range_covered = true;
+                        covered_result_indices.push(ix);
+                        break;
+                    }
+                }
+            }
+
+            if range_covered {
+                project_covered_result_count += 1
+            };
+            if range_overlapped {
+                project_overlapped_result_count += 1
+            };
+            if file_matched {
+                project_covered_file_count += 1
+            };
+        }
+
+        let outcome_repo = evaluation_project.repo.clone();
+
+        let query_results = EvaluationQueryOutcome {
+            repo: outcome_repo,
+            query: query.query,
+            total_result_count: query.expected_results.len(),
+            covered_result_count: project_covered_result_count,
+            overlapped_result_count: project_overlapped_result_count,
+            covered_file_count: project_covered_file_count,
+            expected_results: query.expected_results,
+            actual_results: results
+                .iter()
+                .map(|result| EvaluationSearchResult {
+                    file: result.path.to_string_lossy().to_string(),
+                    lines: result.row_range.clone(),
+                })
+                .collect(),
+            covered_result_indices,
+        };
+
+        counts.overlapped_results += query_results.overlapped_result_count;
+        counts.covered_results += query_results.covered_result_count;
+        counts.covered_files += query_results.covered_file_count;
+        counts.total_results += query_results.total_result_count;
+
+        println!("{}", serde_json::to_string(&query_results)?);
+    }
+
+    user_store.update(cx, |_, _| {
+        drop(semantic_index);
+        drop(project);
+        drop(worktree);
+        drop(project_index);
+    })
 }
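// Standalone sketch (not part of the diff) of the retry shape used in
// `run_eval_project` above: re-run a fallible async operation a fixed number
// of times, returning the first success or the last error.
async fn with_retries<T, E, F, Fut>(mut attempt: F, mut retries: u32) -> Result<T, E>
where
    F: FnMut() -> Fut,
    Fut: std::future::Future<Output = Result<T, E>>,
{
    loop {
        match attempt().await {
            Ok(value) => return Ok(value),
            Err(err) if retries == 0 => return Err(err),
            Err(_) => retries -= 1,
        }
    }
}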
 
 async fn wait_for_indexing_complete(
@@ -524,7 +599,7 @@ async fn fetch_eval_repos(
     let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json");
     let evaluations: Vec<EvaluationProject> = serde_json::from_slice(&evaluations).unwrap();
 
-    eprint!("Fetching evaluation repositories...");
+    eprintln!("Fetching evaluation repositories...");
 
     executor
         .scoped(move |scope| {
Some files were not shown because too many files have changed in this diff.