Compare commits
130 Commits
ssh-poolin
...
stream-liv
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
69bbcffcd9 | ||
|
|
bd187883da | ||
|
|
4f9217bca0 | ||
|
|
ce5222f1df | ||
|
|
cf7b0c8971 | ||
|
|
7bc4cb9868 | ||
|
|
f84f3ffeb7 | ||
|
|
c564a4a26c | ||
|
|
515fd7b75f | ||
|
|
662a4440cc | ||
|
|
5dee43b05c | ||
|
|
c8003c0697 | ||
|
|
83e2889d63 | ||
|
|
d49cd0019f | ||
|
|
0ba40bdfb8 | ||
|
|
f6cd97f6fd | ||
|
|
774a8bf039 | ||
|
|
4431ef1870 | ||
|
|
b3f0ba1430 | ||
|
|
a5f52f0f04 | ||
|
|
63524a2354 | ||
|
|
90edb7189f | ||
|
|
518f6b529b | ||
|
|
fb97e462de | ||
|
|
5b7fa05a87 | ||
|
|
d310a1269f | ||
|
|
9818835c9d | ||
|
|
f3b7f5944d | ||
|
|
fc5cde9434 | ||
|
|
6ea4662326 | ||
|
|
9d12308d06 | ||
|
|
21137d2ba7 | ||
|
|
273cb1921f | ||
|
|
cfa20ff221 | ||
|
|
759d136fe6 | ||
|
|
322aa41ad6 | ||
|
|
3e2f1d733c | ||
|
|
3fed738d2f | ||
|
|
5893e85708 | ||
|
|
1356665ed3 | ||
|
|
9739da8de3 | ||
|
|
249c8a4d96 | ||
|
|
f919fa92de | ||
|
|
21b58643fa | ||
|
|
6a0bcca9ec | ||
|
|
84328c303b | ||
|
|
f7b2b41df9 | ||
|
|
7a6b6435c4 | ||
|
|
bdb54decdc | ||
|
|
b5c41eeb98 | ||
|
|
719a7f7890 | ||
|
|
1b84fee708 | ||
|
|
58e5d4ff02 | ||
|
|
85ff03cde0 | ||
|
|
a3f0bb4547 | ||
|
|
93b20008e0 | ||
|
|
188a893fd0 | ||
|
|
052b746fbd | ||
|
|
80f89059aa | ||
|
|
826d83edfe | ||
|
|
f5d5fab2c8 | ||
|
|
fab2f22a89 | ||
|
|
a451bcc3c4 | ||
|
|
5e9ff3e313 | ||
|
|
cc81f19c68 | ||
|
|
5e89fba681 | ||
|
|
67eb652bf1 | ||
|
|
e0ea9a9ab5 | ||
|
|
ff29a34298 | ||
|
|
6686f66949 | ||
|
|
8a96ea25c4 | ||
|
|
cdddb4d360 | ||
|
|
03bd95405b | ||
|
|
177dfdf900 | ||
|
|
2ab0b3b819 | ||
|
|
888fec9299 | ||
|
|
e86b096b92 | ||
|
|
ffe36c9beb | ||
|
|
2d16d2d036 | ||
|
|
c69da2df70 | ||
|
|
5506669b06 | ||
|
|
b13940720a | ||
|
|
db61711753 | ||
|
|
c12a9f2673 | ||
|
|
2e32f1c8a1 | ||
|
|
03a1c8d2b8 | ||
|
|
d7a277607b | ||
|
|
fc8a72cdd8 | ||
|
|
1acebb3c47 | ||
|
|
78ed0c9312 | ||
|
|
98d2e5fe73 | ||
|
|
4325819075 | ||
|
|
c19c89e6df | ||
|
|
507929cb79 | ||
|
|
7d0a7aff44 | ||
|
|
92ba18342c | ||
|
|
6de5ace116 | ||
|
|
c9db1b9a7b | ||
|
|
24cb694494 | ||
|
|
85bdd9329b | ||
|
|
d40ea8fc81 | ||
|
|
5f9a1482f1 | ||
|
|
5c2238c7a5 | ||
|
|
5769065f27 | ||
|
|
0173479d18 | ||
|
|
08a3c54bac | ||
|
|
3617873431 | ||
|
|
6eb6788201 | ||
|
|
ebc3031fd9 | ||
|
|
42a7402cc5 | ||
|
|
6cd5c9e32f | ||
|
|
d45b830412 | ||
|
|
3a9c071e6e | ||
|
|
ca861bb1bb | ||
|
|
454d3dd52b | ||
|
|
3ec015b325 | ||
|
|
02718284ef | ||
|
|
b5f816dde5 | ||
|
|
499e1459eb | ||
|
|
b5aea548a8 | ||
|
|
3c6a505166 | ||
|
|
efc4d3efdf | ||
|
|
4214ed927f | ||
|
|
e040b200bc | ||
|
|
1dba50f42f | ||
|
|
0ffc92ab65 | ||
|
|
d30361537e | ||
|
|
510c71d41b | ||
|
|
013d2d52fd | ||
|
|
eee91f3f1b |
2
.github/workflows/bump_collab_staging.yml
vendored
2
.github/workflows/bump_collab_staging.yml
vendored
@@ -11,7 +11,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
|
||||
2
.github/workflows/bump_patch_version.yml
vendored
2
.github/workflows/bump_patch_version.yml
vendored
@@ -18,7 +18,7 @@ jobs:
|
||||
- buildjet-16vcpu-ubuntu-2204
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch }}
|
||||
ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }}
|
||||
|
||||
40
.github/workflows/ci.yml
vendored
40
.github/workflows/ci.yml
vendored
@@ -25,6 +25,7 @@ env:
|
||||
CARGO_TERM_COLOR: always
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUST_BACKTRACE: 1
|
||||
RUSTFLAGS: "-D warnings"
|
||||
|
||||
jobs:
|
||||
migration_checks:
|
||||
@@ -36,7 +37,7 @@ jobs:
|
||||
- test
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0 # fetch full history
|
||||
@@ -78,25 +79,26 @@ jobs:
|
||||
- buildjet-8vcpu-ubuntu-2204
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
|
||||
- name: Run style checks
|
||||
uses: ./.github/actions/check_style
|
||||
|
||||
- name: Check for typos
|
||||
uses: crate-ci/typos@v1.24.6
|
||||
uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
|
||||
with:
|
||||
config: ./typos.toml
|
||||
|
||||
macos_tests:
|
||||
timeout-minutes: 60
|
||||
name: (macOS) Run Clippy and tests
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- self-hosted
|
||||
- test
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -115,17 +117,18 @@ jobs:
|
||||
uses: ./.github/actions/run_tests
|
||||
|
||||
- name: Build collab
|
||||
run: RUSTFLAGS="-D warnings" cargo build -p collab
|
||||
run: cargo build -p collab
|
||||
|
||||
- name: Build other binaries and features
|
||||
run: |
|
||||
RUSTFLAGS="-D warnings" cargo build --workspace --bins --all-features
|
||||
cargo build --workspace --bins --all-features
|
||||
cargo check -p gpui --features "macos-blade"
|
||||
RUSTFLAGS="-D warnings" cargo build -p remote_server
|
||||
cargo build -p remote_server
|
||||
|
||||
linux_tests:
|
||||
timeout-minutes: 60
|
||||
name: (Linux) Run Clippy and tests
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2204
|
||||
steps:
|
||||
@@ -133,7 +136,7 @@ jobs:
|
||||
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -153,11 +156,12 @@ jobs:
|
||||
uses: ./.github/actions/run_tests
|
||||
|
||||
- name: Build Zed
|
||||
run: RUSTFLAGS="-D warnings" cargo build -p zed
|
||||
run: cargo build -p zed
|
||||
|
||||
build_remote_server:
|
||||
timeout-minutes: 60
|
||||
name: (Linux) Build Remote Server
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2204
|
||||
steps:
|
||||
@@ -165,7 +169,7 @@ jobs:
|
||||
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -179,16 +183,20 @@ jobs:
|
||||
run: ./script/remote-server && ./script/install-mold 2.34.0
|
||||
|
||||
- name: Build Remote Server
|
||||
run: RUSTFLAGS="-D warnings" cargo build -p remote_server
|
||||
run: cargo build -p remote_server
|
||||
|
||||
# todo(windows): Actually run the tests
|
||||
windows_tests:
|
||||
timeout-minutes: 60
|
||||
name: (Windows) Run Clippy and tests
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: hosted-windows-1
|
||||
steps:
|
||||
# more info here:- https://github.com/rust-lang/cargo/issues/13020
|
||||
- name: Enable longer pathnames for git
|
||||
run: git config --system core.longpaths true
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -203,7 +211,7 @@ jobs:
|
||||
run: cargo xtask clippy
|
||||
|
||||
- name: Build Zed
|
||||
run: $env:RUSTFLAGS="-D warnings"; cargo build
|
||||
run: cargo build
|
||||
|
||||
bundle-mac:
|
||||
timeout-minutes: 60
|
||||
@@ -229,7 +237,7 @@ jobs:
|
||||
node-version: "18"
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
# We need to fetch more than one commit so that `script/draft-release-notes`
|
||||
# is able to diff between the current and previous tag.
|
||||
@@ -314,7 +322,7 @@ jobs:
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -361,7 +369,7 @@ jobs:
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
steps:
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- name: Set up uv
|
||||
uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3
|
||||
with:
|
||||
|
||||
@@ -10,7 +10,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
steps:
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- name: Set up uv
|
||||
uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3
|
||||
with:
|
||||
|
||||
2
.github/workflows/danger.yml
vendored
2
.github/workflows/danger.yml
vendored
@@ -14,7 +14,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
|
||||
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
|
||||
with:
|
||||
|
||||
2
.github/workflows/deploy_cloudflare.yml
vendored
2
.github/workflows/deploy_cloudflare.yml
vendored
@@ -13,7 +13,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
|
||||
8
.github/workflows/deploy_collab.yml
vendored
8
.github/workflows/deploy_collab.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
- test
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
@@ -36,7 +36,7 @@ jobs:
|
||||
needs: style
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
@@ -71,7 +71,7 @@ jobs:
|
||||
run: doctl registry login
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -97,7 +97,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
|
||||
4
.github/workflows/docs.yml
vendored
4
.github/workflows/docs.yml
vendored
@@ -15,7 +15,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
|
||||
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
|
||||
with:
|
||||
@@ -31,7 +31,7 @@ jobs:
|
||||
}
|
||||
|
||||
- name: Check for Typos with Typos-CLI
|
||||
uses: crate-ci/typos@v1.24.6
|
||||
uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
|
||||
with:
|
||||
config: ./typos.toml
|
||||
files: ./docs/
|
||||
|
||||
2
.github/workflows/publish_extension_cli.yml
vendored
2
.github/workflows/publish_extension_cli.yml
vendored
@@ -16,7 +16,7 @@ jobs:
|
||||
- ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
|
||||
2
.github/workflows/randomized_tests.yml
vendored
2
.github/workflows/randomized_tests.yml
vendored
@@ -27,7 +27,7 @@ jobs:
|
||||
node-version: "18"
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
|
||||
12
.github/workflows/release_nightly.yml
vendored
12
.github/workflows/release_nightly.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
- test
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
@@ -44,7 +44,7 @@ jobs:
|
||||
needs: style
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -75,7 +75,7 @@ jobs:
|
||||
node-version: "18"
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -109,7 +109,7 @@ jobs:
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -149,7 +149,7 @@ jobs:
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
@@ -182,7 +182,7 @@ jobs:
|
||||
- bundle-linux-arm
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
# Code of Conduct
|
||||
|
||||
The Code of Conduct for this repository can be found online at [zed.dev/docs/code-of-conduct](https://zed.dev/docs/code-of-conduct).
|
||||
The Code of Conduct for this repository can be found online at [zed.dev/code-of-conduct](https://zed.dev/code-of-conduct).
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
Thanks for your interest in contributing to Zed, the collaborative platform that is also a code editor!
|
||||
|
||||
All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/docs/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged.
|
||||
All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged.
|
||||
|
||||
## Contribution ideas
|
||||
|
||||
|
||||
633
Cargo.lock
generated
633
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
18
Cargo.toml
18
Cargo.toml
@@ -23,7 +23,6 @@ members = [
|
||||
"crates/context_servers",
|
||||
"crates/copilot",
|
||||
"crates/db",
|
||||
"crates/dev_server_projects",
|
||||
"crates/diagnostics",
|
||||
"crates/docs_preprocessor",
|
||||
"crates/editor",
|
||||
@@ -45,7 +44,6 @@ members = [
|
||||
"crates/google_ai",
|
||||
"crates/gpui",
|
||||
"crates/gpui_macros",
|
||||
"crates/headless",
|
||||
"crates/html_to_markdown",
|
||||
"crates/http_client",
|
||||
"crates/image_viewer",
|
||||
@@ -119,6 +117,7 @@ members = [
|
||||
"crates/theme_selector",
|
||||
"crates/time_format",
|
||||
"crates/title_bar",
|
||||
"crates/toolchain_selector",
|
||||
"crates/ui",
|
||||
"crates/ui_input",
|
||||
"crates/ui_macros",
|
||||
@@ -139,7 +138,6 @@ members = [
|
||||
"extensions/astro",
|
||||
"extensions/clojure",
|
||||
"extensions/csharp",
|
||||
"extensions/dart",
|
||||
"extensions/deno",
|
||||
"extensions/elixir",
|
||||
"extensions/elm",
|
||||
@@ -201,7 +199,6 @@ command_palette_hooks = { path = "crates/command_palette_hooks" }
|
||||
context_servers = { path = "crates/context_servers" }
|
||||
copilot = { path = "crates/copilot" }
|
||||
db = { path = "crates/db" }
|
||||
dev_server_projects = { path = "crates/dev_server_projects" }
|
||||
diagnostics = { path = "crates/diagnostics" }
|
||||
editor = { path = "crates/editor" }
|
||||
extension = { path = "crates/extension" }
|
||||
@@ -219,7 +216,6 @@ go_to_line = { path = "crates/go_to_line" }
|
||||
google_ai = { path = "crates/google_ai" }
|
||||
gpui = { path = "crates/gpui", default-features = false, features = ["http_client"]}
|
||||
gpui_macros = { path = "crates/gpui_macros" }
|
||||
headless = { path = "crates/headless" }
|
||||
html_to_markdown = { path = "crates/html_to_markdown" }
|
||||
http_client = { path = "crates/http_client" }
|
||||
image_viewer = { path = "crates/image_viewer" }
|
||||
@@ -294,6 +290,7 @@ theme_importer = { path = "crates/theme_importer" }
|
||||
theme_selector = { path = "crates/theme_selector" }
|
||||
time_format = { path = "crates/time_format" }
|
||||
title_bar = { path = "crates/title_bar" }
|
||||
toolchain_selector = { path = "crates/toolchain_selector" }
|
||||
ui = { path = "crates/ui" }
|
||||
ui_input = { path = "crates/ui_input" }
|
||||
ui_macros = { path = "crates/ui_macros" }
|
||||
@@ -373,6 +370,7 @@ linkify = "0.10.0"
|
||||
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
|
||||
markup5ever_rcdom = "0.3.0"
|
||||
nanoid = "0.4"
|
||||
nbformat = "0.3.1"
|
||||
nix = "0.29"
|
||||
num-format = "0.4.4"
|
||||
once_cell = "1.19.0"
|
||||
@@ -380,6 +378,11 @@ ordered-float = "2.1.1"
|
||||
palette = { version = "0.7.5", default-features = false, features = ["std"] }
|
||||
parking_lot = "0.12.1"
|
||||
pathdiff = "0.2"
|
||||
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
postage = { version = "0.5", features = ["futures-traits"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
profiling = "1"
|
||||
@@ -388,6 +391,7 @@ prost-build = "0.9"
|
||||
prost-types = "0.9"
|
||||
pulldown-cmark = { version = "0.12.0", default-features = false }
|
||||
rand = "0.8.5"
|
||||
rayon = "1.8"
|
||||
regex = "1.5"
|
||||
repair_json = "0.1.0"
|
||||
reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29", default-features = false, features = [
|
||||
@@ -399,7 +403,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f
|
||||
"stream",
|
||||
] }
|
||||
rsa = "0.9.6"
|
||||
runtimelib = { version = "0.15", default-features = false, features = [
|
||||
runtimelib = { version = "0.16.0", default-features = false, features = [
|
||||
"async-dispatcher-runtime",
|
||||
] }
|
||||
rustc-demangle = "0.1.23"
|
||||
@@ -468,7 +472,7 @@ tree-sitter-typescript = "0.23"
|
||||
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
|
||||
unicase = "2.6"
|
||||
unindent = "0.1.7"
|
||||
unicode-segmentation = "1.11"
|
||||
unicode-segmentation = "1.10"
|
||||
url = "2.2"
|
||||
uuid = { version = "1.1.2", features = ["v4", "v5", "serde"] }
|
||||
wasmparser = "0.215"
|
||||
|
||||
@@ -58,6 +58,7 @@
|
||||
"gitignore": "vcs",
|
||||
"gitkeep": "vcs",
|
||||
"gitmodules": "vcs",
|
||||
"gleam": "gleam",
|
||||
"go": "go",
|
||||
"gql": "graphql",
|
||||
"graphql": "graphql",
|
||||
@@ -83,6 +84,7 @@
|
||||
"j2k": "image",
|
||||
"java": "java",
|
||||
"jfif": "image",
|
||||
"jl": "julia",
|
||||
"jp2": "image",
|
||||
"jpeg": "image",
|
||||
"jpg": "image",
|
||||
@@ -90,7 +92,6 @@
|
||||
"json": "storage",
|
||||
"jsonc": "storage",
|
||||
"jsx": "react",
|
||||
"julia": "julia",
|
||||
"jxl": "image",
|
||||
"kt": "kotlin",
|
||||
"ldf": "storage",
|
||||
@@ -264,6 +265,9 @@
|
||||
"fsharp": {
|
||||
"icon": "icons/file_icons/fsharp.svg"
|
||||
},
|
||||
"gleam": {
|
||||
"icon": "icons/file_icons/gleam.svg"
|
||||
},
|
||||
"go": {
|
||||
"icon": "icons/file_icons/go.svg"
|
||||
},
|
||||
|
||||
6
assets/icons/file_icons/gleam.svg
Normal file
6
assets/icons/file_icons/gleam.svg
Normal file
@@ -0,0 +1,6 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16" fill="none">
|
||||
<path fill-rule="evenodd" fill="black" d="M 3.828125 14.601562 C 3.894531 15.726562 5.183594 16.375 6.132812 15.785156 L 6.136719 15.785156 L 8.988281 13.824219 C 8.996094 13.816406 9.007812 13.8125 9.015625 13.804688 C 9.203125 13.675781 9.4375 13.636719 9.65625 13.691406 L 12.988281 14.550781 C 14.105469 14.839844 15.140625 13.769531 14.8125 12.667969 L 13.832031 9.386719 C 13.769531 9.167969 13.800781 8.9375 13.921875 8.75 C 13.921875 8.746094 13.925781 8.746094 13.925781 8.746094 L 15.777344 5.863281 L 15.777344 5.859375 C 15.78125 5.851562 15.785156 5.84375 15.789062 5.835938 L 15.792969 5.835938 C 16.382812 4.871094 15.6875 3.582031 14.542969 3.554688 L 11.109375 3.472656 C 10.878906 3.464844 10.664062 3.359375 10.519531 3.183594 L 8.339844 0.542969 C 8.019531 0.152344 7.550781 -0.015625 7.105469 0.0078125 L 7.101562 0.0078125 C 7.039062 0.0117188 6.976562 0.0195312 6.914062 0.0273438 C 6.414062 0.117188 5.945312 0.453125 5.75 1 L 4.609375 4.222656 C 4.535156 4.4375 4.367188 4.613281 4.152344 4.695312 L 0.957031 5.945312 C -0.121094 6.363281 -0.328125 7.835938 0.589844 8.535156 L 3.316406 10.609375 C 3.5 10.75 3.609375 10.960938 3.625 11.191406 Z M 7.515625 1.847656 C 7.421875 1.730469 7.296875 1.695312 7.183594 1.714844 C 7.066406 1.734375 6.960938 1.8125 6.914062 1.953125 L 5.867188 4.902344 C 5.699219 5.382812 5.328125 5.765625 4.851562 5.949219 L 1.925781 7.09375 C 1.785156 7.148438 1.710938 7.253906 1.695312 7.371094 C 1.679688 7.484375 1.71875 7.605469 1.839844 7.695312 L 4.335938 9.597656 C 4.742188 9.90625 4.992188 10.375 5.023438 10.882812 L 5.207031 14.003906 C 5.214844 14.152344 5.296875 14.253906 5.398438 14.304688 C 5.503906 14.355469 5.632812 14.355469 5.757812 14.269531 L 8.347656 12.492188 C 8.765625 12.207031 9.292969 12.113281 9.785156 12.242188 L 12.824219 13.027344 C 12.972656 13.066406 13.09375 13.023438 13.175781 12.9375 C 13.257812 12.855469 13.296875 12.734375 13.253906 12.589844 L 12.355469 9.589844 C 12.210938 9.105469 12.285156 8.578125 12.558594 8.148438 L 14.253906 5.511719 C 14.335938 5.386719 14.332031 5.257812 14.277344 5.15625 C 14.222656 5.054688 14.117188 4.980469 13.964844 4.976562 L 10.824219 4.902344 C 10.316406 4.886719 9.835938 4.65625 9.511719 4.261719 Z M 7.515625 1.847656 "/>
|
||||
<path fill="black" d="M 5.71875 7.257812 C 5.671875 7.25 5.628906 7.246094 5.582031 7.246094 C 5.09375 7.246094 4.695312 7.644531 4.695312 8.128906 C 4.695312 8.613281 5.09375 9.011719 5.582031 9.011719 C 6.070312 9.011719 6.46875 8.613281 6.46875 8.128906 C 6.46875 7.6875 6.140625 7.320312 5.71875 7.257812 Z M 5.71875 7.257812 "/>
|
||||
<path fill="black" d="M 11.019531 7.953125 C 10.976562 7.957031 10.929688 7.960938 10.886719 7.960938 C 10.398438 7.960938 10 7.5625 10 7.078125 C 10 6.59375 10.398438 6.195312 10.886719 6.195312 C 11.371094 6.195312 11.773438 6.59375 11.773438 7.078125 C 11.773438 7.519531 11.445312 7.886719 11.019531 7.953125 Z M 11.019531 7.953125 "/>
|
||||
<path fill="black" d="M 7.269531 9.089844 C 7.53125 8.988281 7.828125 9.113281 7.933594 9.375 C 8.125 9.859375 8.503906 9.996094 8.796875 9.949219 C 9.082031 9.898438 9.378906 9.664062 9.378906 9.136719 C 9.378906 8.855469 9.605469 8.628906 9.886719 8.628906 C 10.167969 8.628906 10.398438 8.855469 10.398438 9.136719 C 10.398438 10.140625 9.757812 10.816406 8.96875 10.949219 C 8.1875 11.078125 7.351562 10.664062 6.988281 9.75 C 6.882812 9.488281 7.011719 9.195312 7.269531 9.089844 Z M 7.269531 9.089844 "/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 3.5 KiB |
7
assets/icons/list_x.svg
Normal file
7
assets/icons/list_x.svg
Normal file
@@ -0,0 +1,7 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M8.33333 8H3" stroke="#FBF1C7" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11.6667 4H3" stroke="#FBF1C7" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11.6667 12H3" stroke="#FBF1C7" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M13.6667 6.66663L11 9.33329" stroke="#FBF1C7" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11 6.66663L13.6667 9.33329" stroke="#FBF1C7" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 579 B |
@@ -313,6 +313,15 @@
|
||||
"ctrl-k ctrl-l": "editor::ToggleFold",
|
||||
"ctrl-k ctrl-[": "editor::FoldRecursive",
|
||||
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
|
||||
"ctrl-k ctrl-1": ["editor::FoldAtLevel", { "level": 1 }],
|
||||
"ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }],
|
||||
"ctrl-k ctrl-3": ["editor::FoldAtLevel", { "level": 3 }],
|
||||
"ctrl-k ctrl-4": ["editor::FoldAtLevel", { "level": 4 }],
|
||||
"ctrl-k ctrl-5": ["editor::FoldAtLevel", { "level": 5 }],
|
||||
"ctrl-k ctrl-6": ["editor::FoldAtLevel", { "level": 6 }],
|
||||
"ctrl-k ctrl-7": ["editor::FoldAtLevel", { "level": 7 }],
|
||||
"ctrl-k ctrl-8": ["editor::FoldAtLevel", { "level": 8 }],
|
||||
"ctrl-k ctrl-9": ["editor::FoldAtLevel", { "level": 9 }],
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
@@ -505,6 +514,13 @@
|
||||
"ctrl-enter": "assistant::InlineAssist"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProposedChangesEditor",
|
||||
"bindings": {
|
||||
"ctrl-shift-y": "editor::ApplyDiffHunk",
|
||||
"ctrl-alt-a": "editor::ApplyAllDiffHunks"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && jupyter && !ContextEditor",
|
||||
"bindings": {
|
||||
@@ -516,6 +532,7 @@
|
||||
"context": "ContextEditor > Editor",
|
||||
"bindings": {
|
||||
"ctrl-enter": "assistant::Assist",
|
||||
"ctrl-shift-enter": "assistant::Edit",
|
||||
"ctrl-s": "workspace::Save",
|
||||
"ctrl->": "assistant::QuoteSelection",
|
||||
"ctrl-<": "assistant::InsertIntoEditor",
|
||||
|
||||
@@ -201,6 +201,7 @@
|
||||
"context": "ContextEditor > Editor",
|
||||
"bindings": {
|
||||
"cmd-enter": "assistant::Assist",
|
||||
"cmd-shift-enter": "assistant::Edit",
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd->": "assistant::QuoteSelection",
|
||||
"cmd-<": "assistant::InsertIntoEditor",
|
||||
@@ -350,6 +351,15 @@
|
||||
"cmd-k cmd-l": "editor::ToggleFold",
|
||||
"cmd-k cmd-[": "editor::FoldRecursive",
|
||||
"cmd-k cmd-]": "editor::UnfoldRecursive",
|
||||
"cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }],
|
||||
"cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }],
|
||||
"cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }],
|
||||
"cmd-k cmd-4": ["editor::FoldAtLevel", { "level": 4 }],
|
||||
"cmd-k cmd-5": ["editor::FoldAtLevel", { "level": 5 }],
|
||||
"cmd-k cmd-6": ["editor::FoldAtLevel", { "level": 6 }],
|
||||
"cmd-k cmd-7": ["editor::FoldAtLevel", { "level": 7 }],
|
||||
"cmd-k cmd-8": ["editor::FoldAtLevel", { "level": 8 }],
|
||||
"cmd-k cmd-9": ["editor::FoldAtLevel", { "level": 9 }],
|
||||
"cmd-k cmd-0": "editor::FoldAll",
|
||||
"cmd-k cmd-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
@@ -538,6 +548,13 @@
|
||||
"ctrl-enter": "assistant::InlineAssist"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProposedChangesEditor",
|
||||
"bindings": {
|
||||
"cmd-shift-y": "editor::ApplyDiffHunk",
|
||||
"cmd-shift-a": "editor::ApplyAllDiffHunks"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "PromptEditor",
|
||||
"bindings": {
|
||||
|
||||
@@ -88,7 +88,6 @@ origin: (f64, f64),
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<description>Update the Rectangle's new function to take an origin parameter</description>
|
||||
<operation>update</operation>
|
||||
<old_text>
|
||||
fn new(width: f64, height: f64) -> Self {
|
||||
@@ -117,7 +116,6 @@ pub struct Circle {
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<description>Update the Circle's new function to take an origin parameter</description>
|
||||
<operation>update</operation>
|
||||
<old_text>
|
||||
fn new(radius: f64) -> Self {
|
||||
@@ -134,7 +132,6 @@ fn new(origin: (f64, f64), radius: f64) -> Self {
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<description>Add an import for the std::fmt module</description>
|
||||
<operation>insert_before</operation>
|
||||
<old_text>
|
||||
struct Rectangle {
|
||||
@@ -147,7 +144,10 @@ use std::fmt;
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<description>Add a Display implementation for Rectangle</description>
|
||||
<description>
|
||||
Add a manual Display implementation for Rectangle.
|
||||
Currently, this is the same as a derived Display implementation.
|
||||
</description>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
Rectangle { width, height }
|
||||
@@ -169,7 +169,6 @@ impl fmt::Display for Rectangle {
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<description>Add an import for the `std::fmt` module</description>
|
||||
<operation>insert_before</operation>
|
||||
<old_text>
|
||||
struct Circle {
|
||||
@@ -181,7 +180,6 @@ use std::fmt;
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<description>Add a Display implementation for Circle</description>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
Circle { radius }
|
||||
@@ -369,6 +369,17 @@
|
||||
/// 5. Never show the scrollbar:
|
||||
/// "never"
|
||||
"show": null
|
||||
},
|
||||
// Settings related to indent guides in the project panel.
|
||||
"indent_guides": {
|
||||
// When to show indent guides in the project panel.
|
||||
// This setting can take two values:
|
||||
//
|
||||
// 1. Always show indent guides:
|
||||
// "always"
|
||||
// 2. Never show indent guides:
|
||||
// "never"
|
||||
"show": "always"
|
||||
}
|
||||
},
|
||||
"outline_panel": {
|
||||
@@ -392,7 +403,35 @@
|
||||
"auto_reveal_entries": true,
|
||||
/// Whether to fold directories automatically
|
||||
/// when a directory has only one directory inside.
|
||||
"auto_fold_dirs": true
|
||||
"auto_fold_dirs": true,
|
||||
// Settings related to indent guides in the outline panel.
|
||||
"indent_guides": {
|
||||
// When to show indent guides in the outline panel.
|
||||
// This setting can take two values:
|
||||
//
|
||||
// 1. Always show indent guides:
|
||||
// "always"
|
||||
// 2. Never show indent guides:
|
||||
// "never"
|
||||
"show": "always"
|
||||
},
|
||||
/// Scrollbar-related settings
|
||||
"scrollbar": {
|
||||
/// When to show the scrollbar in the project panel.
|
||||
/// This setting can take four values:
|
||||
///
|
||||
/// 1. null (default): Inherit editor settings
|
||||
/// 2. Show the scrollbar if there's important information or
|
||||
/// follow the system's configured behavior (default):
|
||||
/// "auto"
|
||||
/// 3. Match the system's configured behavior:
|
||||
/// "system"
|
||||
/// 4. Always show the scrollbar:
|
||||
/// "always"
|
||||
/// 5. Never show the scrollbar:
|
||||
/// "never"
|
||||
"show": null
|
||||
}
|
||||
},
|
||||
"collaboration_panel": {
|
||||
// Whether to show the collaboration panel button in the status bar.
|
||||
@@ -775,6 +814,7 @@
|
||||
"tasks": {
|
||||
"variables": {}
|
||||
},
|
||||
"toolchain": { "name": "default", "path": "default" },
|
||||
// An object whose keys are language names, and whose values
|
||||
// are arrays of filenames or extensions of files that should
|
||||
// use those languages.
|
||||
@@ -1099,13 +1139,13 @@
|
||||
// }
|
||||
"command_aliases": {},
|
||||
// ssh_connections is an array of ssh connections.
|
||||
// By default this setting is null, which disables the direct ssh connection support.
|
||||
// You can configure these from `project: Open Remote` in the command palette.
|
||||
// Zed's ssh support will pull configuration from your ~/.ssh too.
|
||||
// Examples:
|
||||
// [
|
||||
// {
|
||||
// "host": "example-box",
|
||||
// // "port": 22, "username": "test", "args": ["-i", "/home/user/.ssh/id_rsa"]
|
||||
// "projects": [
|
||||
// {
|
||||
// "paths": ["/home/user/code/zed"]
|
||||
@@ -1113,7 +1153,7 @@
|
||||
// ]
|
||||
// }
|
||||
// ]
|
||||
"ssh_connections": null,
|
||||
"ssh_connections": [],
|
||||
// Configures the Context Server Protocol binaries
|
||||
//
|
||||
// Examples:
|
||||
|
||||
@@ -23,6 +23,7 @@ language.workspace = true
|
||||
project.workspace = true
|
||||
smallvec.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
workspace.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
|
||||
@@ -13,7 +13,8 @@ use language::{
|
||||
use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId};
|
||||
use smallvec::SmallVec;
|
||||
use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration};
|
||||
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle};
|
||||
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip};
|
||||
use util::truncate_and_trailoff;
|
||||
use workspace::{item::ItemHandle, StatusItemView, Workspace};
|
||||
|
||||
actions!(activity_indicator, [ShowErrorMessage]);
|
||||
@@ -446,6 +447,8 @@ impl ActivityIndicator {
|
||||
|
||||
impl EventEmitter<Event> for ActivityIndicator {}
|
||||
|
||||
const MAX_MESSAGE_LEN: usize = 50;
|
||||
|
||||
impl Render for ActivityIndicator {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let result = h_flex()
|
||||
@@ -456,6 +459,7 @@ impl Render for ActivityIndicator {
|
||||
return result;
|
||||
};
|
||||
let this = cx.view().downgrade();
|
||||
let truncate_content = content.message.len() > MAX_MESSAGE_LEN;
|
||||
result.gap_2().child(
|
||||
PopoverMenu::new("activity-indicator-popover")
|
||||
.trigger(
|
||||
@@ -464,7 +468,21 @@ impl Render for ActivityIndicator {
|
||||
.id("activity-indicator-status")
|
||||
.gap_2()
|
||||
.children(content.icon)
|
||||
.child(Label::new(content.message).size(LabelSize::Small))
|
||||
.map(|button| {
|
||||
if truncate_content {
|
||||
button
|
||||
.child(
|
||||
Label::new(truncate_and_trailoff(
|
||||
&content.message,
|
||||
MAX_MESSAGE_LEN,
|
||||
))
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.tooltip(move |cx| Tooltip::text(&content.message, cx))
|
||||
} else {
|
||||
button.child(Label::new(content.message).size(LabelSize::Small))
|
||||
}
|
||||
})
|
||||
.when_some(content.on_click, |this, handler| {
|
||||
this.on_click(cx.listener(move |this, _, cx| {
|
||||
handler(this, cx);
|
||||
|
||||
@@ -41,12 +41,10 @@ use prompts::PromptLoadingParams;
|
||||
use semantic_index::{CloudEmbeddingProvider, SemanticDb};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{update_settings_file, Settings, SettingsStore};
|
||||
use slash_command::workflow_command::WorkflowSlashCommand;
|
||||
use slash_command::{
|
||||
auto_command, cargo_workspace_command, context_server_command, default_command, delta_command,
|
||||
diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command,
|
||||
prompt_command, search_command, symbols_command, tab_command, terminal_command,
|
||||
workflow_command,
|
||||
};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
@@ -59,6 +57,7 @@ actions!(
|
||||
assistant,
|
||||
[
|
||||
Assist,
|
||||
Edit,
|
||||
Split,
|
||||
CopyCode,
|
||||
CycleMessageRole,
|
||||
@@ -298,25 +297,64 @@ fn register_context_server_handlers(cx: &mut AppContext) {
|
||||
return;
|
||||
};
|
||||
|
||||
if let Some(prompts) = protocol.list_prompts().await.log_err() {
|
||||
for prompt in prompts
|
||||
.into_iter()
|
||||
.filter(context_server_command::acceptable_prompt)
|
||||
{
|
||||
log::info!(
|
||||
"registering context server command: {:?}",
|
||||
prompt.name
|
||||
);
|
||||
context_server_registry.register_command(
|
||||
server.id.clone(),
|
||||
prompt.name.as_str(),
|
||||
);
|
||||
slash_command_registry.register_command(
|
||||
context_server_command::ContextServerSlashCommand::new(
|
||||
&server, prompt,
|
||||
),
|
||||
true,
|
||||
);
|
||||
if protocol.capable(context_servers::protocol::ServerCapability::Prompts) {
|
||||
if let Some(prompts) = protocol.list_prompts().await.log_err() {
|
||||
for prompt in prompts
|
||||
.into_iter()
|
||||
.filter(context_server_command::acceptable_prompt)
|
||||
{
|
||||
log::info!(
|
||||
"registering context server command: {:?}",
|
||||
prompt.name
|
||||
);
|
||||
context_server_registry.register_command(
|
||||
server.id.clone(),
|
||||
prompt.name.as_str(),
|
||||
);
|
||||
slash_command_registry.register_command(
|
||||
context_server_command::ContextServerSlashCommand::new(
|
||||
&server, prompt,
|
||||
),
|
||||
true,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
cx.update_model(
|
||||
&manager,
|
||||
|manager: &mut context_servers::manager::ContextServerManager, cx| {
|
||||
let tool_registry = ToolRegistry::global(cx);
|
||||
let context_server_registry = ContextServerRegistry::global(cx);
|
||||
if let Some(server) = manager.get_server(server_id) {
|
||||
cx.spawn(|_, _| async move {
|
||||
let Some(protocol) = server.client.read().clone() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if protocol.capable(context_servers::protocol::ServerCapability::Tools) {
|
||||
if let Some(tools) = protocol.list_tools().await.log_err() {
|
||||
for tool in tools.tools {
|
||||
log::info!(
|
||||
"registering context server tool: {:?}",
|
||||
tool.name
|
||||
);
|
||||
context_server_registry.register_tool(
|
||||
server.id.clone(),
|
||||
tool.name.as_str(),
|
||||
);
|
||||
tool_registry.register_tool(
|
||||
tools::context_server_tool::ContextServerTool::new(
|
||||
server.id.clone(),
|
||||
tool
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -334,6 +372,14 @@ fn register_context_server_handlers(cx: &mut AppContext) {
|
||||
context_server_registry.unregister_command(&server_id, &command_name);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(tools) = context_server_registry.get_tools(server_id) {
|
||||
let tool_registry = ToolRegistry::global(cx);
|
||||
for tool_name in tools {
|
||||
tool_registry.unregister_tool_by_name(&tool_name);
|
||||
context_server_registry.unregister_tool(&server_id, &tool_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
@@ -397,22 +443,6 @@ fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut
|
||||
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
|
||||
|
||||
if let Some(prompt_builder) = prompt_builder {
|
||||
cx.observe_global::<SettingsStore>({
|
||||
let slash_command_registry = slash_command_registry.clone();
|
||||
let prompt_builder = prompt_builder.clone();
|
||||
move |cx| {
|
||||
if AssistantSettings::get_global(cx).are_live_diffs_enabled(cx) {
|
||||
slash_command_registry.register_command(
|
||||
workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()),
|
||||
true,
|
||||
);
|
||||
} else {
|
||||
slash_command_registry.unregister_command_by_name(WorkflowSlashCommand::NAME);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.observe_flag::<project_command::ProjectSlashCommandFeatureFlag, _>({
|
||||
let slash_command_registry = slash_command_registry.clone();
|
||||
move |is_enabled, _cx| {
|
||||
|
||||
@@ -13,10 +13,11 @@ use crate::{
|
||||
terminal_inline_assistant::TerminalInlineAssistant,
|
||||
Assist, AssistantPatch, AssistantPatchStatus, CacheStatus, ConfirmCommand, Content, Context,
|
||||
ContextEvent, ContextId, ContextStore, ContextStoreEvent, CopyCode, CycleMessageRole,
|
||||
DeployHistory, DeployPromptLibrary, InlineAssistant, InsertDraggedFiles, InsertIntoEditor,
|
||||
Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector,
|
||||
NewContext, PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection,
|
||||
RemoteContextMetadata, SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector,
|
||||
DeployHistory, DeployPromptLibrary, Edit, InlineAssistant, InsertDraggedFiles,
|
||||
InsertIntoEditor, Message, MessageId, MessageMetadata, MessageStatus, ModelPickerDelegate,
|
||||
ModelSelector, NewContext, PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection,
|
||||
RemoteContextMetadata, RequestType, SavedContextMetadata, Split, ToggleFocus,
|
||||
ToggleModelSelector,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use assistant_slash_command::{SlashCommand, SlashCommandOutputSection};
|
||||
@@ -26,8 +27,8 @@ use collections::{BTreeSet, HashMap, HashSet};
|
||||
use editor::{
|
||||
actions::{FoldAt, MoveToEndOfLine, Newline, ShowCompletions, UnfoldAt},
|
||||
display_map::{
|
||||
BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, Crease,
|
||||
CreaseMetadata, CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
|
||||
BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata,
|
||||
CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
|
||||
},
|
||||
scroll::{Autoscroll, AutoscrollStrategy},
|
||||
Anchor, Editor, EditorEvent, ProposedChangeLocation, ProposedChangesEditor, RowExt,
|
||||
@@ -963,7 +964,7 @@ impl AssistantPanel {
|
||||
|
||||
fn new_context(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ContextEditor>> {
|
||||
let project = self.project.read(cx);
|
||||
if project.is_via_collab() && project.dev_server_project_id().is_none() {
|
||||
if project.is_via_collab() {
|
||||
let task = self
|
||||
.context_store
|
||||
.update(cx, |store, cx| store.create_remote_context(cx));
|
||||
@@ -1461,6 +1462,7 @@ type MessageHeader = MessageMetadata;
|
||||
|
||||
#[derive(Clone)]
|
||||
enum AssistError {
|
||||
FileRequired,
|
||||
PaymentRequired,
|
||||
MaxMonthlySpendReached,
|
||||
Message(SharedString),
|
||||
@@ -1588,23 +1590,11 @@ impl ContextEditor {
|
||||
}
|
||||
|
||||
fn assist(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
|
||||
let provider = LanguageModelRegistry::read_global(cx).active_provider();
|
||||
if provider
|
||||
.as_ref()
|
||||
.map_or(false, |provider| provider.must_accept_terms(cx))
|
||||
{
|
||||
self.show_accept_terms = true;
|
||||
cx.notify();
|
||||
return;
|
||||
}
|
||||
self.send_to_model(RequestType::Chat, cx);
|
||||
}
|
||||
|
||||
if self.focus_active_patch(cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
self.last_error = None;
|
||||
self.send_to_model(cx);
|
||||
cx.notify();
|
||||
fn edit(&mut self, _: &Edit, cx: &mut ViewContext<Self>) {
|
||||
self.send_to_model(RequestType::SuggestEdits, cx);
|
||||
}
|
||||
|
||||
fn focus_active_patch(&mut self, cx: &mut ViewContext<Self>) -> bool {
|
||||
@@ -1622,8 +1612,30 @@ impl ContextEditor {
|
||||
false
|
||||
}
|
||||
|
||||
fn send_to_model(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if let Some(user_message) = self.context.update(cx, |context, cx| context.assist(cx)) {
|
||||
fn send_to_model(&mut self, request_type: RequestType, cx: &mut ViewContext<Self>) {
|
||||
let provider = LanguageModelRegistry::read_global(cx).active_provider();
|
||||
if provider
|
||||
.as_ref()
|
||||
.map_or(false, |provider| provider.must_accept_terms(cx))
|
||||
{
|
||||
self.show_accept_terms = true;
|
||||
cx.notify();
|
||||
return;
|
||||
}
|
||||
|
||||
if self.focus_active_patch(cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
self.last_error = None;
|
||||
|
||||
if request_type == RequestType::SuggestEdits && !self.context.read(cx).contains_files(cx) {
|
||||
self.last_error = Some(AssistError::FileRequired);
|
||||
cx.notify();
|
||||
} else if let Some(user_message) = self
|
||||
.context
|
||||
.update(cx, |context, cx| context.assist(request_type, cx))
|
||||
{
|
||||
let new_selection = {
|
||||
let cursor = user_message
|
||||
.start
|
||||
@@ -1640,6 +1652,8 @@ impl ContextEditor {
|
||||
// Avoid scrolling to the new cursor position so the assistant's output is stable.
|
||||
cx.defer(|this, _| this.scroll_position = None);
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext<Self>) {
|
||||
@@ -1667,8 +1681,10 @@ impl ContextEditor {
|
||||
});
|
||||
}
|
||||
|
||||
fn cursors(&self, cx: &AppContext) -> Vec<usize> {
|
||||
let selections = self.editor.read(cx).selections.all::<usize>(cx);
|
||||
fn cursors(&self, cx: &mut WindowContext) -> Vec<usize> {
|
||||
let selections = self
|
||||
.editor
|
||||
.update(cx, |editor, cx| editor.selections.all::<usize>(cx));
|
||||
selections
|
||||
.into_iter()
|
||||
.map(|selection| selection.head())
|
||||
@@ -2009,13 +2025,12 @@ impl ContextEditor {
|
||||
})
|
||||
.map(|(command, error_message)| BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
position: Anchor {
|
||||
height: 1,
|
||||
placement: BlockPlacement::Below(Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id,
|
||||
text_anchor: command.source_range.start,
|
||||
},
|
||||
height: 1,
|
||||
disposition: BlockDisposition::Below,
|
||||
}),
|
||||
render: slash_command_error_block_renderer(error_message),
|
||||
priority: 0,
|
||||
}),
|
||||
@@ -2242,11 +2257,10 @@ impl ContextEditor {
|
||||
} else {
|
||||
let block_ids = editor.insert_blocks(
|
||||
[BlockProperties {
|
||||
position: patch_start,
|
||||
height: path_count as u32 + 1,
|
||||
style: BlockStyle::Flex,
|
||||
render: render_block,
|
||||
disposition: BlockDisposition::Below,
|
||||
placement: BlockPlacement::Below(patch_start),
|
||||
priority: 0,
|
||||
}],
|
||||
None,
|
||||
@@ -2377,7 +2391,9 @@ impl ContextEditor {
|
||||
}
|
||||
|
||||
fn update_active_patch(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let newest_cursor = self.editor.read(cx).selections.newest::<Point>(cx).head();
|
||||
let newest_cursor = self.editor.update(cx, |editor, cx| {
|
||||
editor.selections.newest::<Point>(cx).head()
|
||||
});
|
||||
let context = self.context.read(cx);
|
||||
|
||||
let new_patch = context.patch_containing(newest_cursor, cx).cloned();
|
||||
@@ -2452,7 +2468,7 @@ impl ContextEditor {
|
||||
|
||||
let editor = cx.new_view(|cx| {
|
||||
let editor = ProposedChangesEditor::new(
|
||||
patch.title.clone(),
|
||||
patch.title.clone().into(),
|
||||
resolved_patch
|
||||
.edit_groups
|
||||
.iter()
|
||||
@@ -2731,12 +2747,13 @@ impl ContextEditor {
|
||||
})
|
||||
};
|
||||
let create_block_properties = |message: &Message| BlockProperties {
|
||||
position: buffer
|
||||
.anchor_in_excerpt(excerpt_id, message.anchor_range.start)
|
||||
.unwrap(),
|
||||
height: 2,
|
||||
style: BlockStyle::Sticky,
|
||||
disposition: BlockDisposition::Above,
|
||||
placement: BlockPlacement::Above(
|
||||
buffer
|
||||
.anchor_in_excerpt(excerpt_id, message.anchor_range.start)
|
||||
.unwrap(),
|
||||
),
|
||||
priority: usize::MAX,
|
||||
render: render_block(MessageMetadata::from(message)),
|
||||
};
|
||||
@@ -2783,39 +2800,40 @@ impl ContextEditor {
|
||||
) -> Option<(String, bool)> {
|
||||
const CODE_FENCE_DELIMITER: &'static str = "```";
|
||||
|
||||
let context_editor = context_editor_view.read(cx).editor.read(cx);
|
||||
let context_editor = context_editor_view.read(cx).editor.clone();
|
||||
context_editor.update(cx, |context_editor, cx| {
|
||||
if context_editor.selections.newest::<Point>(cx).is_empty() {
|
||||
let snapshot = context_editor.buffer().read(cx).snapshot(cx);
|
||||
let (_, _, snapshot) = snapshot.as_singleton()?;
|
||||
|
||||
if context_editor.selections.newest::<Point>(cx).is_empty() {
|
||||
let snapshot = context_editor.buffer().read(cx).snapshot(cx);
|
||||
let (_, _, snapshot) = snapshot.as_singleton()?;
|
||||
let head = context_editor.selections.newest::<Point>(cx).head();
|
||||
let offset = snapshot.point_to_offset(head);
|
||||
|
||||
let head = context_editor.selections.newest::<Point>(cx).head();
|
||||
let offset = snapshot.point_to_offset(head);
|
||||
let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?;
|
||||
let mut text = snapshot
|
||||
.text_for_range(surrounding_code_block_range)
|
||||
.collect::<String>();
|
||||
|
||||
let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?;
|
||||
let mut text = snapshot
|
||||
.text_for_range(surrounding_code_block_range)
|
||||
.collect::<String>();
|
||||
// If there is no newline trailing the closing three-backticks, then
|
||||
// tree-sitter-md extends the range of the content node to include
|
||||
// the backticks.
|
||||
if text.ends_with(CODE_FENCE_DELIMITER) {
|
||||
text.drain((text.len() - CODE_FENCE_DELIMITER.len())..);
|
||||
}
|
||||
|
||||
// If there is no newline trailing the closing three-backticks, then
|
||||
// tree-sitter-md extends the range of the content node to include
|
||||
// the backticks.
|
||||
if text.ends_with(CODE_FENCE_DELIMITER) {
|
||||
text.drain((text.len() - CODE_FENCE_DELIMITER.len())..);
|
||||
(!text.is_empty()).then_some((text, true))
|
||||
} else {
|
||||
let anchor = context_editor.selections.newest_anchor();
|
||||
let text = context_editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.read(cx)
|
||||
.text_for_range(anchor.range())
|
||||
.collect::<String>();
|
||||
|
||||
(!text.is_empty()).then_some((text, false))
|
||||
}
|
||||
|
||||
(!text.is_empty()).then_some((text, true))
|
||||
} else {
|
||||
let anchor = context_editor.selections.newest_anchor();
|
||||
let text = context_editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.read(cx)
|
||||
.text_for_range(anchor.range())
|
||||
.collect::<String>();
|
||||
|
||||
(!text.is_empty()).then_some((text, false))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn insert_selection(
|
||||
@@ -3372,7 +3390,7 @@ impl ContextEditor {
|
||||
let anchor = buffer.anchor_in_excerpt(excerpt_id, anchor).unwrap();
|
||||
let image = render_image.clone();
|
||||
anchor.is_valid(&buffer).then(|| BlockProperties {
|
||||
position: anchor,
|
||||
placement: BlockPlacement::Above(anchor),
|
||||
height: MAX_HEIGHT_IN_LINES,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new(move |cx| {
|
||||
@@ -3393,8 +3411,6 @@ impl ContextEditor {
|
||||
)
|
||||
.into_any_element()
|
||||
}),
|
||||
|
||||
disposition: BlockDisposition::Above,
|
||||
priority: 0,
|
||||
})
|
||||
})
|
||||
@@ -3647,7 +3663,13 @@ impl ContextEditor {
|
||||
button.tooltip(move |_| tooltip.clone())
|
||||
})
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.child(Label::new("Send"))
|
||||
.child(Label::new(
|
||||
if AssistantSettings::get_global(cx).are_live_diffs_enabled(cx) {
|
||||
"Chat"
|
||||
} else {
|
||||
"Send"
|
||||
},
|
||||
))
|
||||
.children(
|
||||
KeyBinding::for_action_in(&Assist, &focus_handle, cx)
|
||||
.map(|binding| binding.into_any_element()),
|
||||
@@ -3657,6 +3679,57 @@ impl ContextEditor {
|
||||
})
|
||||
}
|
||||
|
||||
fn render_edit_button(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let focus_handle = self.focus_handle(cx).clone();
|
||||
|
||||
let (style, tooltip) = match token_state(&self.context, cx) {
|
||||
Some(TokenState::NoTokensLeft { .. }) => (
|
||||
ButtonStyle::Tinted(TintColor::Negative),
|
||||
Some(Tooltip::text("Token limit reached", cx)),
|
||||
),
|
||||
Some(TokenState::HasMoreTokens {
|
||||
over_warn_threshold,
|
||||
..
|
||||
}) => {
|
||||
let (style, tooltip) = if over_warn_threshold {
|
||||
(
|
||||
ButtonStyle::Tinted(TintColor::Warning),
|
||||
Some(Tooltip::text("Token limit is close to exhaustion", cx)),
|
||||
)
|
||||
} else {
|
||||
(ButtonStyle::Filled, None)
|
||||
};
|
||||
(style, tooltip)
|
||||
}
|
||||
None => (ButtonStyle::Filled, None),
|
||||
};
|
||||
|
||||
let provider = LanguageModelRegistry::read_global(cx).active_provider();
|
||||
|
||||
let has_configuration_error = configuration_error(cx).is_some();
|
||||
let needs_to_accept_terms = self.show_accept_terms
|
||||
&& provider
|
||||
.as_ref()
|
||||
.map_or(false, |provider| provider.must_accept_terms(cx));
|
||||
let disabled = has_configuration_error || needs_to_accept_terms;
|
||||
|
||||
ButtonLike::new("edit_button")
|
||||
.disabled(disabled)
|
||||
.style(style)
|
||||
.when_some(tooltip, |button, tooltip| {
|
||||
button.tooltip(move |_| tooltip.clone())
|
||||
})
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.child(Label::new("Suggest Edits"))
|
||||
.children(
|
||||
KeyBinding::for_action_in(&Edit, &focus_handle, cx)
|
||||
.map(|binding| binding.into_any_element()),
|
||||
)
|
||||
.on_click(move |_event, cx| {
|
||||
focus_handle.dispatch_action(&Edit, cx);
|
||||
})
|
||||
}
|
||||
|
||||
fn render_last_error(&self, cx: &mut ViewContext<Self>) -> Option<AnyElement> {
|
||||
let last_error = self.last_error.as_ref()?;
|
||||
|
||||
@@ -3671,6 +3744,7 @@ impl ContextEditor {
|
||||
.elevation_2(cx)
|
||||
.occlude()
|
||||
.child(match last_error {
|
||||
AssistError::FileRequired => self.render_file_required_error(cx),
|
||||
AssistError::PaymentRequired => self.render_payment_required_error(cx),
|
||||
AssistError::MaxMonthlySpendReached => {
|
||||
self.render_max_monthly_spend_reached_error(cx)
|
||||
@@ -3683,6 +3757,41 @@ impl ContextEditor {
|
||||
)
|
||||
}
|
||||
|
||||
fn render_file_required_error(&self, cx: &mut ViewContext<Self>) -> AnyElement {
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.items_center()
|
||||
.child(Icon::new(IconName::Warning).color(Color::Warning))
|
||||
.child(
|
||||
Label::new("Suggest Edits needs a file to edit").weight(FontWeight::MEDIUM),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("error-message")
|
||||
.max_h_24()
|
||||
.overflow_y_scroll()
|
||||
.child(Label::new(
|
||||
"To include files, type /file or /tab in your prompt.",
|
||||
)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_end()
|
||||
.mt_1()
|
||||
.child(Button::new("dismiss", "Dismiss").on_click(cx.listener(
|
||||
|this, _, cx| {
|
||||
this.last_error = None;
|
||||
cx.notify();
|
||||
},
|
||||
))),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_payment_required_error(&self, cx: &mut ViewContext<Self>) -> AnyElement {
|
||||
const ERROR_MESSAGE: &str = "Free tier exceeded. Subscribe and add payment to continue using Zed LLMs. You'll be billed at cost for tokens used.";
|
||||
|
||||
@@ -3913,6 +4022,7 @@ impl Render for ContextEditor {
|
||||
.capture_action(cx.listener(ContextEditor::paste))
|
||||
.capture_action(cx.listener(ContextEditor::cycle_message_role))
|
||||
.capture_action(cx.listener(ContextEditor::confirm_command))
|
||||
.on_action(cx.listener(ContextEditor::edit))
|
||||
.on_action(cx.listener(ContextEditor::assist))
|
||||
.on_action(cx.listener(ContextEditor::split))
|
||||
.size_full()
|
||||
@@ -3949,7 +4059,7 @@ impl Render for ContextEditor {
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.gap_1()
|
||||
.child(render_inject_context_menu(cx.view().downgrade(), cx))
|
||||
.child(
|
||||
IconButton::new("quote-button", IconName::Quote)
|
||||
@@ -3977,7 +4087,21 @@ impl Render for ContextEditor {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_end()
|
||||
.child(div().child(self.render_send_button(cx))),
|
||||
.when(
|
||||
AssistantSettings::get_global(cx).are_live_diffs_enabled(cx),
|
||||
|buttons| {
|
||||
buttons
|
||||
.items_center()
|
||||
.gap_1p5()
|
||||
.child(self.render_edit_button(cx))
|
||||
.child(
|
||||
Label::new("or")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
},
|
||||
)
|
||||
.child(self.render_send_button(cx)),
|
||||
),
|
||||
),
|
||||
)
|
||||
@@ -4249,11 +4373,11 @@ fn render_inject_context_menu(
|
||||
slash_command_picker::SlashCommandSelector::new(
|
||||
commands.clone(),
|
||||
active_context_editor,
|
||||
IconButton::new("trigger", IconName::SlashSquare)
|
||||
Button::new("trigger", "Add Context")
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta("Insert Context", None, "Type / to insert via keyboard", cx)
|
||||
}),
|
||||
.icon_position(IconPosition::Start)
|
||||
.tooltip(|cx| Tooltip::text("Type / to insert via keyboard", cx)),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -4710,7 +4834,7 @@ impl Render for ConfigurationView {
|
||||
|
||||
let mut element = v_flex()
|
||||
.id("assistant-configuration-view")
|
||||
.track_focus(&self.focus_handle)
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.size_full()
|
||||
.overflow_y_scroll()
|
||||
|
||||
@@ -2,12 +2,13 @@
|
||||
mod context_tests;
|
||||
|
||||
use crate::{
|
||||
prompts::PromptBuilder, slash_command::SlashCommandLine, AssistantEdit, AssistantPatch,
|
||||
AssistantPatchStatus, MessageId, MessageStatus,
|
||||
prompts::PromptBuilder,
|
||||
slash_command::{file_command::FileCommandMetadata, SlashCommandLine},
|
||||
AssistantEdit, AssistantPatch, AssistantPatchStatus, MessageId, MessageStatus,
|
||||
};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use assistant_slash_command::{
|
||||
SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult,
|
||||
SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult,
|
||||
};
|
||||
use assistant_tool::ToolRegistry;
|
||||
use client::{self, proto, telemetry::Telemetry};
|
||||
@@ -66,6 +67,14 @@ impl ContextId {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RequestType {
    /// Request a normal chat response from the model.
    Chat,
    /// Add a preamble to the message, which tells the model to return a structured response that suggests edits.
    SuggestEdits,
}

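The two variants drive everything below: the same context can either be sent as a plain chat request or wrapped with the structured-edit preamble. A minimal sketch of the dispatch this implies, assuming a hypothetical send_to_model helper (the diff only shows that ContextEditor gains separate edit and assist action handlers):

// Hypothetical wiring; the real handlers live in ContextEditor further down in this diff.
fn assist(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
    self.send_to_model(RequestType::Chat, cx);
}

fn edit(&mut self, _: &Edit, cx: &mut ViewContext<Self>) {
    self.send_to_model(RequestType::SuggestEdits, cx);
}
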
#[derive(Clone, Debug)]
|
||||
pub enum ContextOperation {
|
||||
InsertMessage {
|
||||
@@ -981,6 +990,20 @@ impl Context {
|
||||
&self.slash_command_output_sections
|
||||
}
|
||||
|
||||
pub fn contains_files(&self, cx: &AppContext) -> bool {
    let buffer = self.buffer.read(cx);
    self.slash_command_output_sections.iter().any(|section| {
        section.is_valid(buffer)
            && section
                .metadata
                .as_ref()
                .and_then(|metadata| {
                    serde_json::from_value::<FileCommandMetadata>(metadata.clone()).ok()
                })
                .is_some()
    })
}

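contains_files reports whether any valid slash-command section carries file metadata. A minimal sketch of the kind of guard this enables before a Suggest Edits request; the exact call site is an assumption based on the FileRequired error introduced elsewhere in this diff, not code from the source:

// Hypothetical guard: Suggest Edits needs at least one /file or /tab section,
// otherwise surface the new AssistError::FileRequired to the user.
if request_type == RequestType::SuggestEdits && !context.read(cx).contains_files(cx) {
    self.last_error = Some(AssistError::FileRequired);
    cx.notify();
    return;
}
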
pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
|
||||
self.pending_tool_uses_by_id.values().collect()
|
||||
}
|
||||
@@ -1028,7 +1051,7 @@ impl Context {
|
||||
}
|
||||
|
||||
pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
|
||||
let request = self.to_completion_request(cx);
|
||||
let request = self.to_completion_request(RequestType::SuggestEdits, cx); // Conservatively assume SuggestEdits, since it takes more tokens.
|
||||
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
|
||||
return;
|
||||
};
|
||||
@@ -1171,7 +1194,7 @@ impl Context {
|
||||
}
|
||||
|
||||
let request = {
|
||||
let mut req = self.to_completion_request(cx);
|
||||
let mut req = self.to_completion_request(RequestType::Chat, cx);
|
||||
// Skip the last message because it's likely to change and
|
||||
// therefore would be a waste to cache.
|
||||
req.messages.pop();
|
||||
@@ -1688,19 +1711,13 @@ impl Context {
|
||||
let command_range = command_range.clone();
|
||||
async move {
|
||||
let output = output.await;
|
||||
let output = match output {
|
||||
Ok(output) => SlashCommandOutput::from_event_stream(output).await,
|
||||
Err(err) => Err(err),
|
||||
};
|
||||
this.update(&mut cx, |this, cx| match output {
|
||||
Ok(mut output) => {
|
||||
// Ensure section ranges are valid.
|
||||
for section in &mut output.sections {
|
||||
section.range.start = section.range.start.min(output.text.len());
|
||||
section.range.end = section.range.end.min(output.text.len());
|
||||
while !output.text.is_char_boundary(section.range.start) {
|
||||
section.range.start -= 1;
|
||||
}
|
||||
while !output.text.is_char_boundary(section.range.end) {
|
||||
section.range.end += 1;
|
||||
}
|
||||
}
|
||||
output.ensure_valid_section_ranges();
|
||||
|
||||
// Ensure there is a newline after the last section.
|
||||
if ensure_trailing_newline {
|
||||
@@ -1865,7 +1882,11 @@ impl Context {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<MessageAnchor> {
|
||||
pub fn assist(
|
||||
&mut self,
|
||||
request_type: RequestType,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Option<MessageAnchor> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let provider = model_registry.active_provider()?;
|
||||
let model = model_registry.active_model()?;
|
||||
@@ -1878,7 +1899,7 @@ impl Context {
|
||||
// Compute which messages to cache, including the last one.
|
||||
self.mark_cache_anchors(&model.cache_configuration(), false, cx);
|
||||
|
||||
let mut request = self.to_completion_request(cx);
|
||||
let mut request = self.to_completion_request(request_type, cx);
|
||||
|
||||
if cx.has_flag::<ToolUseFeatureFlag>() {
|
||||
let tool_registry = ToolRegistry::global(cx);
|
||||
@@ -2080,7 +2101,11 @@ impl Context {
|
||||
Some(user_message)
|
||||
}
|
||||
|
||||
pub fn to_completion_request(&self, cx: &AppContext) -> LanguageModelRequest {
|
||||
pub fn to_completion_request(
|
||||
&self,
|
||||
request_type: RequestType,
|
||||
cx: &AppContext,
|
||||
) -> LanguageModelRequest {
|
||||
let buffer = self.buffer.read(cx);
|
||||
|
||||
let mut contents = self.contents(cx).peekable();
|
||||
@@ -2169,6 +2194,25 @@ impl Context {
|
||||
completion_request.messages.push(request_message);
|
||||
}
|
||||
|
||||
if let RequestType::SuggestEdits = request_type {
    if let Ok(preamble) = self.prompt_builder.generate_workflow_prompt() {
        let last_elem_index = completion_request.messages.len();

        completion_request
            .messages
            .push(LanguageModelRequestMessage {
                role: Role::User,
                content: vec![MessageContent::Text(preamble)],
                cache: false,
            });

        // The preamble message should be sent right before the last actual user message.
        completion_request
            .messages
            .swap(last_elem_index, last_elem_index.saturating_sub(1));
    }
}

completion_request
}

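A minimal, self-contained illustration of the push-then-swap placement above, using a plain Vec in place of the request messages:

// Before: ["system", "user_msg"]             (last_elem_index == 2)
// Push:   ["system", "user_msg", "preamble"]
// Swap 2 with 1:
// After:  ["system", "preamble", "user_msg"], so the preamble sits right before the last user message.
let mut messages = vec!["system", "user_msg"];
let last_elem_index = messages.len();
messages.push("preamble");
messages.swap(last_elem_index, last_elem_index.saturating_sub(1));
assert_eq!(messages, vec!["system", "preamble", "user_msg"]);
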
@@ -2483,7 +2527,7 @@ impl Context {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut request = self.to_completion_request(cx);
|
||||
let mut request = self.to_completion_request(RequestType::Chat, cx);
|
||||
request.messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![
|
||||
|
||||
@@ -636,7 +636,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn one".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: "add a `two` function".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
@@ -690,7 +690,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: "add a `two` function".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
@@ -754,7 +754,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: "add a `two` function".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
@@ -798,7 +798,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: "add a `two` function".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
@@ -1097,7 +1097,8 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
|
||||
text: output_text,
|
||||
sections,
|
||||
run_commands_in_text: false,
|
||||
})),
|
||||
}
|
||||
.to_event_stream())),
|
||||
true,
|
||||
false,
|
||||
cx,
|
||||
@@ -1421,6 +1422,7 @@ impl SlashCommand for FakeSlashCommand {
|
||||
text: format!("Executed fake command: {}", self.0),
|
||||
sections: vec![],
|
||||
run_commands_in_text: false,
|
||||
}))
|
||||
}
|
||||
.to_event_stream()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::{
|
||||
assistant_settings::AssistantSettings, humanize_token_count, prompts::PromptBuilder,
|
||||
AssistantPanel, AssistantPanelEvent, CharOperation, CycleNextInlineAssist,
|
||||
CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, StreamingDiff,
|
||||
CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, RequestType, StreamingDiff,
|
||||
};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use client::{telemetry::Telemetry, ErrorExt};
|
||||
@@ -9,7 +9,7 @@ use collections::{hash_map, HashMap, HashSet, VecDeque};
|
||||
use editor::{
|
||||
actions::{MoveDown, MoveUp, SelectAll},
|
||||
display_map::{
|
||||
BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
|
||||
BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
|
||||
ToDisplayPoint,
|
||||
},
|
||||
Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorElement, EditorEvent, EditorMode,
|
||||
@@ -54,7 +54,7 @@ use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
|
||||
use terminal_view::terminal_panel::TerminalPanel;
|
||||
use text::{OffsetRangeExt, ToPoint as _};
|
||||
use theme::ThemeSettings;
|
||||
use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
|
||||
use ui::{prelude::*, text_for_action, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
|
||||
use util::{RangeExt, ResultExt};
|
||||
use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace};
|
||||
|
||||
@@ -189,11 +189,16 @@ impl InlineAssistant {
|
||||
initial_prompt: Option<String>,
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
let (snapshot, initial_selections) = editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.buffer().read(cx).snapshot(cx),
|
||||
editor.selections.all::<Point>(cx),
|
||||
)
|
||||
});
|
||||
|
||||
let mut selections = Vec::<Selection<Point>>::new();
|
||||
let mut newest_selection = None;
|
||||
for mut selection in editor.read(cx).selections.all::<Point>(cx) {
|
||||
for mut selection in initial_selections {
|
||||
if selection.end > selection.start {
|
||||
selection.start.column = 0;
|
||||
// If the selection ends at the start of the line, we don't want to include it.
|
||||
@@ -446,15 +451,14 @@ impl InlineAssistant {
|
||||
let assist_blocks = vec![
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
position: range.start,
|
||||
placement: BlockPlacement::Above(range.start),
|
||||
height: prompt_editor_height,
|
||||
render: build_assist_editor_renderer(prompt_editor),
|
||||
disposition: BlockDisposition::Above,
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
position: range.end,
|
||||
placement: BlockPlacement::Below(range.end),
|
||||
height: 0,
|
||||
render: Box::new(|cx| {
|
||||
v_flex()
|
||||
@@ -464,7 +468,6 @@ impl InlineAssistant {
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.into_any_element()
|
||||
}),
|
||||
disposition: BlockDisposition::Below,
|
||||
priority: 0,
|
||||
},
|
||||
];
|
||||
@@ -568,10 +571,13 @@ impl InlineAssistant {
|
||||
return;
|
||||
};
|
||||
|
||||
let editor = editor.read(cx);
|
||||
if editor.selections.count() == 1 {
|
||||
let selection = editor.selections.newest::<usize>(cx);
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
if editor.read(cx).selections.count() == 1 {
|
||||
let (selection, buffer) = editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.selections.newest::<usize>(cx),
|
||||
editor.buffer().read(cx).snapshot(cx),
|
||||
)
|
||||
});
|
||||
for assist_id in &editor_assists.assist_ids {
|
||||
let assist = &self.assists[assist_id];
|
||||
let assist_range = assist.range.to_offset(&buffer);
|
||||
@@ -596,10 +602,13 @@ impl InlineAssistant {
|
||||
return;
|
||||
};
|
||||
|
||||
let editor = editor.read(cx);
|
||||
if editor.selections.count() == 1 {
|
||||
let selection = editor.selections.newest::<usize>(cx);
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
if editor.read(cx).selections.count() == 1 {
|
||||
let (selection, buffer) = editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.selections.newest::<usize>(cx),
|
||||
editor.buffer().read(cx).snapshot(cx),
|
||||
)
|
||||
});
|
||||
let mut closest_assist_fallback = None;
|
||||
for assist_id in &editor_assists.assist_ids {
|
||||
let assist = &self.assists[assist_id];
|
||||
@@ -1179,7 +1188,7 @@ impl InlineAssistant {
|
||||
let height =
|
||||
deleted_lines_editor.update(cx, |editor, cx| editor.max_point(cx).row().0 + 1);
|
||||
new_blocks.push(BlockProperties {
|
||||
position: new_row,
|
||||
placement: BlockPlacement::Above(new_row),
|
||||
height,
|
||||
style: BlockStyle::Flex,
|
||||
render: Box::new(move |cx| {
|
||||
@@ -1191,7 +1200,6 @@ impl InlineAssistant {
|
||||
.child(deleted_lines_editor.clone())
|
||||
.into_any_element()
|
||||
}),
|
||||
disposition: BlockDisposition::Above,
|
||||
priority: 0,
|
||||
});
|
||||
}
|
||||
@@ -1599,7 +1607,7 @@ impl PromptEditor {
|
||||
// always show the cursor (even when it isn't focused) because
|
||||
// typing in one will make what you typed appear in all of them.
|
||||
editor.set_show_cursor_when_unfocused(true, cx);
|
||||
editor.set_placeholder_text("Add a prompt…", cx);
|
||||
editor.set_placeholder_text(Self::placeholder_text(codegen.read(cx), cx), cx);
|
||||
editor
|
||||
});
|
||||
|
||||
@@ -1656,6 +1664,7 @@ impl PromptEditor {
|
||||
self.editor = cx.new_view(|cx| {
|
||||
let mut editor = Editor::auto_height(Self::MAX_LINES as usize, cx);
|
||||
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx);
|
||||
editor.set_placeholder_text(Self::placeholder_text(self.codegen.read(cx), cx), cx);
|
||||
editor.set_placeholder_text("Add a prompt…", cx);
|
||||
editor.set_text(prompt, cx);
|
||||
if focus {
|
||||
@@ -1666,6 +1675,20 @@ impl PromptEditor {
|
||||
self.subscribe_to_editor(cx);
|
||||
}
|
||||
|
||||
fn placeholder_text(codegen: &Codegen, cx: &WindowContext) -> String {
|
||||
let context_keybinding = text_for_action(&crate::ToggleFocus, cx)
|
||||
.map(|keybinding| format!(" • {keybinding} for context"))
|
||||
.unwrap_or_default();
|
||||
|
||||
let action = if codegen.is_insertion {
|
||||
"Generate"
|
||||
} else {
|
||||
"Transform"
|
||||
};
|
||||
|
||||
format!("{action}…{context_keybinding} • ↓↑ for history")
|
||||
}
|
||||
|
||||
fn prompt(&self, cx: &AppContext) -> String {
|
||||
self.editor.read(cx).text(cx)
|
||||
}
|
||||
@@ -2222,7 +2245,7 @@ impl InlineAssist {
|
||||
.read(cx)
|
||||
.active_context(cx)?
|
||||
.read(cx)
|
||||
.to_completion_request(cx),
|
||||
.to_completion_request(RequestType::Chat, cx),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
@@ -2263,6 +2286,7 @@ pub struct Codegen {
|
||||
initial_transaction_id: Option<TransactionId>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
builder: Arc<PromptBuilder>,
|
||||
is_insertion: bool,
|
||||
}
|
||||
|
||||
impl Codegen {
|
||||
@@ -2285,6 +2309,7 @@ impl Codegen {
|
||||
)
|
||||
});
|
||||
let mut this = Self {
|
||||
is_insertion: range.to_offset(&buffer.read(cx).snapshot(cx)).is_empty(),
|
||||
alternatives: vec![codegen],
|
||||
active_alternative: 0,
|
||||
seen_alternatives: HashSet::default(),
|
||||
@@ -2686,7 +2711,7 @@ impl CodegenAlternative {
|
||||
|
||||
let prompt = self
|
||||
.builder
|
||||
.generate_content_prompt(user_prompt, language_name, buffer, range)
|
||||
.generate_inline_transformation_prompt(user_prompt, language_name, buffer, range)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to generate content prompt: {}", e))?;
|
||||
|
||||
let mut messages = Vec::new();
|
||||
|
||||
@@ -158,39 +158,34 @@ impl PickerDelegate for ModelPickerDelegate {
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.selected(selected)
|
||||
.start_slot(
|
||||
div().pr_1().child(
|
||||
div().pr_0p5().child(
|
||||
Icon::new(model_info.icon)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Medium),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.font_buffer(cx)
|
||||
.min_w(px(240.))
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Label::new(model_info.model.name().0.clone()))
|
||||
.child(
|
||||
Label::new(provider_name)
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.children(match model_info.availability {
|
||||
LanguageModelAvailability::Public => None,
|
||||
LanguageModelAvailability::RequiresPlan(Plan::Free) => None,
|
||||
LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => {
|
||||
show_badges.then(|| {
|
||||
Label::new("Pro")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
})
|
||||
}
|
||||
}),
|
||||
),
|
||||
h_flex().w_full().justify_between().min_w(px(200.)).child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(Label::new(model_info.model.name().0.clone()))
|
||||
.child(
|
||||
Label::new(provider_name)
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.children(match model_info.availability {
|
||||
LanguageModelAvailability::Public => None,
|
||||
LanguageModelAvailability::RequiresPlan(Plan::Free) => None,
|
||||
LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => {
|
||||
show_badges.then(|| {
|
||||
Label::new("Pro")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
})
|
||||
}
|
||||
}),
|
||||
),
|
||||
)
|
||||
.end_slot(div().when(model_info.is_selected, |this| {
|
||||
this.child(
|
||||
@@ -212,7 +207,7 @@ impl PickerDelegate for ModelPickerDelegate {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.p_1()
|
||||
.gap_4()
|
||||
.justify_between()
|
||||
|
||||
@@ -33,21 +33,21 @@ pub enum AssistantEditKind {
|
||||
Update {
|
||||
old_text: String,
|
||||
new_text: String,
|
||||
description: String,
|
||||
description: Option<String>,
|
||||
},
|
||||
Create {
|
||||
new_text: String,
|
||||
description: String,
|
||||
description: Option<String>,
|
||||
},
|
||||
InsertBefore {
|
||||
old_text: String,
|
||||
new_text: String,
|
||||
description: String,
|
||||
description: Option<String>,
|
||||
},
|
||||
InsertAfter {
|
||||
old_text: String,
|
||||
new_text: String,
|
||||
description: String,
|
||||
description: Option<String>,
|
||||
},
|
||||
Delete {
|
||||
old_text: String,
|
||||
@@ -86,19 +86,37 @@ enum SearchDirection {
|
||||
Diagonal,
|
||||
}
|
||||
|
||||
// A measure of the current quality of an in-progress fuzzy search.
|
||||
//
|
||||
// Uses 60 bits to store a numeric cost, and 4 bits to store the preceding
|
||||
// operation in the search.
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct SearchState {
|
||||
score: u32,
|
||||
cost: u32,
|
||||
direction: SearchDirection,
|
||||
}
|
||||
|
||||
impl SearchState {
|
||||
fn new(score: u32, direction: SearchDirection) -> Self {
|
||||
Self { score, direction }
|
||||
fn new(cost: u32, direction: SearchDirection) -> Self {
|
||||
Self { cost, direction }
|
||||
}
|
||||
}
|
||||
|
||||
struct SearchMatrix {
|
||||
cols: usize,
|
||||
data: Vec<SearchState>,
|
||||
}
|
||||
|
||||
impl SearchMatrix {
|
||||
fn new(rows: usize, cols: usize) -> Self {
|
||||
SearchMatrix {
|
||||
cols,
|
||||
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
|
||||
}
|
||||
}
|
||||
|
||||
fn get(&self, row: usize, col: usize) -> SearchState {
|
||||
self.data[row * self.cols + col]
|
||||
}
|
||||
|
||||
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
|
||||
self.data[row * self.cols + col] = cost;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -187,23 +205,23 @@ impl AssistantEdit {
|
||||
"update" => AssistantEditKind::Update {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description: description.ok_or_else(|| anyhow!("missing description"))?,
|
||||
description,
|
||||
},
|
||||
"insert_before" => AssistantEditKind::InsertBefore {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description: description.ok_or_else(|| anyhow!("missing description"))?,
|
||||
description,
|
||||
},
|
||||
"insert_after" => AssistantEditKind::InsertAfter {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description: description.ok_or_else(|| anyhow!("missing description"))?,
|
||||
description,
|
||||
},
|
||||
"delete" => AssistantEditKind::Delete {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
},
|
||||
"create" => AssistantEditKind::Create {
|
||||
description: description.ok_or_else(|| anyhow!("missing description"))?,
|
||||
description,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
},
|
||||
_ => Err(anyhow!("unknown operation {operation:?}"))?,
|
||||
@@ -264,7 +282,7 @@ impl AssistantEditKind {
|
||||
ResolvedEdit {
|
||||
range,
|
||||
new_text,
|
||||
description: Some(description),
|
||||
description,
|
||||
}
|
||||
}
|
||||
Self::Create {
|
||||
@@ -272,7 +290,7 @@ impl AssistantEditKind {
|
||||
description,
|
||||
} => ResolvedEdit {
|
||||
range: text::Anchor::MIN..text::Anchor::MAX,
|
||||
description: Some(description),
|
||||
description,
|
||||
new_text,
|
||||
},
|
||||
Self::InsertBefore {
|
||||
@@ -285,7 +303,7 @@ impl AssistantEditKind {
|
||||
ResolvedEdit {
|
||||
range: range.start..range.start,
|
||||
new_text,
|
||||
description: Some(description),
|
||||
description,
|
||||
}
|
||||
}
|
||||
Self::InsertAfter {
|
||||
@@ -298,7 +316,7 @@ impl AssistantEditKind {
|
||||
ResolvedEdit {
|
||||
range: range.end..range.end,
|
||||
new_text,
|
||||
description: Some(description),
|
||||
description,
|
||||
}
|
||||
}
|
||||
Self::Delete { old_text } => {
|
||||
@@ -314,44 +332,29 @@ impl AssistantEditKind {
|
||||
|
||||
fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range<text::Anchor> {
|
||||
const INSERTION_COST: u32 = 3;
|
||||
const DELETION_COST: u32 = 10;
|
||||
const WHITESPACE_INSERTION_COST: u32 = 1;
|
||||
const DELETION_COST: u32 = 3;
|
||||
const WHITESPACE_DELETION_COST: u32 = 1;
|
||||
const EQUALITY_BONUS: u32 = 5;
|
||||
|
||||
struct Matrix {
|
||||
cols: usize,
|
||||
data: Vec<SearchState>,
|
||||
}
|
||||
|
||||
impl Matrix {
|
||||
fn new(rows: usize, cols: usize) -> Self {
|
||||
Matrix {
|
||||
cols,
|
||||
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
|
||||
}
|
||||
}
|
||||
|
||||
fn get(&self, row: usize, col: usize) -> SearchState {
|
||||
self.data[row * self.cols + col]
|
||||
}
|
||||
|
||||
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
|
||||
self.data[row * self.cols + col] = cost;
|
||||
}
|
||||
}
|
||||
|
||||
let buffer_len = buffer.len();
|
||||
let query_len = search_query.len();
|
||||
let mut matrix = Matrix::new(query_len + 1, buffer_len + 1);
|
||||
|
||||
let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1);
|
||||
let mut leading_deletion_cost = 0_u32;
|
||||
for (row, query_byte) in search_query.bytes().enumerate() {
|
||||
let deletion_cost = if query_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_DELETION_COST
|
||||
} else {
|
||||
DELETION_COST
|
||||
};
|
||||
|
||||
leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost);
|
||||
matrix.set(
|
||||
row + 1,
|
||||
0,
|
||||
SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
|
||||
);
|
||||
|
||||
for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() {
|
||||
let deletion_cost = if query_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_DELETION_COST
|
||||
} else {
|
||||
DELETION_COST
|
||||
};
|
||||
let insertion_cost = if buffer_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_INSERTION_COST
|
||||
} else {
|
||||
@@ -359,38 +362,35 @@ impl AssistantEditKind {
|
||||
};
|
||||
|
||||
let up = SearchState::new(
|
||||
matrix.get(row, col + 1).score.saturating_sub(deletion_cost),
|
||||
matrix.get(row, col + 1).cost.saturating_add(deletion_cost),
|
||||
SearchDirection::Up,
|
||||
);
|
||||
let left = SearchState::new(
|
||||
matrix
|
||||
.get(row + 1, col)
|
||||
.score
|
||||
.saturating_sub(insertion_cost),
|
||||
matrix.get(row + 1, col).cost.saturating_add(insertion_cost),
|
||||
SearchDirection::Left,
|
||||
);
|
||||
let diagonal = SearchState::new(
|
||||
if query_byte == *buffer_byte {
|
||||
matrix.get(row, col).score.saturating_add(EQUALITY_BONUS)
|
||||
matrix.get(row, col).cost
|
||||
} else {
|
||||
matrix
|
||||
.get(row, col)
|
||||
.score
|
||||
.saturating_sub(deletion_cost + insertion_cost)
|
||||
.cost
|
||||
.saturating_add(deletion_cost + insertion_cost)
|
||||
},
|
||||
SearchDirection::Diagonal,
|
||||
);
|
||||
matrix.set(row + 1, col + 1, up.max(left).max(diagonal));
|
||||
matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
|
||||
}
|
||||
}
|
||||
|
||||
// Traceback to find the best match
let mut best_buffer_end = buffer_len;
let mut best_score = 0;
let mut best_cost = u32::MAX;
for col in 1..=buffer_len {
let score = matrix.get(query_len, col).score;
if score > best_score {
best_score = score;
let cost = matrix.get(query_len, col).cost;
if cost < best_cost {
best_cost = cost;
best_buffer_end = col;
}
}

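This hunk flips the search from a maximized score to a minimized cost, so the traceback now looks for the smallest value in the last row. A simplified, self-contained sketch of the same dynamic program, with the whitespace-specific costs and byte streaming of resolve_location left out:

// Hypothetical, simplified matrix update: each cell holds the cheapest way to align
// query[..row] against buffer[..col]; the match is free to start anywhere (row 0 stays 0),
// and the best end position is the lowest-cost column in the last row.
fn cheapest_match_end(query: &[u8], buffer: &[u8]) -> usize {
    const INSERTION_COST: u32 = 3;
    const DELETION_COST: u32 = 3;

    let rows = query.len() + 1;
    let cols = buffer.len() + 1;
    let mut matrix = vec![0u32; rows * cols];
    for row in 1..rows {
        // Dropping query bytes before any buffer text is consumed accumulates cost
        // down the first column, mirroring leading_deletion_cost above.
        matrix[row * cols] = matrix[(row - 1) * cols] + DELETION_COST;
    }
    for row in 1..rows {
        for col in 1..cols {
            let up = matrix[(row - 1) * cols + col] + DELETION_COST;
            let left = matrix[row * cols + (col - 1)] + INSERTION_COST;
            let diagonal = matrix[(row - 1) * cols + (col - 1)]
                + if query[row - 1] == buffer[col - 1] {
                    0
                } else {
                    DELETION_COST + INSERTION_COST
                };
            matrix[row * cols + col] = up.min(left).min(diagonal);
        }
    }

    // Traceback start: the column in the last row with the smallest cost.
    (1..cols)
        .min_by_key(|&col| matrix[(rows - 1) * cols + col])
        .unwrap_or(cols - 1)
}

Because the real constants give whitespace insertions and deletions a cost of 1 instead of 3, reindented code still matches its query cheaply, which is what the location-resolution tests below rely on.
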
@@ -560,89 +560,84 @@ mod tests {
|
||||
language_settings::AllLanguageSettings, Language, LanguageConfig, LanguageMatcher,
|
||||
};
|
||||
use settings::SettingsStore;
|
||||
use text::{OffsetRangeExt, Point};
|
||||
use ui::BorrowAppContext;
|
||||
use unindent::Unindent as _;
|
||||
use util::test::{generate_marked_text, marked_text_ranges};
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_location(cx: &mut AppContext) {
|
||||
{
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::local(
|
||||
concat!(
|
||||
" Lorem\n",
|
||||
" ipsum\n",
|
||||
" dolor sit amet\n",
|
||||
" consecteur",
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
assert_eq!(
|
||||
AssistantEditKind::resolve_location(&snapshot, "ipsum\ndolor").to_point(&snapshot),
|
||||
Point::new(1, 0)..Point::new(2, 18)
|
||||
);
|
||||
}
|
||||
assert_location_resolution(
|
||||
concat!(
|
||||
" Lorem\n",
|
||||
"« ipsum\n",
|
||||
" dolor sit amet»\n",
|
||||
" consecteur",
|
||||
),
|
||||
"ipsum\ndolor",
|
||||
cx,
|
||||
);
|
||||
|
||||
{
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::local(
|
||||
concat!(
|
||||
"fn foo1(a: usize) -> usize {\n",
|
||||
" 40\n",
|
||||
"}\n",
|
||||
"\n",
|
||||
"fn foo2(b: usize) -> usize {\n",
|
||||
" 42\n",
|
||||
"}\n",
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
assert_eq!(
|
||||
AssistantEditKind::resolve_location(&snapshot, "fn foo1(b: usize) {\n40\n}")
|
||||
.to_point(&snapshot),
|
||||
Point::new(0, 0)..Point::new(2, 1)
|
||||
);
|
||||
}
|
||||
assert_location_resolution(
|
||||
&"
|
||||
«fn foo1(a: usize) -> usize {
|
||||
40
|
||||
}»
|
||||
|
||||
{
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::local(
|
||||
concat!(
|
||||
"fn main() {\n",
|
||||
" Foo\n",
|
||||
" .bar()\n",
|
||||
" .baz()\n",
|
||||
" .qux()\n",
|
||||
"}\n",
|
||||
"\n",
|
||||
"fn foo2(b: usize) -> usize {\n",
|
||||
" 42\n",
|
||||
"}\n",
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
assert_eq!(
|
||||
AssistantEditKind::resolve_location(&snapshot, "Foo.bar.baz.qux()")
|
||||
.to_point(&snapshot),
|
||||
Point::new(1, 0)..Point::new(4, 14)
|
||||
);
|
||||
}
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"fn foo1(b: usize) {\n40\n}",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
fn main() {
|
||||
« Foo
|
||||
.bar()
|
||||
.baz()
|
||||
.qux()»
|
||||
}
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"Foo.bar.baz.qux()",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
class Something {
|
||||
one() { return 1; }
|
||||
« two() { return 2222; }
|
||||
three() { return 333; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
» seven() { return 7; }
|
||||
eight() { return 8; }
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
&"
|
||||
two() { return 2222; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_edits(cx: &mut AppContext) {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
cx.update_global::<SettingsStore, _>(|settings, cx| {
|
||||
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
|
||||
});
|
||||
init_test(cx);
|
||||
|
||||
assert_edits(
|
||||
"
|
||||
@@ -675,7 +670,7 @@ mod tests {
|
||||
last_name: String,
|
||||
"
|
||||
.unindent(),
|
||||
description: "".into(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -690,7 +685,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: "".into(),
|
||||
description: None,
|
||||
},
|
||||
],
|
||||
"
|
||||
@@ -734,7 +729,7 @@ mod tests {
|
||||
qux();
|
||||
}"
|
||||
.unindent(),
|
||||
description: "implement bar".into(),
|
||||
description: Some("implement bar".into()),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -747,7 +742,7 @@ mod tests {
|
||||
bar();
|
||||
}"
|
||||
.unindent(),
|
||||
description: "call bar in foo".into(),
|
||||
description: Some("call bar in foo".into()),
|
||||
},
|
||||
AssistantEditKind::InsertAfter {
|
||||
old_text: "
|
||||
@@ -762,7 +757,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: "implement qux".into(),
|
||||
description: Some("implement qux".into()),
|
||||
},
|
||||
],
|
||||
"
|
||||
@@ -814,7 +809,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: "pick better number".into(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -829,7 +824,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: "pick better number".into(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
@@ -844,7 +839,7 @@ mod tests {
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: "pick better number".into(),
|
||||
description: None,
|
||||
},
|
||||
],
|
||||
"
|
||||
@@ -865,6 +860,69 @@ mod tests {
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_edits(
|
||||
"
|
||||
impl Person {
|
||||
fn set_name(&mut self, name: String) {
|
||||
self.name = name;
|
||||
}
|
||||
|
||||
fn name(&self) -> String {
|
||||
return self.name;
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
vec![
|
||||
AssistantEditKind::Update {
|
||||
old_text: "self.name = name;".unindent(),
|
||||
new_text: "self._name = name;".unindent(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "return self.name;\n".unindent(),
|
||||
new_text: "return self._name;\n".unindent(),
|
||||
description: None,
|
||||
},
|
||||
],
|
||||
"
|
||||
impl Person {
|
||||
fn set_name(&mut self, name: String) {
|
||||
self._name = name;
|
||||
}
|
||||
|
||||
fn name(&self) -> String {
|
||||
return self._name;
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut AppContext) {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
cx.update_global::<SettingsStore, _>(|settings, cx| {
|
||||
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
|
||||
});
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_location_resolution(
|
||||
text_with_expected_range: &str,
|
||||
query: &str,
|
||||
cx: &mut AppContext,
|
||||
) {
|
||||
let (text, _) = marked_text_ranges(text_with_expected_range, false);
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text.clone(), cx));
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let range = AssistantEditKind::resolve_location(&snapshot, query).to_offset(&snapshot);
|
||||
let text_with_actual_range = generate_marked_text(&text, &[range], false);
|
||||
pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
|
||||
@@ -204,7 +204,7 @@ impl PromptBuilder {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn generate_content_prompt(
|
||||
pub fn generate_inline_transformation_prompt(
|
||||
&self,
|
||||
user_prompt: String,
|
||||
language_name: Option<&LanguageName>,
|
||||
@@ -311,7 +311,7 @@ impl PromptBuilder {
|
||||
}
|
||||
|
||||
pub fn generate_workflow_prompt(&self) -> Result<String, RenderError> {
|
||||
self.handlebars.lock().render("edit_workflow", &())
|
||||
self.handlebars.lock().render("suggest_edits", &())
|
||||
}
|
||||
|
||||
pub fn generate_project_slash_command_prompt(
|
||||
|
||||
@@ -34,7 +34,6 @@ pub mod search_command;
|
||||
pub mod symbols_command;
|
||||
pub mod tab_command;
|
||||
pub mod terminal_command;
|
||||
pub mod workflow_command;
|
||||
|
||||
pub(crate) struct SlashCommandCompletionProvider {
|
||||
cancel_flag: Mutex<Arc<AtomicBool>>,
|
||||
|
||||
@@ -147,7 +147,8 @@ impl SlashCommand for AutoCommand {
|
||||
text: prompt,
|
||||
sections: Vec::new(),
|
||||
run_commands_in_text: true,
|
||||
})
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -147,7 +147,8 @@ impl SlashCommand for CargoWorkspaceSlashCommand {
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
});
|
||||
output.unwrap_or_else(|error| Task::ready(Err(error)))
|
||||
|
||||
@@ -185,7 +185,8 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
}],
|
||||
text: prompt,
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
} else {
|
||||
Task::ready(Err(anyhow!("Context server not found")))
|
||||
|
||||
@@ -78,7 +78,8 @@ impl SlashCommand for DefaultSlashCommand {
|
||||
}],
|
||||
text,
|
||||
run_commands_in_text: true,
|
||||
})
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand};
|
||||
use anyhow::Result;
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
@@ -38,7 +38,7 @@ impl SlashCommand for DeltaSlashCommand {
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
unimplemented!()
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
|
||||
fn run(
|
||||
@@ -86,25 +86,28 @@ impl SlashCommand for DeltaSlashCommand {
|
||||
.zip(file_command_new_outputs)
|
||||
{
|
||||
if let Ok(new_output) = new_output {
|
||||
if let Some(file_command_range) = new_output.sections.first() {
|
||||
let new_text = &new_output.text[file_command_range.range.clone()];
|
||||
if old_text.chars().ne(new_text.chars()) {
|
||||
output.sections.extend(new_output.sections.into_iter().map(
|
||||
|section| SlashCommandOutputSection {
|
||||
range: output.text.len() + section.range.start
|
||||
..output.text.len() + section.range.end,
|
||||
icon: section.icon,
|
||||
label: section.label,
|
||||
metadata: section.metadata,
|
||||
},
|
||||
));
|
||||
output.text.push_str(&new_output.text);
|
||||
if let Ok(new_output) = SlashCommandOutput::from_event_stream(new_output).await
|
||||
{
|
||||
if let Some(file_command_range) = new_output.sections.first() {
|
||||
let new_text = &new_output.text[file_command_range.range.clone()];
|
||||
if old_text.chars().ne(new_text.chars()) {
|
||||
output.sections.extend(new_output.sections.into_iter().map(
|
||||
|section| SlashCommandOutputSection {
|
||||
range: output.text.len() + section.range.start
|
||||
..output.text.len() + section.range.end,
|
||||
icon: section.icon,
|
||||
label: section.label,
|
||||
metadata: section.metadata,
|
||||
},
|
||||
));
|
||||
output.text.push_str(&new_output.text);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(output)
|
||||
Ok(output.to_event_stream())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -180,7 +180,11 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
|
||||
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
|
||||
|
||||
cx.spawn(move |_| async move { task.await?.ok_or_else(|| anyhow!("No diagnostics found")) })
|
||||
cx.spawn(move |_| async move {
|
||||
task.await?
|
||||
.map(|output| output.to_event_stream())
|
||||
.ok_or_else(|| anyhow!("No diagnostics found"))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -356,7 +356,8 @@ impl SlashCommand for DocsSlashCommand {
|
||||
})
|
||||
.collect(),
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -167,7 +167,8 @@ impl SlashCommand for FetchSlashCommand {
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use assistant_slash_command::{
|
||||
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput,
|
||||
SlashCommandOutputSection, SlashCommandResult,
|
||||
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandContent, SlashCommandEvent,
|
||||
SlashCommandOutput, SlashCommandOutputSection, SlashCommandResult,
|
||||
};
|
||||
use futures::channel::mpsc;
|
||||
use futures::Stream;
|
||||
use fuzzy::PathMatch;
|
||||
use gpui::{AppContext, Model, Task, View, WeakView};
|
||||
use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate};
|
||||
use project::{PathMatchCandidateSet, Project};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smol::stream::StreamExt;
|
||||
use std::{
|
||||
fmt::Write,
|
||||
ops::{Range, RangeInclusive},
|
||||
@@ -194,7 +197,12 @@ impl SlashCommand for FileSlashCommand {
|
||||
return Task::ready(Err(anyhow!("missing path")));
|
||||
};
|
||||
|
||||
collect_files(workspace.read(cx).project().clone(), arguments, cx)
|
||||
Task::ready(Ok(collect_files(
|
||||
workspace.read(cx).project().clone(),
|
||||
arguments,
|
||||
cx,
|
||||
)
|
||||
.boxed()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -202,7 +210,7 @@ fn collect_files(
|
||||
project: Model<Project>,
|
||||
glob_inputs: &[String],
|
||||
cx: &mut AppContext,
|
||||
) -> Task<SlashCommandResult> {
|
||||
) -> impl Stream<Item = Result<SlashCommandEvent>> {
|
||||
let Ok(matchers) = glob_inputs
|
||||
.into_iter()
|
||||
.map(|glob_input| {
|
||||
@@ -211,7 +219,7 @@ fn collect_files(
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<custom_path_matcher::PathMatcher>>>()
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("invalid path")));
|
||||
return futures::stream::once(async { Err(anyhow!("invalid path")) }).boxed();
|
||||
};
|
||||
|
||||
let project_handle = project.downgrade();
|
||||
@@ -221,11 +229,11 @@ fn collect_files(
|
||||
.map(|worktree| worktree.read(cx).snapshot())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let (events_tx, events_rx) = mpsc::unbounded();
|
||||
cx.spawn(|mut cx| async move {
|
||||
let mut output = SlashCommandOutput::default();
|
||||
for snapshot in snapshots {
|
||||
let worktree_id = snapshot.id();
|
||||
let mut directory_stack: Vec<(Arc<Path>, String, usize)> = Vec::new();
|
||||
let mut directory_stack: Vec<Arc<Path>> = Vec::new();
|
||||
let mut folded_directory_names_stack = Vec::new();
|
||||
let mut is_top_level_directory = true;
|
||||
|
||||
@@ -241,17 +249,19 @@ fn collect_files(
|
||||
continue;
|
||||
}
|
||||
|
||||
while let Some((dir, _, _)) = directory_stack.last() {
|
||||
while let Some(dir) = directory_stack.last() {
|
||||
if entry.path.starts_with(dir) {
|
||||
break;
|
||||
}
|
||||
let (_, entry_name, start) = directory_stack.pop().unwrap();
|
||||
output.sections.push(build_entry_output_section(
|
||||
start..output.text.len().saturating_sub(1),
|
||||
Some(&PathBuf::from(entry_name)),
|
||||
true,
|
||||
None,
|
||||
));
|
||||
directory_stack.pop().unwrap();
|
||||
events_tx
|
||||
.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?;
|
||||
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
|
||||
SlashCommandContent::Text {
|
||||
text: "\n".into(),
|
||||
run_commands_in_text: false,
|
||||
},
|
||||
)))?;
|
||||
}
|
||||
|
||||
let filename = entry
|
||||
@@ -283,23 +293,46 @@ fn collect_files(
|
||||
continue;
|
||||
}
|
||||
let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/");
|
||||
let entry_start = output.text.len();
|
||||
if prefix_paths.is_empty() {
|
||||
if is_top_level_directory {
|
||||
output
|
||||
.text
|
||||
.push_str(&path_including_worktree_name.to_string_lossy());
|
||||
let label = if is_top_level_directory {
|
||||
is_top_level_directory = false;
|
||||
path_including_worktree_name.to_string_lossy().to_string()
|
||||
} else {
|
||||
output.text.push_str(&filename);
|
||||
}
|
||||
directory_stack.push((entry.path.clone(), filename, entry_start));
|
||||
filename
|
||||
};
|
||||
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
|
||||
icon: IconName::Folder,
|
||||
label: label.clone().into(),
|
||||
metadata: None,
|
||||
}))?;
|
||||
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
|
||||
SlashCommandContent::Text {
|
||||
text: label,
|
||||
run_commands_in_text: false,
|
||||
},
|
||||
)))?;
|
||||
directory_stack.push(entry.path.clone());
|
||||
} else {
|
||||
let entry_name = format!("{}/{}", prefix_paths, &filename);
|
||||
output.text.push_str(&entry_name);
|
||||
directory_stack.push((entry.path.clone(), entry_name, entry_start));
|
||||
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
|
||||
icon: IconName::Folder,
|
||||
label: entry_name.clone().into(),
|
||||
metadata: None,
|
||||
}))?;
|
||||
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
|
||||
SlashCommandContent::Text {
|
||||
text: entry_name,
|
||||
run_commands_in_text: false,
|
||||
},
|
||||
)))?;
|
||||
directory_stack.push(entry.path.clone());
|
||||
}
|
||||
output.text.push('\n');
|
||||
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
|
||||
SlashCommandContent::Text {
|
||||
text: "\n".into(),
|
||||
run_commands_in_text: false,
|
||||
},
|
||||
)))?;
|
||||
} else if entry.is_file() {
|
||||
let Some(open_buffer_task) = project_handle
|
||||
.update(&mut cx, |project, cx| {
|
||||
@@ -310,6 +343,7 @@ fn collect_files(
|
||||
continue;
|
||||
};
|
||||
if let Some(buffer) = open_buffer_task.await.log_err() {
|
||||
let mut output = SlashCommandOutput::default();
|
||||
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
|
||||
append_buffer_to_output(
|
||||
&snapshot,
|
||||
@@ -317,33 +351,24 @@ fn collect_files(
|
||||
&mut output,
|
||||
)
|
||||
.log_err();
|
||||
let mut buffer_events = output.to_event_stream();
|
||||
while let Some(event) = buffer_events.next().await {
|
||||
events_tx.unbounded_send(event)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
while let Some((dir, entry, start)) = directory_stack.pop() {
|
||||
if directory_stack.is_empty() {
|
||||
let mut root_path = PathBuf::new();
|
||||
root_path.push(snapshot.root_name());
|
||||
root_path.push(&dir);
|
||||
output.sections.push(build_entry_output_section(
|
||||
start..output.text.len(),
|
||||
Some(&root_path),
|
||||
true,
|
||||
None,
|
||||
));
|
||||
} else {
|
||||
output.sections.push(build_entry_output_section(
|
||||
start..output.text.len(),
|
||||
Some(&PathBuf::from(entry.as_str())),
|
||||
true,
|
||||
None,
|
||||
));
|
||||
}
|
||||
while let Some(_) = directory_stack.pop() {
|
||||
events_tx.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?;
|
||||
}
|
||||
}
|
||||
Ok(output)
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
events_rx.boxed()
|
||||
}
|
||||
|
||||
pub fn codeblock_fence_for_path(
|
||||
@@ -528,11 +553,14 @@ pub fn append_buffer_to_output(
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use assistant_slash_command::SlashCommandOutput;
|
||||
use fs::FakeFs;
|
||||
use gpui::TestAppContext;
|
||||
use pretty_assertions::assert_eq;
|
||||
use project::Project;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use smol::stream::StreamExt;
|
||||
|
||||
use crate::slash_command::file_command::collect_files;
|
||||
|
||||
@@ -573,8 +601,9 @@ mod test {
|
||||
|
||||
let project = Project::test(fs, ["/root".as_ref()], cx).await;
|
||||
|
||||
let result_1 = cx
|
||||
.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx))
|
||||
let result_1 =
|
||||
cx.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx));
|
||||
let result_1 = SlashCommandOutput::from_event_stream(result_1.boxed())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -582,17 +611,17 @@ mod test {
|
||||
// 4 files + 2 directories
|
||||
assert_eq!(result_1.sections.len(), 6);
|
||||
|
||||
let result_2 = cx
|
||||
.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx))
|
||||
let result_2 =
|
||||
cx.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx));
|
||||
let result_2 = SlashCommandOutput::from_event_stream(result_2.boxed())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(result_1, result_2);
|
||||
|
||||
let result = cx
|
||||
.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let result =
|
||||
cx.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx).boxed());
|
||||
let result = SlashCommandOutput::from_event_stream(result).await.unwrap();
|
||||
|
||||
assert!(result.text.starts_with("root/dir"));
|
||||
// 5 files + 2 directories
|
||||
@@ -635,8 +664,9 @@ mod test {
|
||||
|
||||
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
|
||||
|
||||
let result = cx
|
||||
.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
|
||||
let result =
|
||||
cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
|
||||
let result = SlashCommandOutput::from_event_stream(result.boxed())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -696,8 +726,9 @@ mod test {
|
||||
|
||||
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
|
||||
|
||||
let result = cx
|
||||
.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
|
||||
let result =
|
||||
cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
|
||||
let result = SlashCommandOutput::from_event_stream(result.boxed())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -720,6 +751,8 @@ mod test {
|
||||
assert_eq!(result.sections[6].label, "summercamp");
|
||||
assert_eq!(result.sections[7].label, "zed/assets/themes");
|
||||
|
||||
assert_eq!(result.text, "zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n");
|
||||
|
||||
// Ensure that the project lasts until after the last await
|
||||
drop(project);
|
||||
}
|
||||
|
||||
@@ -63,6 +63,7 @@ impl SlashCommand for NowSlashCommand {
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
}))
|
||||
}
|
||||
.to_event_stream()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,7 +162,8 @@ impl SlashCommand for ProjectSlashCommand {
|
||||
text: output,
|
||||
sections,
|
||||
run_commands_in_text: true,
|
||||
})
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
.await
|
||||
})
|
||||
|
||||
@@ -102,7 +102,8 @@ impl SlashCommand for PromptSlashCommand {
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: true,
|
||||
})
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -130,6 +130,7 @@ impl SlashCommand for SearchSlashCommand {
|
||||
sections,
|
||||
run_commands_in_text: false,
|
||||
}
|
||||
.to_event_stream()
|
||||
})
|
||||
.await;
|
||||
|
||||
|
||||
@@ -85,7 +85,8 @@ impl SlashCommand for OutlineSlashCommand {
|
||||
}],
|
||||
text: outline_text,
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
});
|
||||
|
||||
|
||||
@@ -150,7 +150,7 @@ impl SlashCommand for TabSlashCommand {
|
||||
for (full_path, buffer, _) in tab_items_search.await? {
|
||||
append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err();
|
||||
}
|
||||
Ok(output)
|
||||
Ok(output.to_event_stream())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -97,7 +97,8 @@ impl SlashCommand for TerminalSlashCommand {
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
}))
|
||||
}
|
||||
.to_event_stream()))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::prompts::PromptBuilder;
|
||||
|
||||
pub(crate) struct WorkflowSlashCommand {
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
|
||||
impl WorkflowSlashCommand {
|
||||
pub const NAME: &'static str = "workflow";
|
||||
|
||||
pub fn new(prompt_builder: Arc<PromptBuilder>) -> Self {
|
||||
Self { prompt_builder }
|
||||
}
|
||||
}
|
||||
|
||||
impl SlashCommand for WorkflowSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
Self::NAME.into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Insert prompt to opt into the edit workflow".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<SlashCommandResult> {
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
cx.spawn(|_cx| async move {
|
||||
let text = prompt_builder.generate_workflow_prompt()?;
|
||||
let range = 0..text.len();
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range,
|
||||
icon: IconName::Route,
|
||||
label: "Workflow".into(),
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -178,7 +178,7 @@ impl PickerDelegate for SlashCommandDelegate {
|
||||
SlashCommandEntry::Info(info) => Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.spacing(ListItemSpacing::Dense)
|
||||
.selected(selected)
|
||||
.child(
|
||||
h_flex()
|
||||
@@ -224,7 +224,7 @@ impl PickerDelegate for SlashCommandDelegate {
|
||||
SlashCommandEntry::Advert { renderer, .. } => Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.spacing(ListItemSpacing::Dense)
|
||||
.selected(selected)
|
||||
.child(renderer(cx)),
|
||||
),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::{
|
||||
humanize_token_count, prompts::PromptBuilder, AssistantPanel, AssistantPanelEvent,
|
||||
ModelSelector, DEFAULT_CONTEXT_LINES,
|
||||
ModelSelector, RequestType, DEFAULT_CONTEXT_LINES,
|
||||
};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::telemetry::Telemetry;
|
||||
@@ -251,7 +251,7 @@ impl TerminalInlineAssistant {
|
||||
.read(cx)
|
||||
.active_context(cx)?
|
||||
.read(cx)
|
||||
.to_completion_request(cx),
|
||||
.to_completion_request(RequestType::Chat, cx),
|
||||
)
|
||||
})
|
||||
} else {
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
pub mod context_server_tool;
|
||||
pub mod now_tool;
|
||||
|
||||
82
crates/assistant/src/tools/context_server_tool.rs
Normal file
@@ -0,0 +1,82 @@
use anyhow::{anyhow, bail};
use assistant_tool::Tool;
use context_servers::manager::ContextServerManager;
use context_servers::types;
use gpui::Task;

pub struct ContextServerTool {
server_id: String,
tool: types::Tool,
}

impl ContextServerTool {
pub fn new(server_id: impl Into<String>, tool: types::Tool) -> Self {
Self {
server_id: server_id.into(),
tool,
}
}
}

impl Tool for ContextServerTool {
fn name(&self) -> String {
self.tool.name.clone()
}

fn description(&self) -> String {
self.tool.description.clone().unwrap_or_default()
}

fn input_schema(&self) -> serde_json::Value {
match &self.tool.input_schema {
serde_json::Value::Null => {
serde_json::json!({ "type": "object", "properties": [] })
}
serde_json::Value::Object(map) if map.is_empty() => {
serde_json::json!({ "type": "object", "properties": [] })
}
_ => self.tool.input_schema.clone(),
}
}

fn run(
self: std::sync::Arc<Self>,
input: serde_json::Value,
_workspace: gpui::WeakView<workspace::Workspace>,
cx: &mut ui::WindowContext,
) -> gpui::Task<gpui::Result<String>> {
let manager = ContextServerManager::global(cx);
let manager = manager.read(cx);
if let Some(server) = manager.get_server(&self.server_id) {
cx.foreground_executor().spawn({
let tool_name = self.tool.name.clone();
async move {
let Some(protocol) = server.client.read().clone() else {
bail!("Context server not initialized");
};

let arguments = if let serde_json::Value::Object(map) = input {
Some(map.into_iter().collect())
} else {
None
};

log::trace!(
"Running tool: {} with arguments: {:?}",
tool_name,
arguments
);
let response = protocol.run_tool(tool_name, arguments).await?;

let tool_result = match response.tool_result {
serde_json::Value::String(s) => s,
_ => serde_json::to_string(&response.tool_result)?,
};
Ok(tool_result)
}
})
} else {
Task::ready(Err(anyhow!("Context server not found")))
}
}
}
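Side note on the `input_schema` method above: a context server may advertise no input schema at all, and the tool coerces that case into a bare object schema so callers always receive an object. A minimal standalone sketch of that normalization, assuming only `serde_json` (the free function name is invented here for illustration and is not part of the crate):

    use serde_json::{json, Value};

    // Mirror of the normalization in `input_schema`: a missing or empty schema
    // becomes an empty object schema; anything else passes through untouched.
    fn normalize_input_schema(schema: &Value) -> Value {
        match schema {
            Value::Null => json!({ "type": "object", "properties": [] }),
            Value::Object(map) if map.is_empty() => json!({ "type": "object", "properties": [] }),
            _ => schema.clone(),
        }
    }

    fn main() {
        assert_eq!(
            normalize_input_schema(&Value::Null),
            json!({ "type": "object", "properties": [] })
        );
        assert_eq!(
            normalize_input_schema(&json!({ "type": "string" })),
            json!({ "type": "string" })
        );
    }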
@@ -15,9 +15,15 @@ path = "src/assistant_slash_command.rs"
anyhow.workspace = true
collections.workspace = true
derive_more.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
parking_lot.workspace = true
serde.workspace = true
serde_json.workspace = true
workspace.workspace = true

[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
workspace = { workspace = true, features = ["test-support"] }

@@ -1,6 +1,8 @@
mod slash_command_registry;

use anyhow::Result;
use futures::stream::{self, BoxStream};
use futures::StreamExt;
use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, WindowContext};
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt};
use serde::{Deserialize, Serialize};
@@ -56,7 +58,7 @@ pub struct ArgumentCompletion {
pub replace_previous_arguments: bool,
}

pub type SlashCommandResult = Result<SlashCommandOutput>;
pub type SlashCommandResult = Result<BoxStream<'static, Result<SlashCommandEvent>>>;

pub trait SlashCommand: 'static + Send + Sync {
fn name(&self) -> String;
@@ -98,13 +100,146 @@ pub type RenderFoldPlaceholder = Arc<
+ Fn(ElementId, Arc<dyn Fn(&mut WindowContext)>, &mut WindowContext) -> AnyElement,
>;

#[derive(Debug, Default, PartialEq)]
#[derive(Debug, PartialEq, Eq)]
pub enum SlashCommandContent {
Text {
text: String,
run_commands_in_text: bool,
},
}

#[derive(Debug, PartialEq, Eq)]
pub enum SlashCommandEvent {
StartSection {
icon: IconName,
label: SharedString,
metadata: Option<serde_json::Value>,
},
Content(SlashCommandContent),
EndSection {
metadata: Option<serde_json::Value>,
},
}

#[derive(Debug, Default, PartialEq, Clone)]
pub struct SlashCommandOutput {
pub text: String,
pub sections: Vec<SlashCommandOutputSection<usize>>,
pub run_commands_in_text: bool,
}

impl SlashCommandOutput {
pub fn ensure_valid_section_ranges(&mut self) {
for section in &mut self.sections {
section.range.start = section.range.start.min(self.text.len());
section.range.end = section.range.end.min(self.text.len());
while !self.text.is_char_boundary(section.range.start) {
section.range.start -= 1;
}
while !self.text.is_char_boundary(section.range.end) {
section.range.end += 1;
}
}
}

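The two `while` loops above keep section boundaries on valid UTF-8 char boundaries after clamping: the start index is walked down and the end index walked up until each lands on a boundary. A tiny standalone illustration of the same idea on a plain string, using only the standard library (the helper name is invented):

    // Snap an arbitrary byte index to the nearest valid char boundary at or below it,
    // the same way the `start` clamp works above (the `end` clamp walks up instead).
    fn snap_to_char_boundary(text: &str, mut index: usize) -> usize {
        index = index.min(text.len());
        while !text.is_char_boundary(index) {
            index -= 1;
        }
        index
    }

    fn main() {
        let text = "héllo"; // 'é' occupies bytes 1..3
        assert_eq!(snap_to_char_boundary(text, 2), 1);
        assert_eq!(snap_to_char_boundary(text, 99), text.len());
    }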
/// Returns this [`SlashCommandOutput`] as a stream of [`SlashCommandEvent`]s.
pub fn to_event_stream(mut self) -> BoxStream<'static, Result<SlashCommandEvent>> {
self.ensure_valid_section_ranges();

let mut events = Vec::new();
let mut last_section_end = 0;

for section in self.sections {
if last_section_end < section.range.start {
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self
.text
.get(last_section_end..section.range.start)
.unwrap_or_default()
.to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
}

events.push(Ok(SlashCommandEvent::StartSection {
icon: section.icon,
label: section.label,
metadata: section.metadata.clone(),
}));
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self
.text
.get(section.range.start..section.range.end)
.unwrap_or_default()
.to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
events.push(Ok(SlashCommandEvent::EndSection {
metadata: section.metadata,
}));

last_section_end = section.range.end;
}

if last_section_end < self.text.len() {
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self.text[last_section_end..].to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
}

stream::iter(events).boxed()
}

pub async fn from_event_stream(
mut events: BoxStream<'static, Result<SlashCommandEvent>>,
) -> Result<SlashCommandOutput> {
let mut output = SlashCommandOutput::default();
let mut section_stack = Vec::new();

while let Some(event) = events.next().await {
match event? {
SlashCommandEvent::StartSection {
icon,
label,
metadata,
} => {
let start = output.text.len();
section_stack.push(SlashCommandOutputSection {
range: start..start,
icon,
label,
metadata,
});
}
SlashCommandEvent::Content(SlashCommandContent::Text {
text,
run_commands_in_text,
}) => {
output.text.push_str(&text);
output.run_commands_in_text = run_commands_in_text;

if let Some(section) = section_stack.last_mut() {
section.range.end = output.text.len();
}
}
SlashCommandEvent::EndSection { metadata } => {
if let Some(mut section) = section_stack.pop() {
section.metadata = metadata;
output.sections.push(section);
}
}
}
}

while let Some(section) = section_stack.pop() {
output.sections.push(section);
}

Ok(output)
}
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct SlashCommandOutputSection<T> {
pub range: Range<T>,
@@ -118,3 +253,243 @@ impl SlashCommandOutputSection<language::Anchor> {
self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty()
}
}

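With `SlashCommandResult` now a boxed event stream, a command can either keep building a `SlashCommandOutput` and call `to_event_stream()` on it (as the workflow command above does), or emit events directly as they become available. A minimal sketch of the direct form, assuming the types defined above are in scope; `single_section_events` is an invented helper, not part of the crate:

    use futures::stream::{self, BoxStream, StreamExt};

    // Emit one labelled section as the three-event sequence that
    // `to_event_stream` would produce for a single-section output.
    fn single_section_events(
        icon: IconName,
        label: SharedString,
        text: String,
    ) -> BoxStream<'static, Result<SlashCommandEvent>> {
        stream::iter(vec![
            Ok(SlashCommandEvent::StartSection { icon, label, metadata: None }),
            Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
                text,
                run_commands_in_text: false,
            })),
            Ok(SlashCommandEvent::EndSection { metadata: None }),
        ])
        .boxed()
    }

The tests that follow exercise the same round trip via `to_event_stream` and `from_event_stream`.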
#[cfg(test)]
|
||||
mod tests {
|
||||
use pretty_assertions::assert_eq;
|
||||
use serde_json::json;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_slash_command_output_to_events_round_trip() {
|
||||
// Test basic output consisting of a single section.
|
||||
{
|
||||
let text = "Hello, world!".to_string();
|
||||
let range = 0..text.len();
|
||||
let output = SlashCommandOutput {
|
||||
text,
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range,
|
||||
icon: IconName::Code,
|
||||
label: "Section 1".into(),
|
||||
metadata: None,
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
};
|
||||
|
||||
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
|
||||
let events = events
|
||||
.into_iter()
|
||||
.filter_map(|event| event.ok())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(
|
||||
events,
|
||||
vec![
|
||||
SlashCommandEvent::StartSection {
|
||||
icon: IconName::Code,
|
||||
label: "Section 1".into(),
|
||||
metadata: None
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "Hello, world!".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::EndSection { metadata: None }
|
||||
]
|
||||
);
|
||||
|
||||
let new_output =
|
||||
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(new_output, output);
|
||||
}
|
||||
|
||||
// Test output where the sections do not comprise all of the text.
|
||||
{
|
||||
let text = "Apple\nCucumber\nBanana\n".to_string();
|
||||
let output = SlashCommandOutput {
|
||||
text,
|
||||
sections: vec![
|
||||
SlashCommandOutputSection {
|
||||
range: 0..6,
|
||||
icon: IconName::Check,
|
||||
label: "Fruit".into(),
|
||||
metadata: None,
|
||||
},
|
||||
SlashCommandOutputSection {
|
||||
range: 15..22,
|
||||
icon: IconName::Check,
|
||||
label: "Fruit".into(),
|
||||
metadata: None,
|
||||
},
|
||||
],
|
||||
run_commands_in_text: false,
|
||||
};
|
||||
|
||||
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
|
||||
let events = events
|
||||
.into_iter()
|
||||
.filter_map(|event| event.ok())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(
|
||||
events,
|
||||
vec![
|
||||
SlashCommandEvent::StartSection {
|
||||
icon: IconName::Check,
|
||||
label: "Fruit".into(),
|
||||
metadata: None
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "Apple\n".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::EndSection { metadata: None },
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "Cucumber\n".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::StartSection {
|
||||
icon: IconName::Check,
|
||||
label: "Fruit".into(),
|
||||
metadata: None
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "Banana\n".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::EndSection { metadata: None }
|
||||
]
|
||||
);
|
||||
|
||||
let new_output =
|
||||
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(new_output, output);
|
||||
}
|
||||
|
||||
// Test output consisting of multiple sections.
|
||||
{
|
||||
let text = "Line 1\nLine 2\nLine 3\nLine 4\n".to_string();
|
||||
let output = SlashCommandOutput {
|
||||
text,
|
||||
sections: vec![
|
||||
SlashCommandOutputSection {
|
||||
range: 0..6,
|
||||
icon: IconName::FileCode,
|
||||
label: "Section 1".into(),
|
||||
metadata: Some(json!({ "a": true })),
|
||||
},
|
||||
SlashCommandOutputSection {
|
||||
range: 7..13,
|
||||
icon: IconName::FileDoc,
|
||||
label: "Section 2".into(),
|
||||
metadata: Some(json!({ "b": true })),
|
||||
},
|
||||
SlashCommandOutputSection {
|
||||
range: 14..20,
|
||||
icon: IconName::FileGit,
|
||||
label: "Section 3".into(),
|
||||
metadata: Some(json!({ "c": true })),
|
||||
},
|
||||
SlashCommandOutputSection {
|
||||
range: 21..27,
|
||||
icon: IconName::FileToml,
|
||||
label: "Section 4".into(),
|
||||
metadata: Some(json!({ "d": true })),
|
||||
},
|
||||
],
|
||||
run_commands_in_text: false,
|
||||
};
|
||||
|
||||
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
|
||||
let events = events
|
||||
.into_iter()
|
||||
.filter_map(|event| event.ok())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(
|
||||
events,
|
||||
vec![
|
||||
SlashCommandEvent::StartSection {
|
||||
icon: IconName::FileCode,
|
||||
label: "Section 1".into(),
|
||||
metadata: Some(json!({ "a": true }))
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "Line 1".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::EndSection {
|
||||
metadata: Some(json!({ "a": true }))
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "\n".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::StartSection {
|
||||
icon: IconName::FileDoc,
|
||||
label: "Section 2".into(),
|
||||
metadata: Some(json!({ "b": true }))
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "Line 2".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::EndSection {
|
||||
metadata: Some(json!({ "b": true }))
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "\n".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::StartSection {
|
||||
icon: IconName::FileGit,
|
||||
label: "Section 3".into(),
|
||||
metadata: Some(json!({ "c": true }))
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "Line 3".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::EndSection {
|
||||
metadata: Some(json!({ "c": true }))
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "\n".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::StartSection {
|
||||
icon: IconName::FileToml,
|
||||
label: "Section 4".into(),
|
||||
metadata: Some(json!({ "d": true }))
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "Line 4".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
SlashCommandEvent::EndSection {
|
||||
metadata: Some(json!({ "d": true }))
|
||||
},
|
||||
SlashCommandEvent::Content(SlashCommandContent::Text {
|
||||
text: "\n".into(),
|
||||
run_commands_in_text: false
|
||||
}),
|
||||
]
|
||||
);
|
||||
|
||||
let new_output =
|
||||
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(new_output, output);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ use gpui::{
};

use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView};
use paths::remote_servers_dir;
use schemars::JsonSchema;
use serde::Deserialize;
use serde_derive::Serialize;
@@ -83,9 +84,9 @@ pub struct AutoUpdater {
}

#[derive(Deserialize)]
struct JsonRelease {
version: String,
url: String,
pub struct JsonRelease {
pub version: String,
pub url: String,
}

struct MacOsUnmounter {
@@ -431,10 +432,11 @@ impl AutoUpdater {
cx.notify();
}

pub async fn get_latest_remote_server_release(
pub async fn download_remote_server_release(
os: &str,
arch: &str,
mut release_channel: ReleaseChannel,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
cx: &mut AsyncAppContext,
) -> Result<PathBuf> {
let this = cx.update(|cx| {
@@ -444,15 +446,12 @@ impl AutoUpdater {
.ok_or_else(|| anyhow!("auto-update not initialized"))
})??;

if release_channel == ReleaseChannel::Dev {
release_channel = ReleaseChannel::Nightly;
}

let release = Self::get_latest_release(
let release = Self::get_release(
&this,
"zed-remote-server",
os,
arch,
version,
Some(release_channel),
cx,
)
@@ -467,19 +466,23 @@ impl AutoUpdater {
let client = this.read_with(cx, |this, _| this.http_client.clone())?;

if smol::fs::metadata(&version_path).await.is_err() {
log::info!("downloading zed-remote-server {os} {arch}");
log::info!(
"downloading zed-remote-server {os} {arch} version {}",
release.version
);
download_remote_server_binary(&version_path, release, client, cx).await?;
}

Ok(version_path)
}

pub async fn get_latest_remote_server_release_url(
pub async fn get_remote_server_release_url(
os: &str,
arch: &str,
mut release_channel: ReleaseChannel,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
cx: &mut AsyncAppContext,
) -> Result<(String, String)> {
) -> Result<(JsonRelease, String)> {
let this = cx.update(|cx| {
cx.default_global::<GlobalAutoUpdate>()
.0
@@ -487,15 +490,12 @@ impl AutoUpdater {
.ok_or_else(|| anyhow!("auto-update not initialized"))
})??;

if release_channel == ReleaseChannel::Dev {
release_channel = ReleaseChannel::Nightly;
}

let release = Self::get_latest_release(
let release = Self::get_release(
&this,
"zed-remote-server",
os,
arch,
version,
Some(release_channel),
cx,
)
@@ -504,7 +504,57 @@ impl AutoUpdater {
let update_request_body = build_remote_server_update_request_body(cx)?;
let body = serde_json::to_string(&update_request_body)?;

Ok((release.url, body))
Ok((release, body))
}

async fn get_release(
this: &Model<Self>,
asset: &str,
os: &str,
arch: &str,
version: Option<SemanticVersion>,
release_channel: Option<ReleaseChannel>,
cx: &mut AsyncAppContext,
) -> Result<JsonRelease> {
let client = this.read_with(cx, |this, _| this.http_client.clone())?;

if let Some(version) = version {
let channel = release_channel.map(|c| c.dev_name()).unwrap_or("stable");

let url = format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1",);

Ok(JsonRelease {
version: version.to_string(),
url: client.build_url(&url),
})
} else {
let mut url_string = client.build_url(&format!(
"/api/releases/latest?asset={}&os={}&arch={}",
asset, os, arch
));
if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
url_string += "&";
url_string += param;
}

let mut response = client.get(&url_string, Default::default(), true).await?;
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;

if !response.status().is_success() {
return Err(anyhow!(
"failed to fetch release: {:?}",
String::from_utf8_lossy(&body),
));
}

serde_json::from_slice(body.as_slice()).with_context(|| {
format!(
"error deserializing release {:?}",
String::from_utf8_lossy(&body),
)
})
}
}

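For orientation, `get_release` above builds one of two URL shapes: a concrete artifact path when a version is pinned, or a "latest" query otherwise. A standalone sketch of just that string assembly, with no HTTP client involved (the function name and the example channel/version values are invented for illustration):

    // Illustrative only: the two URL shapes assembled by `get_release`,
    // reduced to plain string formatting.
    fn release_path(asset: &str, os: &str, arch: &str, pinned: Option<(&str, &str)>) -> String {
        match pinned {
            // A pinned version goes straight to a concrete artifact...
            Some((channel, version)) => {
                format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1")
            }
            // ...otherwise the latest release for the asset is queried.
            None => format!("/api/releases/latest?asset={asset}&os={os}&arch={arch}"),
        }
    }

    fn main() {
        assert_eq!(
            release_path("zed-remote-server", "linux", "x86_64", Some(("stable", "0.157.0"))),
            "/api/releases/stable/0.157.0/zed-remote-server-linux-x86_64.gz?update=1"
        );
        assert_eq!(
            release_path("zed-remote-server", "macos", "aarch64", None),
            "/api/releases/latest?asset=zed-remote-server&os=macos&arch=aarch64"
        );
    }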
async fn get_latest_release(
|
||||
@@ -515,38 +565,7 @@ impl AutoUpdater {
|
||||
release_channel: Option<ReleaseChannel>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<JsonRelease> {
|
||||
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
|
||||
let mut url_string = client.build_url(&format!(
|
||||
"/api/releases/latest?asset={}&os={}&arch={}",
|
||||
asset, os, arch
|
||||
));
|
||||
if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
|
||||
url_string += "&";
|
||||
url_string += param;
|
||||
}
|
||||
|
||||
let mut response = client.get(&url_string, Default::default(), true).await?;
|
||||
|
||||
let mut body = Vec::new();
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_end(&mut body)
|
||||
.await
|
||||
.context("error reading release")?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
Err(anyhow!(
|
||||
"failed to fetch release: {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
))?;
|
||||
}
|
||||
|
||||
serde_json::from_slice(body.as_slice()).with_context(|| {
|
||||
format!(
|
||||
"error deserializing release {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
)
|
||||
})
|
||||
Self::get_release(this, asset, os, arch, None, release_channel, cx).await
|
||||
}
|
||||
|
||||
async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
|
||||
@@ -661,12 +680,15 @@ async fn download_remote_server_binary(
|
||||
client: Arc<HttpClientWithUrl>,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
let mut target_file = File::create(&target_path).await?;
|
||||
let temp = tempfile::Builder::new().tempfile_in(remote_servers_dir())?;
|
||||
let mut temp_file = File::create(&temp).await?;
|
||||
let update_request_body = build_remote_server_update_request_body(cx)?;
|
||||
let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
|
||||
|
||||
let mut response = client.get(&release.url, request_body, true).await?;
|
||||
smol::io::copy(response.body_mut(), &mut target_file).await?;
|
||||
smol::io::copy(response.body_mut(), &mut temp_file).await?;
|
||||
smol::fs::rename(&temp, &target_path).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
@@ -1194,26 +1194,15 @@ impl Room {
|
||||
project: Model<Project>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<u64>> {
|
||||
let request = if let Some(dev_server_project_id) = project.read(cx).dev_server_project_id()
|
||||
{
|
||||
self.client.request(proto::ShareProject {
|
||||
room_id: self.id(),
|
||||
worktrees: vec![],
|
||||
dev_server_project_id: Some(dev_server_project_id.0),
|
||||
is_ssh_project: false,
|
||||
})
|
||||
} else {
|
||||
if let Some(project_id) = project.read(cx).remote_id() {
|
||||
return Task::ready(Ok(project_id));
|
||||
}
|
||||
if let Some(project_id) = project.read(cx).remote_id() {
|
||||
return Task::ready(Ok(project_id));
|
||||
}
|
||||
|
||||
self.client.request(proto::ShareProject {
|
||||
room_id: self.id(),
|
||||
worktrees: project.read(cx).worktree_metadata_protos(cx),
|
||||
dev_server_project_id: None,
|
||||
is_ssh_project: project.read(cx).is_via_ssh(),
|
||||
})
|
||||
};
|
||||
let request = self.client.request(proto::ShareProject {
|
||||
room_id: self.id(),
|
||||
worktrees: project.read(cx).worktree_metadata_protos(cx),
|
||||
is_ssh_project: project.read(cx).is_via_ssh(),
|
||||
});
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let response = request.await?;
|
||||
|
||||
@@ -3,7 +3,7 @@ mod channel_index;
|
||||
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage};
|
||||
use anyhow::{anyhow, Result};
|
||||
use channel_index::ChannelIndex;
|
||||
use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore};
|
||||
use client::{ChannelId, Client, ClientSettings, Subscription, User, UserId, UserStore};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
|
||||
use gpui::{
|
||||
@@ -33,30 +33,11 @@ struct NotesVersion {
|
||||
version: clock::Global,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct HostedProject {
|
||||
project_id: ProjectId,
|
||||
channel_id: ChannelId,
|
||||
name: SharedString,
|
||||
_visibility: proto::ChannelVisibility,
|
||||
}
|
||||
impl From<proto::HostedProject> for HostedProject {
|
||||
fn from(project: proto::HostedProject) -> Self {
|
||||
Self {
|
||||
project_id: ProjectId(project.project_id),
|
||||
channel_id: ChannelId(project.channel_id),
|
||||
_visibility: project.visibility(),
|
||||
name: project.name.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
pub struct ChannelStore {
|
||||
pub channel_index: ChannelIndex,
|
||||
channel_invitations: Vec<Arc<Channel>>,
|
||||
channel_participants: HashMap<ChannelId, Vec<Arc<User>>>,
|
||||
channel_states: HashMap<ChannelId, ChannelState>,
|
||||
hosted_projects: HashMap<ProjectId, HostedProject>,
|
||||
|
||||
outgoing_invites: HashSet<(ChannelId, UserId)>,
|
||||
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
|
||||
opened_buffers: HashMap<ChannelId, OpenedModelHandle<ChannelBuffer>>,
|
||||
@@ -85,7 +66,6 @@ pub struct ChannelState {
|
||||
observed_notes_version: NotesVersion,
|
||||
observed_chat_message: Option<u64>,
|
||||
role: Option<ChannelRole>,
|
||||
projects: HashSet<ProjectId>,
|
||||
}
|
||||
|
||||
impl Channel {
|
||||
@@ -216,7 +196,6 @@ impl ChannelStore {
|
||||
channel_invitations: Vec::default(),
|
||||
channel_index: ChannelIndex::default(),
|
||||
channel_participants: Default::default(),
|
||||
hosted_projects: Default::default(),
|
||||
outgoing_invites: Default::default(),
|
||||
opened_buffers: Default::default(),
|
||||
opened_chats: Default::default(),
|
||||
@@ -316,19 +295,6 @@ impl ChannelStore {
|
||||
self.channel_index.by_id().get(&channel_id)
|
||||
}
|
||||
|
||||
pub fn projects_for_id(&self, channel_id: ChannelId) -> Vec<(SharedString, ProjectId)> {
|
||||
let mut projects: Vec<(SharedString, ProjectId)> = self
|
||||
.channel_states
|
||||
.get(&channel_id)
|
||||
.map(|state| state.projects.clone())
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.flat_map(|id| Some((self.hosted_projects.get(&id)?.name.clone(), id)))
|
||||
.collect();
|
||||
projects.sort();
|
||||
projects
|
||||
}
|
||||
|
||||
pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool {
|
||||
if let Some(buffer) = self.opened_buffers.get(&channel_id) {
|
||||
if let OpenedModelHandle::Open(buffer) = buffer {
|
||||
@@ -1102,9 +1068,7 @@ impl ChannelStore {
|
||||
let channels_changed = !payload.channels.is_empty()
|
||||
|| !payload.delete_channels.is_empty()
|
||||
|| !payload.latest_channel_message_ids.is_empty()
|
||||
|| !payload.latest_channel_buffer_versions.is_empty()
|
||||
|| !payload.hosted_projects.is_empty()
|
||||
|| !payload.deleted_hosted_projects.is_empty();
|
||||
|| !payload.latest_channel_buffer_versions.is_empty();
|
||||
|
||||
if channels_changed {
|
||||
if !payload.delete_channels.is_empty() {
|
||||
@@ -1161,34 +1125,6 @@ impl ChannelStore {
|
||||
.or_default()
|
||||
.update_latest_message_id(latest_channel_message.message_id);
|
||||
}
|
||||
|
||||
for hosted_project in payload.hosted_projects {
|
||||
let hosted_project: HostedProject = hosted_project.into();
|
||||
if let Some(old_project) = self
|
||||
.hosted_projects
|
||||
.insert(hosted_project.project_id, hosted_project.clone())
|
||||
{
|
||||
self.channel_states
|
||||
.entry(old_project.channel_id)
|
||||
.or_default()
|
||||
.remove_hosted_project(old_project.project_id);
|
||||
}
|
||||
self.channel_states
|
||||
.entry(hosted_project.channel_id)
|
||||
.or_default()
|
||||
.add_hosted_project(hosted_project.project_id);
|
||||
}
|
||||
|
||||
for hosted_project_id in payload.deleted_hosted_projects {
|
||||
let hosted_project_id = ProjectId(hosted_project_id);
|
||||
|
||||
if let Some(old_project) = self.hosted_projects.remove(&hosted_project_id) {
|
||||
self.channel_states
|
||||
.entry(old_project.channel_id)
|
||||
.or_default()
|
||||
.remove_hosted_project(old_project.project_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
@@ -1295,12 +1231,4 @@ impl ChannelState {
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn add_hosted_project(&mut self, project_id: ProjectId) {
|
||||
self.projects.insert(project_id);
|
||||
}
|
||||
|
||||
fn remove_hosted_project(&mut self, project_id: ProjectId) {
|
||||
self.projects.remove(&project_id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,7 +15,6 @@ pub enum CliRequest {
|
||||
urls: Vec<String>,
|
||||
wait: bool,
|
||||
open_new_workspace: Option<bool>,
|
||||
dev_server_token: Option<String>,
|
||||
env: Option<HashMap<String, String>>,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -151,6 +151,12 @@ fn main() -> Result<()> {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(_) = args.dev_server_token {
|
||||
return Err(anyhow::anyhow!(
|
||||
"Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
|
||||
))?;
|
||||
}
|
||||
|
||||
let sender: JoinHandle<anyhow::Result<()>> = thread::spawn({
|
||||
let exit_status = exit_status.clone();
|
||||
move || {
|
||||
@@ -162,7 +168,6 @@ fn main() -> Result<()> {
|
||||
urls,
|
||||
wait: args.wait,
|
||||
open_new_workspace,
|
||||
dev_server_token: args.dev_server_token,
|
||||
env,
|
||||
})?;
|
||||
|
||||
|
||||
@@ -30,7 +30,6 @@ use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources};
|
||||
use socks::connect_socks_proxy_stream;
|
||||
use std::fmt;
|
||||
use std::pin::Pin;
|
||||
use std::{
|
||||
any::TypeId,
|
||||
@@ -54,15 +53,6 @@ pub use rpc::*;
|
||||
pub use telemetry_events::Event;
|
||||
pub use user::*;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct DevServerToken(pub String);
|
||||
|
||||
impl fmt::Display for DevServerToken {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
static ZED_SERVER_URL: LazyLock<Option<String>> =
|
||||
LazyLock::new(|| std::env::var("ZED_SERVER_URL").ok());
|
||||
static ZED_RPC_URL: LazyLock<Option<String>> = LazyLock::new(|| std::env::var("ZED_RPC_URL").ok());
|
||||
@@ -304,20 +294,14 @@ struct ClientState {
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Credentials {
DevServer { token: DevServerToken },
User { user_id: u64, access_token: String },
pub struct Credentials {
pub user_id: u64,
pub access_token: String,
}

impl Credentials {
pub fn authorization_header(&self) -> String {
match self {
Credentials::DevServer { token } => format!("dev-server-token {}", token),
Credentials::User {
user_id,
access_token,
} => format!("{} {}", user_id, access_token),
}
format!("{} {}", self.user_id, self.access_token)
}
}

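With the `DevServer` variant gone, the header format collapses to the single user form. A tiny usage sketch, assuming the `Credentials` struct above is in scope (the values are invented):

    fn example_header() -> String {
        let credentials = Credentials {
            user_id: 42,
            access_token: "some-access-token".into(),
        };
        // Always "<user_id> <access_token>" now that the dev-server variant is gone.
        credentials.authorization_header()
    }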
@@ -600,11 +584,11 @@ impl Client {
|
||||
}
|
||||
|
||||
pub fn user_id(&self) -> Option<u64> {
|
||||
if let Some(Credentials::User { user_id, .. }) = self.state.read().credentials.as_ref() {
|
||||
Some(*user_id)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
self.state
|
||||
.read()
|
||||
.credentials
|
||||
.as_ref()
|
||||
.map(|credentials| credentials.user_id)
|
||||
}
|
||||
|
||||
pub fn peer_id(&self) -> Option<PeerId> {
|
||||
@@ -793,11 +777,6 @@ impl Client {
|
||||
.is_some()
|
||||
}
|
||||
|
||||
pub fn set_dev_server_token(&self, token: DevServerToken) -> &Self {
|
||||
self.state.write().credentials = Some(Credentials::DevServer { token });
|
||||
self
|
||||
}
|
||||
|
||||
#[async_recursion(?Send)]
|
||||
pub async fn authenticate_and_connect(
|
||||
self: &Arc<Self>,
|
||||
@@ -848,9 +827,7 @@ impl Client {
|
||||
}
|
||||
}
|
||||
let credentials = credentials.unwrap();
|
||||
if let Credentials::User { user_id, .. } = &credentials {
|
||||
self.set_id(*user_id);
|
||||
}
|
||||
self.set_id(credentials.user_id);
|
||||
|
||||
if was_disconnected {
|
||||
self.set_status(Status::Connecting, cx);
|
||||
@@ -866,9 +843,8 @@ impl Client {
|
||||
Ok(conn) => {
|
||||
self.state.write().credentials = Some(credentials.clone());
|
||||
if !read_from_provider && IMPERSONATE_LOGIN.is_none() {
|
||||
if let Credentials::User{user_id, access_token} = credentials {
|
||||
self.credentials_provider.write_credentials(user_id, access_token, cx).await.log_err();
|
||||
}
|
||||
self.credentials_provider.write_credentials(credentials.user_id, credentials.access_token, cx).await.log_err();
|
||||
|
||||
}
|
||||
|
||||
futures::select_biased! {
|
||||
@@ -1301,7 +1277,7 @@ impl Client {
|
||||
.decrypt_string(&access_token)
|
||||
.context("failed to decrypt access token")?;
|
||||
|
||||
Ok(Credentials::User {
|
||||
Ok(Credentials {
|
||||
user_id: user_id.parse()?,
|
||||
access_token,
|
||||
})
|
||||
@@ -1422,7 +1398,7 @@ impl Client {
|
||||
|
||||
// Use the admin API token to authenticate as the impersonated user.
|
||||
api_token.insert_str(0, "ADMIN_TOKEN:");
|
||||
Ok(Credentials::User {
|
||||
Ok(Credentials {
|
||||
user_id: response.user.id,
|
||||
access_token: api_token,
|
||||
})
|
||||
@@ -1667,7 +1643,7 @@ impl CredentialsProvider for DevelopmentCredentialsProvider {
|
||||
|
||||
let credentials: DevelopmentCredentials = serde_json::from_slice(&json).log_err()?;
|
||||
|
||||
Some(Credentials::User {
|
||||
Some(Credentials {
|
||||
user_id: credentials.user_id,
|
||||
access_token: credentials.access_token,
|
||||
})
|
||||
@@ -1721,7 +1697,7 @@ impl CredentialsProvider for KeychainCredentialsProvider {
|
||||
.await
|
||||
.log_err()??;
|
||||
|
||||
Some(Credentials::User {
|
||||
Some(Credentials {
|
||||
user_id: user_id.parse().ok()?,
|
||||
access_token: String::from_utf8(access_token).ok()?,
|
||||
})
|
||||
@@ -1855,7 +1831,7 @@ mod tests {
|
||||
// Time out when client tries to connect.
|
||||
client.override_authenticate(move |cx| {
|
||||
cx.background_executor().spawn(async move {
|
||||
Ok(Credentials::User {
|
||||
Ok(Credentials {
|
||||
user_id,
|
||||
access_token: "token".into(),
|
||||
})
|
||||
|
||||
@@ -49,7 +49,7 @@ impl FakeServer {
|
||||
let mut state = state.lock();
|
||||
state.auth_count += 1;
|
||||
let access_token = state.access_token.to_string();
|
||||
Ok(Credentials::User {
|
||||
Ok(Credentials {
|
||||
user_id: client_user_id,
|
||||
access_token,
|
||||
})
|
||||
@@ -73,7 +73,7 @@ impl FakeServer {
|
||||
}
|
||||
|
||||
if credentials
|
||||
!= (Credentials::User {
|
||||
!= (Credentials {
|
||||
user_id: client_user_id,
|
||||
access_token: state.lock().access_token.to_string(),
|
||||
})
|
||||
|
||||
@@ -28,9 +28,6 @@ impl std::fmt::Display for ChannelId {
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
|
||||
pub struct ProjectId(pub u64);
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
|
||||
pub struct DevServerId(pub u64);
|
||||
|
||||
#[derive(
|
||||
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
|
||||
)]
|
||||
@@ -51,6 +48,7 @@ pub struct Collaborator {
|
||||
pub peer_id: proto::PeerId,
|
||||
pub replica_id: ReplicaId,
|
||||
pub user_id: UserId,
|
||||
pub is_host: bool,
|
||||
}
|
||||
|
||||
impl PartialOrd for User {
|
||||
@@ -827,6 +825,7 @@ impl Collaborator {
|
||||
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
user_id: message.user_id as UserId,
|
||||
is_host: message.is_host,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,7 +86,6 @@ client = { workspace = true, features = ["test-support"] }
|
||||
collab_ui = { workspace = true, features = ["test-support"] }
|
||||
collections = { workspace = true, features = ["test-support"] }
|
||||
ctor.workspace = true
|
||||
dev_server_projects.workspace = true
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
env_logger.workspace = true
|
||||
file_finder.workspace = true
|
||||
@@ -94,7 +93,6 @@ fs = { workspace = true, features = ["test-support"] }
|
||||
git = { workspace = true, features = ["test-support"] }
|
||||
git_hosting_providers.workspace = true
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
headless.workspace = true
|
||||
hyper.workspace = true
|
||||
indoc.workspace = true
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -52,9 +52,7 @@ CREATE TABLE "projects" (
|
||||
"host_user_id" INTEGER REFERENCES users (id),
|
||||
"host_connection_id" INTEGER,
|
||||
"host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
|
||||
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"hosted_project_id" INTEGER REFERENCES hosted_projects (id),
|
||||
"dev_server_project_id" INTEGER REFERENCES dev_server_projects(id)
|
||||
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE
|
||||
);
|
||||
CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
|
||||
CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id");
|
||||
@@ -399,30 +397,6 @@ CREATE TABLE rate_buckets (
|
||||
);
|
||||
CREATE INDEX idx_user_id_rate_limit ON rate_buckets (user_id, rate_limit_name);
|
||||
|
||||
CREATE TABLE hosted_projects (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
channel_id INTEGER NOT NULL REFERENCES channels(id),
|
||||
name TEXT NOT NULL,
|
||||
visibility TEXT NOT NULL,
|
||||
deleted_at TIMESTAMP NULL
|
||||
);
|
||||
CREATE INDEX idx_hosted_projects_on_channel_id ON hosted_projects (channel_id);
|
||||
CREATE UNIQUE INDEX uix_hosted_projects_on_channel_id_and_name ON hosted_projects (channel_id, name) WHERE (deleted_at IS NULL);
|
||||
|
||||
CREATE TABLE dev_servers (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
user_id INTEGER NOT NULL REFERENCES users(id),
|
||||
name TEXT NOT NULL,
|
||||
ssh_connection_string TEXT,
|
||||
hashed_token TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE dev_server_projects (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
dev_server_id INTEGER NOT NULL REFERENCES dev_servers(id),
|
||||
paths TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS billing_preferences (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
ALTER TABLE projects DROP COLUMN dev_server_project_id;
|
||||
ALTER TABLE projects DROP COLUMN hosted_project_id;
|
||||
|
||||
DROP TABLE hosted_projects;
|
||||
DROP TABLE dev_server_projects;
|
||||
DROP TABLE dev_servers;
|
||||
@@ -252,7 +252,10 @@ async fn create_billing_subscription(
|
||||
|
||||
let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?;
|
||||
let stripe_model = stripe_billing.register_model(default_model).await?;
|
||||
let success_url = format!("{}/account", app.config.zed_dot_dev_url());
|
||||
let success_url = format!(
|
||||
"{}/account?checkout_complete=1",
|
||||
app.config.zed_dot_dev_url()
|
||||
);
|
||||
let checkout_session_url = stripe_billing
|
||||
.checkout(customer_id, &user.github_login, &stripe_model, &success_url)
|
||||
.await?;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::{
|
||||
db::{self, dev_server, AccessTokenId, Database, DevServerId, UserId},
|
||||
db::{self, AccessTokenId, Database, UserId},
|
||||
rpc::Principal,
|
||||
AppState, Error, Result,
|
||||
};
|
||||
@@ -44,19 +44,10 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
|
||||
|
||||
let first = auth_header.next().unwrap_or("");
|
||||
if first == "dev-server-token" {
|
||||
let dev_server_token = auth_header.next().ok_or_else(|| {
|
||||
Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"missing dev-server-token token in authorization header".to_string(),
|
||||
)
|
||||
})?;
|
||||
let dev_server = verify_dev_server_token(dev_server_token, &state.db)
|
||||
.await
|
||||
.map_err(|e| Error::http(StatusCode::UNAUTHORIZED, format!("{}", e)))?;
|
||||
|
||||
req.extensions_mut()
|
||||
.insert(Principal::DevServer(dev_server));
|
||||
return Ok::<_, Error>(next.run(req).await);
|
||||
Err(Error::http(
|
||||
StatusCode::UNAUTHORIZED,
|
||||
"Dev servers were removed in Zed 0.157 please upgrade to SSH remoting".to_string(),
|
||||
))?;
|
||||
}
|
||||
|
||||
let user_id = UserId(first.parse().map_err(|_| {
|
||||
@@ -240,41 +231,6 @@ pub async fn verify_access_token(
|
||||
})
|
||||
}
|
||||
|
||||
pub fn generate_dev_server_token(id: usize, access_token: String) -> String {
|
||||
format!("{}.{}", id, access_token)
|
||||
}
|
||||
|
||||
pub async fn verify_dev_server_token(
|
||||
dev_server_token: &str,
|
||||
db: &Arc<Database>,
|
||||
) -> anyhow::Result<dev_server::Model> {
|
||||
let (id, token) = split_dev_server_token(dev_server_token)?;
|
||||
let token_hash = hash_access_token(token);
|
||||
let server = db.get_dev_server(id).await?;
|
||||
|
||||
if server
|
||||
.hashed_token
|
||||
.as_bytes()
|
||||
.ct_eq(token_hash.as_ref())
|
||||
.into()
|
||||
{
|
||||
Ok(server)
|
||||
} else {
|
||||
Err(anyhow!("wrong token for dev server"))
|
||||
}
|
||||
}
|
||||
|
||||
// a dev_server_token has the format <id>.<base64>. This is to make them
|
||||
// relatively easy to copy/paste around.
|
||||
pub fn split_dev_server_token(dev_server_token: &str) -> anyhow::Result<(DevServerId, &str)> {
|
||||
let mut parts = dev_server_token.splitn(2, '.');
|
||||
let id = DevServerId(parts.next().unwrap_or_default().parse()?);
|
||||
let token = parts
|
||||
.next()
|
||||
.ok_or_else(|| anyhow!("invalid dev server token format"))?;
|
||||
Ok((id, token))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use rand::thread_rng;
|
||||
|
||||
@@ -617,7 +617,6 @@ pub struct ChannelsForUser {
|
||||
pub channels: Vec<Channel>,
|
||||
pub channel_memberships: Vec<channel_member::Model>,
|
||||
pub channel_participants: HashMap<ChannelId, Vec<UserId>>,
|
||||
pub hosted_projects: Vec<proto::HostedProject>,
|
||||
pub invited_channels: Vec<Channel>,
|
||||
|
||||
pub observed_buffer_versions: Vec<proto::ChannelBufferVersion>,
|
||||
@@ -726,7 +725,6 @@ pub struct Project {
|
||||
pub collaborators: Vec<ProjectCollaborator>,
|
||||
pub worktrees: BTreeMap<u64, Worktree>,
|
||||
pub language_servers: Vec<proto::LanguageServer>,
|
||||
pub dev_server_project_id: Option<DevServerProjectId>,
|
||||
}
|
||||
|
||||
pub struct ProjectCollaborator {
|
||||
@@ -742,6 +740,7 @@ impl ProjectCollaborator {
|
||||
peer_id: Some(self.connection_id.into()),
|
||||
replica_id: self.replica_id.0 as u32,
|
||||
user_id: self.user_id.to_proto(),
|
||||
is_host: self.is_host,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -79,7 +79,6 @@ id_type!(ChannelChatParticipantId);
|
||||
id_type!(ChannelId);
|
||||
id_type!(ChannelMemberId);
|
||||
id_type!(ContactId);
|
||||
id_type!(DevServerId);
|
||||
id_type!(ExtensionId);
|
||||
id_type!(FlagId);
|
||||
id_type!(FollowerId);
|
||||
@@ -89,7 +88,6 @@ id_type!(NotificationId);
|
||||
id_type!(NotificationKindId);
|
||||
id_type!(ProjectCollaboratorId);
|
||||
id_type!(ProjectId);
|
||||
id_type!(DevServerProjectId);
|
||||
id_type!(ReplicaId);
|
||||
id_type!(RoomId);
|
||||
id_type!(RoomParticipantId);
|
||||
@@ -277,12 +275,6 @@ impl From<ChannelVisibility> for i32 {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Serialize, PartialEq)]
|
||||
pub enum PrincipalId {
|
||||
UserId(UserId),
|
||||
DevServerId(DevServerId),
|
||||
}
|
||||
|
||||
/// Indicate whether a [Buffer] has permissions to edit.
|
||||
#[derive(PartialEq, Clone, Copy, Debug)]
|
||||
pub enum Capability {
|
||||
|
||||
@@ -8,11 +8,8 @@ pub mod buffers;
|
||||
pub mod channels;
|
||||
pub mod contacts;
|
||||
pub mod contributors;
|
||||
pub mod dev_server_projects;
|
||||
pub mod dev_servers;
|
||||
pub mod embeddings;
|
||||
pub mod extensions;
|
||||
pub mod hosted_projects;
|
||||
pub mod messages;
|
||||
pub mod notifications;
|
||||
pub mod processed_stripe_events;
|
||||
|
||||
@@ -116,6 +116,7 @@ impl Database {
|
||||
peer_id: Some(collaborator.connection().into()),
|
||||
user_id: collaborator.user_id.to_proto(),
|
||||
replica_id: collaborator.replica_id.0 as u32,
|
||||
is_host: false,
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
@@ -222,6 +223,7 @@ impl Database {
|
||||
peer_id: Some(collaborator.connection().into()),
|
||||
user_id: collaborator.user_id.to_proto(),
|
||||
replica_id: collaborator.replica_id.0 as u32,
|
||||
is_host: false,
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
@@ -257,6 +259,7 @@ impl Database {
|
||||
peer_id: Some(db_collaborator.connection().into()),
|
||||
replica_id: db_collaborator.replica_id.0 as u32,
|
||||
user_id: db_collaborator.user_id.to_proto(),
|
||||
is_host: false,
|
||||
})
|
||||
} else {
|
||||
collaborator_ids_to_remove.push(db_collaborator.id);
|
||||
@@ -385,6 +388,7 @@ impl Database {
|
||||
peer_id: Some(connection.into()),
|
||||
replica_id: row.replica_id.0 as u32,
|
||||
user_id: row.user_id.to_proto(),
|
||||
is_host: false,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -615,15 +615,10 @@ impl Database {
|
||||
.observed_channel_messages(&channel_ids, user_id, tx)
|
||||
.await?;
|
||||
|
||||
let hosted_projects = self
|
||||
.get_hosted_projects(&channel_ids, &roles_by_channel_id, tx)
|
||||
.await?;
|
||||
|
||||
Ok(ChannelsForUser {
|
||||
channel_memberships,
|
||||
channels,
|
||||
invited_channels,
|
||||
hosted_projects,
|
||||
channel_participants,
|
||||
latest_buffer_versions,
|
||||
latest_channel_messages,
|
||||
|
||||
@@ -1,365 +1 @@
|
||||
use anyhow::anyhow;
|
||||
use rpc::{
|
||||
proto::{self},
|
||||
ConnectionId,
|
||||
};
|
||||
use sea_orm::{
|
||||
ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait,
|
||||
IntoActiveModel, ModelTrait, QueryFilter,
|
||||
};
|
||||
|
||||
use crate::db::ProjectId;
|
||||
|
||||
use super::{
|
||||
dev_server, dev_server_project, project, project_collaborator, worktree, Database, DevServerId,
|
||||
DevServerProjectId, RejoinedProject, ResharedProject, ServerId, UserId,
|
||||
};
|
||||
|
||||
impl Database {
|
||||
pub async fn get_dev_server_project(
|
||||
&self,
|
||||
dev_server_project_id: DevServerProjectId,
|
||||
) -> crate::Result<dev_server_project::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(
|
||||
dev_server_project::Entity::find_by_id(dev_server_project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
anyhow!("no dev server project with id {}", dev_server_project_id)
|
||||
})?,
|
||||
)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_projects_for_dev_server(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
) -> crate::Result<Vec<proto::DevServerProject>> {
|
||||
self.transaction(|tx| async move {
|
||||
self.get_projects_for_dev_server_internal(dev_server_id, &tx)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_projects_for_dev_server_internal(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<Vec<proto::DevServerProject>> {
|
||||
let servers = dev_server_project::Entity::find()
|
||||
.filter(dev_server_project::Column::DevServerId.eq(dev_server_id))
|
||||
.find_also_related(project::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
Ok(servers
|
||||
.into_iter()
|
||||
.map(|(dev_server_project, project)| dev_server_project.to_proto(project))
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub async fn dev_server_project_ids_for_user(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<Vec<DevServerProjectId>> {
|
||||
let dev_servers = dev_server::Entity::find()
|
||||
.filter(dev_server::Column::UserId.eq(user_id))
|
||||
.find_with_related(dev_server_project::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
Ok(dev_servers
|
||||
.into_iter()
|
||||
.flat_map(|(_, projects)| projects.into_iter().map(|p| p.id))
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub async fn owner_for_dev_server_project(
|
||||
&self,
|
||||
dev_server_project_id: DevServerProjectId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<UserId> {
|
||||
let dev_server = dev_server_project::Entity::find_by_id(dev_server_project_id)
|
||||
.find_also_related(dev_server::Entity)
|
||||
.one(tx)
|
||||
.await?
|
||||
.and_then(|(_, dev_server)| dev_server)
|
||||
.ok_or_else(|| anyhow!("no dev server project"))?;
|
||||
|
||||
Ok(dev_server.user_id)
|
||||
}
|
||||
|
||||
pub async fn get_stale_dev_server_projects(
|
||||
&self,
|
||||
connection: ConnectionId,
|
||||
) -> crate::Result<Vec<ProjectId>> {
|
||||
self.transaction(|tx| async move {
|
||||
let projects = project::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::HostConnectionId.eq(connection.id))
|
||||
.add(project::Column::HostConnectionServerId.eq(connection.owner_id)),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(projects.into_iter().map(|p| p.id).collect())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn create_dev_server_project(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
path: &str,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
|
||||
if dev_server.user_id != user_id {
|
||||
return Err(anyhow!("not your dev server"))?;
|
||||
}
|
||||
|
||||
let project = dev_server_project::Entity::insert(dev_server_project::ActiveModel {
|
||||
id: ActiveValue::NotSet,
|
||||
dev_server_id: ActiveValue::Set(dev_server_id),
|
||||
paths: ActiveValue::Set(dev_server_project::JSONPaths(vec![path.to_string()])),
|
||||
})
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
let status = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok((project, status))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn update_dev_server_project(
|
||||
&self,
|
||||
id: DevServerProjectId,
|
||||
paths: &[String],
|
||||
user_id: UserId,
|
||||
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
|
||||
self.transaction(move |tx| async move {
|
||||
let paths = paths.to_owned();
|
||||
let Some((project, Some(dev_server))) = dev_server_project::Entity::find_by_id(id)
|
||||
.find_also_related(dev_server::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
else {
|
||||
return Err(anyhow!("no such dev server project"))?;
|
||||
};
|
||||
|
||||
if dev_server.user_id != user_id {
|
||||
return Err(anyhow!("not your dev server"))?;
|
||||
}
|
||||
let mut project = project.into_active_model();
|
||||
project.paths = ActiveValue::Set(dev_server_project::JSONPaths(paths));
|
||||
let project = project.update(&*tx).await?;
|
||||
|
||||
let status = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok((project, status))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_dev_server_project(
|
||||
&self,
|
||||
dev_server_project_id: DevServerProjectId,
|
||||
dev_server_id: DevServerId,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<(Vec<proto::DevServerProject>, proto::DevServerProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
project::Entity::delete_many()
|
||||
.filter(project::Column::DevServerProjectId.eq(dev_server_project_id))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
let result = dev_server_project::Entity::delete_by_id(dev_server_project_id)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if result.rows_affected != 1 {
|
||||
return Err(anyhow!(
|
||||
"no dev server project with id {}",
|
||||
dev_server_project_id
|
||||
))?;
|
||||
}
|
||||
|
||||
let status = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
let projects = self
|
||||
.get_projects_for_dev_server_internal(dev_server_id, &tx)
|
||||
.await?;
|
||||
Ok((projects, status))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn share_dev_server_project(
|
||||
&self,
|
||||
dev_server_project_id: DevServerProjectId,
|
||||
dev_server_id: DevServerId,
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
) -> crate::Result<(
|
||||
proto::DevServerProject,
|
||||
UserId,
|
||||
proto::DevServerProjectsUpdate,
|
||||
)> {
|
||||
self.transaction(|tx| async move {
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
|
||||
|
||||
let dev_server_project = dev_server_project::Entity::find_by_id(dev_server_project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
anyhow!("no dev server project with id {}", dev_server_project_id)
|
||||
})?;
|
||||
|
||||
if dev_server_project.dev_server_id != dev_server_id {
|
||||
return Err(anyhow!("dev server project shared from wrong server"))?;
|
||||
}
|
||||
|
||||
let project = project::ActiveModel {
|
||||
room_id: ActiveValue::Set(None),
|
||||
host_user_id: ActiveValue::Set(None),
|
||||
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
host_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
id: ActiveValue::NotSet,
|
||||
hosted_project_id: ActiveValue::Set(None),
|
||||
dev_server_project_id: ActiveValue::Set(Some(dev_server_project_id)),
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
if !worktrees.is_empty() {
|
||||
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
|
||||
worktree::ActiveModel {
|
||||
id: ActiveValue::set(worktree.id as i64),
|
||||
project_id: ActiveValue::set(project.id),
|
||||
abs_path: ActiveValue::set(worktree.abs_path.clone()),
|
||||
root_name: ActiveValue::set(worktree.root_name.clone()),
|
||||
visible: ActiveValue::set(worktree.visible),
|
||||
scan_id: ActiveValue::set(0),
|
||||
completed_scan_id: ActiveValue::set(0),
|
||||
}
|
||||
}))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let status = self
|
||||
.dev_server_projects_update_internal(dev_server.user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok((
|
||||
dev_server_project.to_proto(Some(project)),
|
||||
dev_server.user_id,
|
||||
status,
|
||||
))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn reshare_dev_server_projects(
|
||||
&self,
|
||||
reshared_projects: &Vec<proto::UpdateProject>,
|
||||
dev_server_id: DevServerId,
|
||||
connection: ConnectionId,
|
||||
) -> crate::Result<Vec<ResharedProject>> {
|
||||
self.transaction(|tx| async move {
|
||||
let mut ret = Vec::new();
|
||||
for reshared_project in reshared_projects {
|
||||
let project_id = ProjectId::from_proto(reshared_project.project_id);
|
||||
let (project, dev_server_project) = project::Entity::find_by_id(project_id)
|
||||
.find_also_related(dev_server_project::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project does not exist"))?;
|
||||
|
||||
if dev_server_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) {
|
||||
return Err(anyhow!("dev server project reshared from wrong server"))?;
|
||||
}
|
||||
|
||||
let Ok(old_connection_id) = project.host_connection() else {
|
||||
return Err(anyhow!("dev server project was not shared"))?;
|
||||
};
|
||||
|
||||
project::Entity::update(project::ActiveModel {
|
||||
id: ActiveValue::set(project_id),
|
||||
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
host_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
|
||||
.await?;
|
||||
|
||||
ret.push(super::ResharedProject {
|
||||
id: project_id,
|
||||
old_connection_id,
|
||||
collaborators: collaborators
|
||||
.iter()
|
||||
.map(|collaborator| super::ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect(),
|
||||
worktrees: reshared_project.worktrees.clone(),
|
||||
});
|
||||
}
|
||||
Ok(ret)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn rejoin_dev_server_projects(
|
||||
&self,
|
||||
rejoined_projects: &Vec<proto::RejoinProject>,
|
||||
user_id: UserId,
|
||||
connection_id: ConnectionId,
|
||||
) -> crate::Result<Vec<RejoinedProject>> {
|
||||
self.transaction(|tx| async move {
|
||||
let mut ret = Vec::new();
|
||||
for rejoined_project in rejoined_projects {
|
||||
if let Some(project) = self
|
||||
.rejoin_project_internal(&tx, rejoined_project, user_id, connection_id)
|
||||
.await?
|
||||
{
|
||||
ret.push(project);
|
||||
}
|
||||
}
|
||||
Ok(ret)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,222 +1 @@
|
||||
use rpc::proto;
|
||||
use sea_orm::{
|
||||
ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, IntoActiveModel, QueryFilter,
|
||||
};
|
||||
|
||||
use super::{dev_server, dev_server_project, Database, DevServerId, UserId};
|
||||
|
||||
impl Database {
|
||||
pub async fn get_dev_server(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
) -> crate::Result<dev_server::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(dev_server::Entity::find_by_id(dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
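/// Returns the dev server with the given id, verifying that it is owned by the given user.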
pub async fn get_dev_server_for_user(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<dev_server::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
let server = dev_server::Entity::find_by_id(dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?;
|
||||
if server.user_id != user_id {
|
||||
return Err(anyhow::anyhow!(
|
||||
"dev server {} is not owned by user {}",
|
||||
dev_server_id,
|
||||
user_id
|
||||
))?;
|
||||
}
|
||||
Ok(server)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_dev_servers(&self, user_id: UserId) -> crate::Result<Vec<dev_server::Model>> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(dev_server::Entity::find()
|
||||
.filter(dev_server::Column::UserId.eq(user_id))
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn dev_server_projects_update(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<proto::DevServerProjectsUpdate> {
|
||||
self.transaction(|tx| async move {
|
||||
self.dev_server_projects_update_internal(user_id, &tx).await
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
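/// Collects the given user's dev servers and their projects into a DevServerProjectsUpdate, using an existing transaction.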
pub async fn dev_server_projects_update_internal(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> crate::Result<proto::DevServerProjectsUpdate> {
|
||||
let dev_servers = dev_server::Entity::find()
|
||||
.filter(dev_server::Column::UserId.eq(user_id))
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects = dev_server_project::Entity::find()
|
||||
.filter(
|
||||
dev_server_project::Column::DevServerId
|
||||
.is_in(dev_servers.iter().map(|d| d.id).collect::<Vec<_>>()),
|
||||
)
|
||||
.find_also_related(super::project::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
|
||||
Ok(proto::DevServerProjectsUpdate {
|
||||
dev_servers: dev_servers
|
||||
.into_iter()
|
||||
.map(|d| d.to_proto(proto::DevServerStatus::Offline))
|
||||
.collect(),
|
||||
dev_server_projects: dev_server_projects
|
||||
.into_iter()
|
||||
.map(|(dev_server_project, project)| dev_server_project.to_proto(project))
|
||||
.collect(),
|
||||
})
|
||||
}
|
||||
|
||||
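/// Creates a dev server owned by the given user. The name must be non-empty.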
pub async fn create_dev_server(
|
||||
&self,
|
||||
name: &str,
|
||||
ssh_connection_string: Option<&str>,
|
||||
hashed_access_token: &str,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<(dev_server::Model, proto::DevServerProjectsUpdate)> {
|
||||
self.transaction(|tx| async move {
|
||||
if name.trim().is_empty() {
|
||||
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
let dev_server = dev_server::Entity::insert(dev_server::ActiveModel {
|
||||
id: ActiveValue::NotSet,
|
||||
hashed_token: ActiveValue::Set(hashed_access_token.to_string()),
|
||||
name: ActiveValue::Set(name.trim().to_string()),
|
||||
user_id: ActiveValue::Set(user_id),
|
||||
ssh_connection_string: ActiveValue::Set(
|
||||
ssh_connection_string.map(ToOwned::to_owned),
|
||||
),
|
||||
})
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok((dev_server, dev_server_projects))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
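/// Replaces the hashed access token of the given dev server, provided it is owned by the given user.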
pub async fn update_dev_server_token(
|
||||
&self,
|
||||
id: DevServerId,
|
||||
hashed_token: &str,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<proto::DevServerProjectsUpdate> {
|
||||
self.transaction(|tx| async move {
|
||||
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
|
||||
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
|
||||
};
|
||||
if dev_server.user_id != user_id {
|
||||
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
dev_server::Entity::update(dev_server::ActiveModel {
|
||||
hashed_token: ActiveValue::Set(hashed_token.to_string()),
|
||||
..dev_server.clone().into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok(dev_server_projects)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
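/// Renames the given dev server and updates its SSH connection string, provided it is owned by the given user and the new name is non-empty.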
pub async fn rename_dev_server(
|
||||
&self,
|
||||
id: DevServerId,
|
||||
name: &str,
|
||||
ssh_connection_string: Option<&str>,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<proto::DevServerProjectsUpdate> {
|
||||
self.transaction(|tx| async move {
|
||||
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
|
||||
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
|
||||
};
|
||||
if dev_server.user_id != user_id || name.trim().is_empty() {
|
||||
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
dev_server::Entity::update(dev_server::ActiveModel {
|
||||
name: ActiveValue::Set(name.trim().to_string()),
|
||||
ssh_connection_string: ActiveValue::Set(
|
||||
ssh_connection_string.map(ToOwned::to_owned),
|
||||
),
|
||||
..dev_server.clone().into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok(dev_server_projects)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
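/// Deletes the given dev server and all of its dev server projects, provided it is owned by the given user.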
pub async fn delete_dev_server(
|
||||
&self,
|
||||
id: DevServerId,
|
||||
user_id: UserId,
|
||||
) -> crate::Result<proto::DevServerProjectsUpdate> {
|
||||
self.transaction(|tx| async move {
|
||||
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
|
||||
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
|
||||
};
|
||||
if dev_server.user_id != user_id {
|
||||
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
|
||||
}
|
||||
|
||||
dev_server_project::Entity::delete_many()
|
||||
.filter(dev_server_project::Column::DevServerId.eq(id))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
dev_server::Entity::delete(dev_server.into_active_model())
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects = self
|
||||
.dev_server_projects_update_internal(user_id, &tx)
|
||||
.await?;
|
||||
|
||||
Ok(dev_server_projects)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,85 +0,0 @@
|
||||
use rpc::{proto, ErrorCode};
|
||||
|
||||
use super::*;
|
||||
|
||||
impl Database {
|
||||
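/// Returns the hosted projects in the given channels that are visible to a user with the given channel roles.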
pub async fn get_hosted_projects(
|
||||
&self,
|
||||
channel_ids: &[ChannelId],
|
||||
roles: &HashMap<ChannelId, ChannelRole>,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<proto::HostedProject>> {
|
||||
let projects = hosted_project::Entity::find()
|
||||
.find_also_related(project::Entity)
|
||||
.filter(hosted_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
|
||||
.all(tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.flat_map(|(hosted_project, project)| {
|
||||
if hosted_project.deleted_at.is_some() {
|
||||
return None;
|
||||
}
|
||||
match hosted_project.visibility {
|
||||
ChannelVisibility::Public => {}
|
||||
ChannelVisibility::Members => {
|
||||
let is_visible = roles
|
||||
.get(&hosted_project.channel_id)
|
||||
.map(|role| role.can_see_all_descendants())
|
||||
.unwrap_or(false);
|
||||
if !is_visible {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
};
|
||||
Some(proto::HostedProject {
|
||||
project_id: project?.id.to_proto(),
|
||||
channel_id: hosted_project.channel_id.to_proto(),
|
||||
name: hosted_project.name.clone(),
|
||||
visibility: hosted_project.visibility.into(),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(projects)
|
||||
}
|
||||
|
||||
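/// Returns the given hosted project along with the user's role in its channel, verifying that the user may access it.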
pub async fn get_hosted_project(
|
||||
&self,
|
||||
hosted_project_id: HostedProjectId,
|
||||
user_id: UserId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<(hosted_project::Model, ChannelRole)> {
|
||||
let project = hosted_project::Entity::find_by_id(hosted_project_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?;
|
||||
let channel = channel::Entity::find_by_id(project.channel_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!(ErrorCode::NoSuchChannel))?;
|
||||
|
||||
let role = match project.visibility {
|
||||
ChannelVisibility::Public => {
|
||||
self.check_user_is_channel_participant(&channel, user_id, tx)
|
||||
.await?
|
||||
}
|
||||
ChannelVisibility::Members => {
|
||||
self.check_user_is_channel_member(&channel, user_id, tx)
|
||||
.await?
|
||||
}
|
||||
};
|
||||
|
||||
Ok((project, role))
|
||||
}
|
||||
|
||||
pub async fn is_hosted_project(&self, project_id: ProjectId) -> Result<bool> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.map(|project| project.hosted_project_id.is_some())
|
||||
.ok_or_else(|| anyhow!(ErrorCode::NoSuchProject))?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
@@ -32,7 +32,6 @@ impl Database {
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
is_ssh_project: bool,
|
||||
dev_server_project_id: Option<DevServerProjectId>,
|
||||
) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let participant = room_participant::Entity::find()
|
||||
@@ -61,38 +60,6 @@ impl Database {
|
||||
return Err(anyhow!("guests cannot share projects"))?;
|
||||
}
|
||||
|
||||
if let Some(dev_server_project_id) = dev_server_project_id {
|
||||
let project = project::Entity::find()
|
||||
.filter(project::Column::DevServerProjectId.eq(Some(dev_server_project_id)))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no remote project"))?;
|
||||
|
||||
let (_, dev_server) = dev_server_project::Entity::find_by_id(dev_server_project_id)
|
||||
.find_also_related(dev_server::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no dev_server_project"))?;
|
||||
|
||||
if !dev_server.is_some_and(|dev_server| dev_server.user_id == participant.user_id) {
|
||||
return Err(anyhow!("not your dev server"))?;
|
||||
}
|
||||
|
||||
if project.room_id.is_some() {
|
||||
return Err(anyhow!("project already shared"))?;
|
||||
};
|
||||
|
||||
let project = project::Entity::update(project::ActiveModel {
|
||||
room_id: ActiveValue::Set(Some(room_id)),
|
||||
..project.into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
return Ok((project.id, room));
|
||||
}
|
||||
|
||||
let project = project::ActiveModel {
|
||||
room_id: ActiveValue::set(Some(participant.room_id)),
|
||||
host_user_id: ActiveValue::set(Some(participant.user_id)),
|
||||
@@ -101,8 +68,6 @@ impl Database {
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
id: ActiveValue::NotSet,
|
||||
hosted_project_id: ActiveValue::Set(None),
|
||||
dev_server_project_id: ActiveValue::Set(None),
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
@@ -156,7 +121,6 @@ impl Database {
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
user_id: Option<UserId>,
|
||||
) -> Result<TransactionGuard<(bool, Option<proto::Room>, Vec<ConnectionId>)>> {
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
@@ -172,25 +136,6 @@ impl Database {
|
||||
if project.host_connection()? == connection {
|
||||
return Ok((true, room, guest_connection_ids));
|
||||
}
|
||||
if let Some(dev_server_project_id) = project.dev_server_project_id {
|
||||
if let Some(user_id) = user_id {
|
||||
if user_id
|
||||
!= self
|
||||
.owner_for_dev_server_project(dev_server_project_id, &tx)
|
||||
.await?
|
||||
{
|
||||
Err(anyhow!("cannot unshare a project hosted by another user"))?
|
||||
}
|
||||
project::Entity::update(project::ActiveModel {
|
||||
room_id: ActiveValue::Set(None),
|
||||
..project.into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
return Ok((false, room, guest_connection_ids));
|
||||
}
|
||||
}
|
||||
|
||||
Err(anyhow!("cannot unshare a project hosted by another user"))?
|
||||
})
|
||||
.await
|
||||
@@ -272,6 +217,16 @@ impl Database {
|
||||
update: &proto::UpdateWorktree,
|
||||
connection: ConnectionId,
|
||||
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
|
||||
if update.removed_entries.len() > proto::MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE
|
||||
|| update.updated_entries.len() > proto::MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE
|
||||
{
|
||||
return Err(anyhow!(
|
||||
"invalid worktree update. removed entries: {}, updated entries: {}",
|
||||
update.removed_entries.len(),
|
||||
update.updated_entries.len()
|
||||
))?;
|
||||
}
|
||||
|
||||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
let worktree_id = update.worktree_id as i64;
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
@@ -580,39 +535,6 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
/// Adds the given connection to the specified hosted project
|
||||
pub async fn join_hosted_project(
|
||||
&self,
|
||||
id: ProjectId,
|
||||
user_id: UserId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<(Project, ReplicaId)> {
|
||||
self.transaction(|tx| async move {
|
||||
let (project, hosted_project) = project::Entity::find_by_id(id)
|
||||
.find_also_related(hosted_project::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("hosted project is no longer shared"))?;
|
||||
|
||||
let Some(hosted_project) = hosted_project else {
|
||||
return Err(anyhow!("project is not hosted"))?;
|
||||
};
|
||||
|
||||
let channel = channel::Entity::find_by_id(hosted_project.channel_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such channel"))?;
|
||||
|
||||
let role = self
|
||||
.check_user_is_channel_participant(&channel, user_id, &tx)
|
||||
.await?;
|
||||
|
||||
self.join_project_internal(project, user_id, connection, role, &tx)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_project(&self, id: ProjectId) -> Result<project::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(project::Entity::find_by_id(id)
|
||||
@@ -623,17 +545,6 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn find_dev_server_project(&self, id: DevServerProjectId) -> Result<project::Model> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(project::Entity::find()
|
||||
.filter(project::Column::DevServerProjectId.eq(id))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
/// Adds the given connection to the specified project
|
||||
/// in the current room.
|
||||
pub async fn join_project(
|
||||
@@ -644,13 +555,7 @@ impl Database {
|
||||
) -> Result<TransactionGuard<(Project, ReplicaId)>> {
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
let (project, role) = self
|
||||
.access_project(
|
||||
project_id,
|
||||
connection,
|
||||
PrincipalId::UserId(user_id),
|
||||
Capability::ReadOnly,
|
||||
&tx,
|
||||
)
|
||||
.access_project(project_id, connection, Capability::ReadOnly, &tx)
|
||||
.await?;
|
||||
self.join_project_internal(project, user_id, connection, role, &tx)
|
||||
.await
|
||||
@@ -841,54 +746,10 @@ impl Database {
|
||||
worktree_id: None,
|
||||
})
|
||||
.collect(),
|
||||
dev_server_project_id: project.dev_server_project_id,
|
||||
};
|
||||
Ok((project, replica_id as ReplicaId))
|
||||
}
|
||||
|
||||
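/// Removes the given connection from the specified hosted project.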
pub async fn leave_hosted_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<LeftProject> {
|
||||
self.transaction(|tx| async move {
|
||||
let result = project_collaborator::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.add(project_collaborator::Column::ConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project_collaborator::Column::ConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if result.rows_affected == 0 {
|
||||
return Err(anyhow!("not in the project"))?;
|
||||
}
|
||||
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
let collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let connection_ids = collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| collaborator.connection())
|
||||
.collect();
|
||||
Ok(LeftProject {
|
||||
id: project.id,
|
||||
connection_ids,
|
||||
should_unshare: false,
|
||||
})
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
/// Removes the given connection from the specified project.
|
||||
pub async fn leave_project(
|
||||
&self,
|
||||
@@ -997,29 +858,14 @@ impl Database {
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection_id: ConnectionId,
|
||||
principal_id: PrincipalId,
|
||||
capability: Capability,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<(project::Model, ChannelRole)> {
|
||||
let (mut project, dev_server_project) = project::Entity::find_by_id(project_id)
|
||||
.find_also_related(dev_server_project::Entity)
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
|
||||
let user_id = match principal_id {
|
||||
PrincipalId::DevServerId(_) => {
|
||||
if project
|
||||
.host_connection()
|
||||
.is_ok_and(|connection| connection == connection_id)
|
||||
{
|
||||
return Ok((project, ChannelRole::Admin));
|
||||
}
|
||||
return Err(anyhow!("not the project host"))?;
|
||||
}
|
||||
PrincipalId::UserId(user_id) => user_id,
|
||||
};
|
||||
|
||||
let role_from_room = if let Some(room_id) = project.room_id {
|
||||
room_participant::Entity::find()
|
||||
.filter(room_participant::Column::RoomId.eq(room_id))
|
||||
@@ -1030,34 +876,8 @@ impl Database {
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let role_from_dev_server = if let Some(dev_server_project) = dev_server_project {
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such channel"))?;
|
||||
if user_id == dev_server.user_id {
|
||||
// If the user left the room "uncleanly" they may rejoin the
|
||||
// remote project before leave_room runs. In that case, kick
|
||||
// the project out of the room pre-emptively.
|
||||
if role_from_room.is_none() {
|
||||
project = project::Entity::update(project::ActiveModel {
|
||||
room_id: ActiveValue::Set(None),
|
||||
..project.into_active_model()
|
||||
})
|
||||
.exec(tx)
|
||||
.await?;
|
||||
}
|
||||
Some(ChannelRole::Admin)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let role = role_from_dev_server
|
||||
.or(role_from_room)
|
||||
.unwrap_or(ChannelRole::Banned);
|
||||
let role = role_from_room.unwrap_or(ChannelRole::Banned);
|
||||
|
||||
match capability {
|
||||
Capability::ReadWrite => {
|
||||
@@ -1080,17 +900,10 @@ impl Database {
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection_id: ConnectionId,
|
||||
user_id: UserId,
|
||||
) -> Result<ConnectionId> {
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
let (project, _) = self
|
||||
.access_project(
|
||||
project_id,
|
||||
connection_id,
|
||||
PrincipalId::UserId(user_id),
|
||||
Capability::ReadOnly,
|
||||
&tx,
|
||||
)
|
||||
.access_project(project_id, connection_id, Capability::ReadOnly, &tx)
|
||||
.await?;
|
||||
project.host_connection()
|
||||
})
|
||||
@@ -1103,17 +916,10 @@ impl Database {
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection_id: ConnectionId,
|
||||
user_id: UserId,
|
||||
) -> Result<ConnectionId> {
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
let (project, _) = self
|
||||
.access_project(
|
||||
project_id,
|
||||
connection_id,
|
||||
PrincipalId::UserId(user_id),
|
||||
Capability::ReadWrite,
|
||||
&tx,
|
||||
)
|
||||
.access_project(project_id, connection_id, Capability::ReadWrite, &tx)
|
||||
.await?;
|
||||
project.host_connection()
|
||||
})
|
||||
@@ -1121,47 +927,16 @@ impl Database {
|
||||
.map(|guard| guard.into_inner())
|
||||
}
|
||||
|
||||
/// Returns the host connection for a request to join a shared project.
|
||||
pub async fn host_for_owner_project_request(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
_connection_id: ConnectionId,
|
||||
user_id: UserId,
|
||||
) -> Result<ConnectionId> {
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
let (project, dev_server_project) = project::Entity::find_by_id(project_id)
|
||||
.find_also_related(dev_server_project::Entity)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
|
||||
let Some(dev_server_project) = dev_server_project else {
|
||||
return Err(anyhow!("not a dev server project"))?;
|
||||
};
|
||||
let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such dev server"))?;
|
||||
if dev_server.user_id != user_id {
|
||||
return Err(anyhow!("not your project"))?;
|
||||
}
|
||||
project.host_connection()
|
||||
})
|
||||
.await
|
||||
.map(|guard| guard.into_inner())
|
||||
}
|
||||
|
||||
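/// Authorizes the connection against the project, then returns the project's host connection together with the connections that should receive the buffer update.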
pub async fn connections_for_buffer_update(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
principal_id: PrincipalId,
|
||||
connection_id: ConnectionId,
|
||||
capability: Capability,
|
||||
) -> Result<TransactionGuard<(ConnectionId, Vec<ConnectionId>)>> {
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
// Authorize
|
||||
let (project, _) = self
|
||||
.access_project(project_id, connection_id, principal_id, capability, &tx)
|
||||
.access_project(project_id, connection_id, capability, &tx)
|
||||
.await?;
|
||||
|
||||
let host_connection_id = project.host_connection()?;
|
||||
|
||||
@@ -858,25 +858,6 @@ impl Database {
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
// Collect any projects in the room whose dev server project belongs to a dev server owned by this user, so they can be unshared below.
|
||||
let dev_server_projects_for_user = self
|
||||
.dev_server_project_ids_for_user(leaving_participant.user_id, &tx)
|
||||
.await?;
|
||||
|
||||
let dev_server_projects_to_unshare = project::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::RoomId.eq(room_id))
|
||||
.add(
|
||||
project::Column::DevServerProjectId
|
||||
.is_in(dev_server_projects_for_user.clone()),
|
||||
),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|project| project.id)
|
||||
.collect::<HashSet<_>>();
|
||||
let mut left_projects = HashMap::default();
|
||||
let mut collaborators = project_collaborator::Entity::find()
|
||||
.filter(project_collaborator::Column::ProjectId.is_in(project_ids))
|
||||
@@ -899,9 +880,7 @@ impl Database {
|
||||
left_project.connection_ids.push(collaborator_connection_id);
|
||||
}
|
||||
|
||||
if (collaborator.is_host && collaborator.connection() == connection)
|
||||
|| dev_server_projects_to_unshare.contains(&collaborator.project_id)
|
||||
{
|
||||
if collaborator.is_host && collaborator.connection() == connection {
|
||||
left_project.should_unshare = true;
|
||||
}
|
||||
}
|
||||
@@ -944,17 +923,6 @@ impl Database {
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
if !dev_server_projects_to_unshare.is_empty() {
|
||||
project::Entity::update_many()
|
||||
.filter(project::Column::Id.is_in(dev_server_projects_to_unshare))
|
||||
.set(project::ActiveModel {
|
||||
room_id: ActiveValue::Set(None),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
|
||||
let deleted = if room.participants.is_empty() {
|
||||
let result = room::Entity::delete_by_id(room_id).exec(&*tx).await?;
|
||||
@@ -1323,26 +1291,6 @@ impl Database {
|
||||
project.worktree_root_names.push(db_worktree.root_name);
|
||||
}
|
||||
}
|
||||
} else if let Some(dev_server_project_id) = db_project.dev_server_project_id {
|
||||
let host = self
|
||||
.owner_for_dev_server_project(dev_server_project_id, tx)
|
||||
.await?;
|
||||
if let Some((_, participant)) = participants
|
||||
.iter_mut()
|
||||
.find(|(_, v)| v.user_id == host.to_proto())
|
||||
{
|
||||
participant.projects.push(proto::ParticipantProject {
|
||||
id: db_project.id.to_proto(),
|
||||
worktree_root_names: Default::default(),
|
||||
});
|
||||
let project = participant.projects.last_mut().unwrap();
|
||||
|
||||
for db_worktree in db_worktrees {
|
||||
if db_worktree.visible {
|
||||
project.worktree_root_names.push(db_worktree.root_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -13,14 +13,11 @@ pub mod channel_message;
|
||||
pub mod channel_message_mention;
|
||||
pub mod contact;
|
||||
pub mod contributor;
|
||||
pub mod dev_server;
|
||||
pub mod dev_server_project;
|
||||
pub mod embedding;
|
||||
pub mod extension;
|
||||
pub mod extension_version;
|
||||
pub mod feature_flag;
|
||||
pub mod follower;
|
||||
pub mod hosted_project;
|
||||
pub mod language_server;
|
||||
pub mod notification;
|
||||
pub mod notification_kind;
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
use crate::db::{DevServerId, UserId};
|
||||
use rpc::proto;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "dev_servers")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: DevServerId,
|
||||
pub name: String,
|
||||
pub user_id: UserId,
|
||||
pub hashed_token: String,
|
||||
pub ssh_connection_string: Option<String>,
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_many = "super::dev_server_project::Entity")]
|
||||
RemoteProject,
|
||||
}
|
||||
|
||||
impl Related<super::dev_server_project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::RemoteProject.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn to_proto(&self, status: proto::DevServerStatus) -> proto::DevServer {
|
||||
proto::DevServer {
|
||||
dev_server_id: self.id.to_proto(),
|
||||
name: self.name.clone(),
|
||||
status: status as i32,
|
||||
ssh_connection_string: self.ssh_connection_string.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,59 +0,0 @@
|
||||
use super::project;
|
||||
use crate::db::{DevServerId, DevServerProjectId};
|
||||
use rpc::proto;
|
||||
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "dev_server_projects")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: DevServerProjectId,
|
||||
pub dev_server_id: DevServerId,
|
||||
pub paths: JSONPaths,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
|
||||
pub struct JSONPaths(pub Vec<String>);
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_one = "super::project::Entity")]
|
||||
Project,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::dev_server::Entity",
|
||||
from = "Column::DevServerId",
|
||||
to = "super::dev_server::Column::Id"
|
||||
)]
|
||||
DevServer,
|
||||
}
|
||||
|
||||
impl Related<super::project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Project.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::dev_server::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::DevServer.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn to_proto(&self, project: Option<project::Model>) -> proto::DevServerProject {
|
||||
proto::DevServerProject {
|
||||
id: self.id.to_proto(),
|
||||
project_id: project.map(|p| p.id.to_proto()),
|
||||
dev_server_id: self.dev_server_id.to_proto(),
|
||||
path: self.paths().first().cloned().unwrap_or_default(),
|
||||
paths: self.paths().clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn paths(&self) -> &Vec<String> {
|
||||
&self.paths.0
|
||||
}
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
use crate::db::{ChannelId, ChannelVisibility, HostedProjectId};
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "hosted_projects")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: HostedProjectId,
|
||||
pub channel_id: ChannelId,
|
||||
pub name: String,
|
||||
pub visibility: ChannelVisibility,
|
||||
pub deleted_at: Option<DateTime>,
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_one = "super::project::Entity")]
|
||||
Project,
|
||||
}
|
||||
|
||||
impl Related<super::project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Project.def()
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::db::{DevServerProjectId, HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
|
||||
use crate::db::{ProjectId, Result, RoomId, ServerId, UserId};
|
||||
use anyhow::anyhow;
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
@@ -12,8 +12,6 @@ pub struct Model {
|
||||
pub host_user_id: Option<UserId>,
|
||||
pub host_connection_id: Option<i32>,
|
||||
pub host_connection_server_id: Option<ServerId>,
|
||||
pub hosted_project_id: Option<HostedProjectId>,
|
||||
pub dev_server_project_id: Option<DevServerProjectId>,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
@@ -51,18 +49,6 @@ pub enum Relation {
|
||||
Collaborators,
|
||||
#[sea_orm(has_many = "super::language_server::Entity")]
|
||||
LanguageServers,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::hosted_project::Entity",
|
||||
from = "Column::HostedProjectId",
|
||||
to = "super::hosted_project::Column::Id"
|
||||
)]
|
||||
HostedProject,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::dev_server_project::Entity",
|
||||
from = "Column::DevServerProjectId",
|
||||
to = "super::dev_server_project::Column::Id"
|
||||
)]
|
||||
RemoteProject,
|
||||
}
|
||||
|
||||
impl Related<super::user::Entity> for Entity {
|
||||
@@ -95,16 +81,4 @@ impl Related<super::language_server::Entity> for Entity {
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::hosted_project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::HostedProject.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::dev_server_project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::RemoteProject.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
@@ -121,11 +121,13 @@ async fn test_channel_buffers(db: &Arc<Database>) {
|
||||
user_id: a_id.to_proto(),
|
||||
peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }),
|
||||
replica_id: 0,
|
||||
is_host: false,
|
||||
},
|
||||
rpc::proto::Collaborator {
|
||||
user_id: b_id.to_proto(),
|
||||
peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }),
|
||||
replica_id: 1,
|
||||
is_host: false,
|
||||
}
|
||||
]
|
||||
);
|
||||
|
||||
@@ -540,18 +540,18 @@ async fn test_project_count(db: &Arc<Database>) {
|
||||
.unwrap();
|
||||
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
|
||||
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None)
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
|
||||
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None)
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
|
||||
|
||||
// Projects shared by admins aren't counted.
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, None)
|
||||
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
|
||||
|
||||
@@ -449,6 +449,10 @@ async fn check_usage_limit(
|
||||
model_name: &str,
|
||||
claims: &LlmTokenClaims,
|
||||
) -> Result<()> {
|
||||
if claims.is_staff {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let model = state.db.model(provider, model_name)?;
|
||||
let usage = state
|
||||
.db
|
||||
@@ -513,11 +517,6 @@ async fn check_usage_limit(
|
||||
];
|
||||
|
||||
for (used, limit, usage_measure) in checks {
|
||||
// Temporarily bypass rate-limiting for staff members.
|
||||
if claims.is_staff {
|
||||
continue;
|
||||
}
|
||||
|
||||
if used > limit {
|
||||
let resource = match usage_measure {
|
||||
UsageMeasure::RequestsPerMinute => "requests_per_minute",
|
||||
|
||||
@@ -84,6 +84,8 @@ async fn main() -> Result<()> {
|
||||
|
||||
let config = envy::from_env::<Config>().expect("error loading config");
|
||||
init_tracing(&config);
|
||||
init_panic_hook();
|
||||
|
||||
let mut app = Router::new()
|
||||
.route("/", get(handle_root))
|
||||
.route("/healthz", get(handle_liveness_probe))
|
||||
@@ -378,3 +380,20 @@ pub fn init_tracing(config: &Config) -> Option<()> {
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
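/// Installs a panic hook that logs the panic message, location, and a captured backtrace through tracing.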
fn init_panic_hook() {
|
||||
std::panic::set_hook(Box::new(move |panic_info| {
|
||||
let panic_message = match panic_info.payload().downcast_ref::<&'static str>() {
|
||||
Some(message) => *message,
|
||||
None => match panic_info.payload().downcast_ref::<String>() {
|
||||
Some(message) => message.as_str(),
|
||||
None => "Box<Any>",
|
||||
},
|
||||
};
|
||||
let backtrace = std::backtrace::Backtrace::force_capture();
|
||||
let location = panic_info
|
||||
.location()
|
||||
.map(|loc| format!("{}:{}", loc.file(), loc.line()));
|
||||
tracing::error!(panic = true, ?location, %panic_message, %backtrace, "Server Panic");
|
||||
}));
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
|
||||
use crate::db::{ChannelId, ChannelRole, DevServerId, PrincipalId, UserId};
|
||||
use crate::db::{ChannelId, ChannelRole, UserId};
|
||||
use anyhow::{anyhow, Result};
|
||||
use collections::{BTreeMap, HashMap, HashSet};
|
||||
use rpc::{proto, ConnectionId};
|
||||
use rpc::ConnectionId;
|
||||
use semantic_version::SemanticVersion;
|
||||
use serde::Serialize;
|
||||
use std::fmt;
|
||||
@@ -11,9 +11,7 @@ use tracing::instrument;
|
||||
pub struct ConnectionPool {
|
||||
connections: BTreeMap<ConnectionId, Connection>,
|
||||
connected_users: BTreeMap<UserId, ConnectedPrincipal>,
|
||||
connected_dev_servers: BTreeMap<DevServerId, ConnectionId>,
|
||||
channels: ChannelPool,
|
||||
offline_dev_servers: HashSet<DevServerId>,
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize)]
|
||||
@@ -32,13 +30,13 @@ impl fmt::Display for ZedVersion {
|
||||
|
||||
impl ZedVersion {
|
||||
pub fn can_collaborate(&self) -> bool {
|
||||
self.0 >= SemanticVersion::new(0, 151, 0)
|
||||
self.0 >= SemanticVersion::new(0, 157, 0)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct Connection {
|
||||
pub principal_id: PrincipalId,
|
||||
pub user_id: UserId,
|
||||
pub admin: bool,
|
||||
pub zed_version: ZedVersion,
|
||||
}
|
||||
@@ -47,7 +45,6 @@ impl ConnectionPool {
|
||||
pub fn reset(&mut self) {
|
||||
self.connections.clear();
|
||||
self.connected_users.clear();
|
||||
self.connected_dev_servers.clear();
|
||||
self.channels.clear();
|
||||
}
|
||||
|
||||
@@ -66,7 +63,7 @@ impl ConnectionPool {
|
||||
self.connections.insert(
|
||||
connection_id,
|
||||
Connection {
|
||||
principal_id: PrincipalId::UserId(user_id),
|
||||
user_id,
|
||||
admin,
|
||||
zed_version,
|
||||
},
|
||||
@@ -75,25 +72,6 @@ impl ConnectionPool {
|
||||
connected_user.connection_ids.insert(connection_id);
|
||||
}
|
||||
|
||||
pub fn add_dev_server(
|
||||
&mut self,
|
||||
connection_id: ConnectionId,
|
||||
dev_server_id: DevServerId,
|
||||
zed_version: ZedVersion,
|
||||
) {
|
||||
self.connections.insert(
|
||||
connection_id,
|
||||
Connection {
|
||||
principal_id: PrincipalId::DevServerId(dev_server_id),
|
||||
admin: false,
|
||||
zed_version,
|
||||
},
|
||||
);
|
||||
|
||||
self.connected_dev_servers
|
||||
.insert(dev_server_id, connection_id);
|
||||
}
|
||||
|
||||
#[instrument(skip(self))]
|
||||
pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
|
||||
let connection = self
|
||||
@@ -101,28 +79,18 @@ impl ConnectionPool {
|
||||
.get_mut(&connection_id)
|
||||
.ok_or_else(|| anyhow!("no such connection"))?;
|
||||
|
||||
match connection.principal_id {
|
||||
PrincipalId::UserId(user_id) => {
|
||||
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
|
||||
connected_user.connection_ids.remove(&connection_id);
|
||||
if connected_user.connection_ids.is_empty() {
|
||||
self.connected_users.remove(&user_id);
|
||||
self.channels.remove_user(&user_id);
|
||||
}
|
||||
}
|
||||
PrincipalId::DevServerId(dev_server_id) => {
|
||||
self.connected_dev_servers.remove(&dev_server_id);
|
||||
self.offline_dev_servers.remove(&dev_server_id);
|
||||
}
|
||||
}
|
||||
let user_id = connection.user_id;
|
||||
|
||||
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
|
||||
connected_user.connection_ids.remove(&connection_id);
|
||||
if connected_user.connection_ids.is_empty() {
|
||||
self.connected_users.remove(&user_id);
|
||||
self.channels.remove_user(&user_id);
|
||||
};
|
||||
self.connections.remove(&connection_id).unwrap();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn set_dev_server_offline(&mut self, dev_server_id: DevServerId) {
|
||||
self.offline_dev_servers.insert(dev_server_id);
|
||||
}
|
||||
|
||||
pub fn connections(&self) -> impl Iterator<Item = &Connection> {
|
||||
self.connections.values()
|
||||
}
|
||||
@@ -147,42 +115,6 @@ impl ConnectionPool {
|
||||
.copied()
|
||||
}
|
||||
|
||||
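/// Reports a dev server as Online only when it has an active connection and has not been marked offline.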
pub fn dev_server_status(&self, dev_server_id: DevServerId) -> proto::DevServerStatus {
|
||||
if self.dev_server_connection_id(dev_server_id).is_some()
|
||||
&& !self.offline_dev_servers.contains(&dev_server_id)
|
||||
{
|
||||
proto::DevServerStatus::Online
|
||||
} else {
|
||||
proto::DevServerStatus::Offline
|
||||
}
|
||||
}
|
||||
|
||||
pub fn dev_server_connection_id(&self, dev_server_id: DevServerId) -> Option<ConnectionId> {
|
||||
self.connected_dev_servers.get(&dev_server_id).copied()
|
||||
}
|
||||
|
||||
pub fn online_dev_server_connection_id(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
) -> Result<ConnectionId> {
|
||||
match self.connected_dev_servers.get(&dev_server_id) {
|
||||
Some(cid) => Ok(*cid),
|
||||
None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
|
||||
}
|
||||
}
|
||||
|
||||
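/// Returns the dev server's connection id, failing if the server is offline or its Zed version is older than required.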
pub fn dev_server_connection_id_supporting(
|
||||
&self,
|
||||
dev_server_id: DevServerId,
|
||||
required: ZedVersion,
|
||||
) -> Result<ConnectionId> {
|
||||
match self.connected_dev_servers.get(&dev_server_id) {
|
||||
Some(cid) if self.connections[cid].zed_version >= required => Ok(*cid),
|
||||
Some(_) => Err(anyhow!(proto::ErrorCode::RemoteUpgradeRequired)),
|
||||
None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn channel_user_ids(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
@@ -227,39 +159,22 @@ impl ConnectionPool {
|
||||
#[cfg(test)]
|
||||
pub fn check_invariants(&self) {
|
||||
for (connection_id, connection) in &self.connections {
|
||||
match &connection.principal_id {
|
||||
PrincipalId::UserId(user_id) => {
|
||||
assert!(self
|
||||
.connected_users
|
||||
.get(user_id)
|
||||
.unwrap()
|
||||
.connection_ids
|
||||
.contains(connection_id));
|
||||
}
|
||||
PrincipalId::DevServerId(dev_server_id) => {
|
||||
assert_eq!(
|
||||
self.connected_dev_servers.get(dev_server_id).unwrap(),
|
||||
connection_id
|
||||
);
|
||||
}
|
||||
}
|
||||
assert!(self
|
||||
.connected_users
|
||||
.get(&connection.user_id)
|
||||
.unwrap()
|
||||
.connection_ids
|
||||
.contains(connection_id));
|
||||
}
|
||||
|
||||
for (user_id, state) in &self.connected_users {
|
||||
for connection_id in &state.connection_ids {
|
||||
assert_eq!(
|
||||
self.connections.get(connection_id).unwrap().principal_id,
|
||||
PrincipalId::UserId(*user_id)
|
||||
self.connections.get(connection_id).unwrap().user_id,
|
||||
*user_id
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for (dev_server_id, connection_id) in &self.connected_dev_servers {
|
||||
assert_eq!(
|
||||
self.connections.get(connection_id).unwrap().principal_id,
|
||||
PrincipalId::DevServerId(*dev_server_id)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -8,7 +8,6 @@ mod channel_buffer_tests;
|
||||
mod channel_guest_tests;
|
||||
mod channel_message_tests;
|
||||
mod channel_tests;
|
||||
mod dev_server_tests;
|
||||
mod editor_tests;
|
||||
mod following_tests;
|
||||
mod integration_tests;
|
||||
|
||||
@@ -95,7 +95,9 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test
|
||||
let room_b = cx_b
|
||||
.read(ActiveCall::global)
|
||||
.update(cx_b, |call, _| call.room().unwrap().clone());
|
||||
cx_b.simulate_keystrokes("cmd-p 1 enter");
|
||||
cx_b.simulate_keystrokes("cmd-p");
|
||||
cx_a.run_until_parked();
|
||||
cx_b.simulate_keystrokes("1 enter");
|
||||
|
||||
let (project_b, editor_b) = workspace_b.update(cx_b, |workspace, cx| {
|
||||
(
|
||||
|
||||
@@ -1,643 +0,0 @@
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
use call::ActiveCall;
|
||||
use editor::Editor;
|
||||
use fs::Fs;
|
||||
use gpui::{TestAppContext, VisualTestContext, WindowHandle};
|
||||
use rpc::{proto::DevServerStatus, ErrorCode, ErrorExt};
|
||||
use serde_json::json;
|
||||
use workspace::{AppState, Workspace};
|
||||
|
||||
use crate::tests::{following_tests::join_channel, TestServer};
|
||||
|
||||
use super::TestClient;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
|
||||
let (server, client) = TestServer::start1(cx).await;
|
||||
|
||||
let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone());
|
||||
|
||||
let resp = store
|
||||
.update(cx, |store, cx| {
|
||||
store.create_dev_server("server-1".to_string(), None, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
store.update(cx, |store, _| {
|
||||
assert_eq!(store.dev_servers().len(), 1);
|
||||
assert_eq!(store.dev_servers()[0].name, "server-1");
|
||||
assert_eq!(store.dev_servers()[0].status, DevServerStatus::Offline);
|
||||
});
|
||||
|
||||
let dev_server = server.create_dev_server(resp.access_token, cx2).await;
|
||||
cx.executor().run_until_parked();
|
||||
store.update(cx, |store, _| {
|
||||
assert_eq!(store.dev_servers()[0].status, DevServerStatus::Online);
|
||||
});
|
||||
|
||||
dev_server
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/remote",
|
||||
json!({
|
||||
"1.txt": "remote\nremote\nremote",
|
||||
"2.js": "function two() { return 2; }",
|
||||
"3.rs": "mod test",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
store
|
||||
.update(cx, |store, cx| {
|
||||
store.create_dev_server_project(
|
||||
client::DevServerId(resp.dev_server_id),
|
||||
"/remote".to_string(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let remote_workspace = store
|
||||
.update(cx, |store, cx| {
|
||||
let projects = store.dev_server_projects();
|
||||
assert_eq!(projects.len(), 1);
|
||||
assert_eq!(projects[0].paths, vec!["/remote"]);
|
||||
workspace::join_dev_server_project(
|
||||
projects[0].id,
|
||||
projects[0].project_id.unwrap(),
|
||||
client.app_state.clone(),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let cx = VisualTestContext::from_window(remote_workspace.into(), cx).as_mut();
|
||||
cx.simulate_keystrokes("cmd-p 1 enter");
|
||||
|
||||
let editor = remote_workspace
|
||||
.update(cx, |ws, cx| {
|
||||
ws.active_item_as::<Editor>(cx).unwrap().clone()
|
||||
})
|
||||
.unwrap();
|
||||
editor.update(cx, |ed, cx| {
|
||||
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
|
||||
});
|
||||
cx.simulate_input("wow!");
|
||||
cx.simulate_keystrokes("cmd-s");
|
||||
|
||||
let content = dev_server
|
||||
.fs()
|
||||
.load(Path::new("/remote/1.txt"))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(content, "wow!remote\nremote\nremote\n");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_env_files(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
cx2: &mut gpui::TestAppContext,
|
||||
cx3: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
let cx1 = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();
|
||||
cx1.simulate_keystrokes("cmd-p . e enter");
|
||||
|
||||
let editor = remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
ws.active_item_as::<Editor>(cx).unwrap().clone()
|
||||
})
|
||||
.unwrap();
|
||||
editor.update(cx1, |ed, cx| {
|
||||
assert_eq!(ed.text(cx).to_string(), "SECRET");
|
||||
});
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
channel_id,
|
||||
client1.app_state.clone(),
|
||||
Some(remote_workspace),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
assert!(ws.project().read(cx).is_shared());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
join_channel(channel_id, &client2, cx2).await.unwrap();
|
||||
cx2.executor().run_until_parked();
|
||||
|
||||
let (workspace2, cx2) = client2.active_workspace(cx2);
|
||||
let editor = workspace2.update(cx2, |ws, cx| {
|
||||
ws.active_item_as::<Editor>(cx).unwrap().clone()
|
||||
});
|
||||
// TODO: it'd be nice to hide .env files from other people
|
||||
editor.update(cx2, |ed, cx| {
|
||||
assert_eq!(ed.text(cx).to_string(), "SECRET");
|
||||
});
|
||||
}
|
||||
|
||||
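/// Test helper: creates a dev server, populates it with a project under "/remote", and joins that project from the client, returning the dev server client and the remote workspace window.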
async fn create_dev_server_project(
|
||||
server: &TestServer,
|
||||
client_app_state: Arc<AppState>,
|
||||
cx: &mut TestAppContext,
|
||||
cx_devserver: &mut TestAppContext,
|
||||
) -> (TestClient, WindowHandle<Workspace>) {
|
||||
let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone());
|
||||
|
||||
let resp = store
|
||||
.update(cx, |store, cx| {
|
||||
store.create_dev_server("server-1".to_string(), None, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let dev_server = server
|
||||
.create_dev_server(resp.access_token, cx_devserver)
|
||||
.await;
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
dev_server
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/remote",
|
||||
json!({
|
||||
"1.txt": "remote\nremote\nremote",
|
||||
".env": "SECRET",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
store
|
||||
.update(cx, |store, cx| {
|
||||
store.create_dev_server_project(
|
||||
client::DevServerId(resp.dev_server_id),
|
||||
"/remote".to_string(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let workspace = store
|
||||
.update(cx, |store, cx| {
|
||||
let projects = store.dev_server_projects();
|
||||
assert_eq!(projects.len(), 1);
|
||||
assert_eq!(projects[0].paths, vec!["/remote"]);
|
||||
workspace::join_dev_server_project(
|
||||
projects[0].id,
|
||||
projects[0].project_id.unwrap(),
|
||||
client_app_state,
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
(dev_server, workspace)
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_leave_room(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
cx2: &mut gpui::TestAppContext,
|
||||
cx3: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
channel_id,
|
||||
client1.app_state.clone(),
|
||||
Some(remote_workspace),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
assert!(ws.project().read(cx).is_shared());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
join_channel(channel_id, &client2, cx2).await.unwrap();
|
||||
cx2.executor().run_until_parked();
|
||||
|
||||
cx1.update(|cx| ActiveCall::global(cx).update(cx, |active_call, cx| active_call.hang_up(cx)))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
let (workspace, cx2) = client2.active_workspace(cx2);
|
||||
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_delete(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
cx2: &mut gpui::TestAppContext,
|
||||
cx3: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
channel_id,
|
||||
client1.app_state.clone(),
|
||||
Some(remote_workspace),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
assert!(ws.project().read(cx).is_shared());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
join_channel(channel_id, &client2, cx2).await.unwrap();
|
||||
cx2.executor().run_until_parked();
|
||||
|
||||
cx1.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
|
||||
store.delete_dev_server_project(store.dev_server_projects().first().unwrap().id, cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
let (workspace, cx2) = client2.active_workspace(cx2);
|
||||
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
|
||||
|
||||
cx1.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, _| {
|
||||
assert_eq!(store.dev_server_projects().len(), 0);
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_rename(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
cx2: &mut gpui::TestAppContext,
|
||||
cx3: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
channel_id,
|
||||
client1.app_state.clone(),
|
||||
Some(remote_workspace),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
assert!(ws.project().read(cx).is_shared());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
join_channel(channel_id, &client2, cx2).await.unwrap();
|
||||
cx2.executor().run_until_parked();
|
||||
|
||||
cx1.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
|
||||
store.rename_dev_server(
|
||||
store.dev_servers().first().unwrap().id,
|
||||
"name-edited".to_string(),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
cx1.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, _| {
|
||||
assert_eq!(store.dev_servers().first().unwrap().name, "name-edited");
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_refresh_access_token(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
cx2: &mut gpui::TestAppContext,
|
||||
cx3: &mut gpui::TestAppContext,
|
||||
cx4: &mut gpui::TestAppContext,
|
||||
) {
|
||||
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
|
||||
|
||||
cx1.update(|cx| {
|
||||
workspace::join_channel(
|
||||
channel_id,
|
||||
client1.app_state.clone(),
|
||||
Some(remote_workspace),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
remote_workspace
|
||||
.update(cx1, |ws, cx| {
|
||||
assert!(ws.project().read(cx).is_shared());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
join_channel(channel_id, &client2, cx2).await.unwrap();
|
||||
cx2.executor().run_until_parked();
|
||||
|
||||
// Regenerate the access token
|
||||
let new_token_response = cx1
|
||||
.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
|
||||
store.regenerate_dev_server_token(store.dev_servers().first().unwrap().id, cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
// Assert that the other client was disconnected
|
||||
let (workspace, cx2) = client2.active_workspace(cx2);
|
||||
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
|
||||
|
||||
// Assert that the owner of the dev server does not see the dev server as online anymore
|
||||
let (workspace, cx1) = client1.active_workspace(cx1);
|
||||
cx1.update(|cx| {
|
||||
assert!(workspace.read(cx).project().read(cx).is_disconnected(cx));
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, _| {
|
||||
assert_eq!(
|
||||
store.dev_servers().first().unwrap().status,
|
||||
DevServerStatus::Offline
|
||||
);
|
||||
})
|
||||
});
|
||||
|
||||
// Reconnect the dev server with the new token
|
||||
let _dev_server = server
|
||||
.create_dev_server(new_token_response.access_token, cx4)
|
||||
.await;
|
||||
|
||||
cx1.executor().run_until_parked();
|
||||
|
||||
// Assert that the dev server is online again
|
||||
cx1.update(|cx| {
|
||||
dev_server_projects::Store::global(cx).update(cx, |store, _| {
|
||||
assert_eq!(store.dev_servers().len(), 1);
|
||||
assert_eq!(
|
||||
store.dev_servers().first().unwrap().status,
|
||||
DevServerStatus::Online
|
||||
);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
async fn test_dev_server_reconnect(
    cx1: &mut gpui::TestAppContext,
    cx2: &mut gpui::TestAppContext,
    cx3: &mut gpui::TestAppContext,
) {
    let (mut server, client1) = TestServer::start1(cx1).await;
    let channel_id = server
        .make_channel("test", None, (&client1, cx1), &mut [])
        .await;

    let (_dev_server, remote_workspace) =
        create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;

    cx1.update(|cx| {
        workspace::join_channel(
            channel_id,
            client1.app_state.clone(),
            Some(remote_workspace),
            cx,
        )
    })
    .await
    .unwrap();
    cx1.executor().run_until_parked();

    remote_workspace
        .update(cx1, |ws, cx| {
            assert!(ws.project().read(cx).is_shared());
        })
        .unwrap();

    drop(client1);

    let client2 = server.create_client(cx2, "user_a").await;

    let store = cx2.update(|cx| dev_server_projects::Store::global(cx).clone());

    store
        .update(cx2, |store, cx| {
            let projects = store.dev_server_projects();
            workspace::join_dev_server_project(
                projects[0].id,
                projects[0].project_id.unwrap(),
                client2.app_state.clone(),
                None,
                cx,
            )
        })
        .await
        .unwrap();
}

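// Opening files in the remote workspace should keep working after the collab server is reset.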
#[gpui::test]
async fn test_dev_server_restart(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
    let (server, client1) = TestServer::start1(cx1).await;

    let (_dev_server, remote_workspace) =
        create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
    let cx = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();

    server.reset().await;
    cx.run_until_parked();

    cx.simulate_keystrokes("cmd-p 1 enter");
    remote_workspace
        .update(cx, |ws, cx| {
            ws.active_item_as::<Editor>(cx)
                .unwrap()
                .update(cx, |ed, cx| {
                    assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
                })
        })
        .unwrap();
}

#[gpui::test]
async fn test_create_dev_server_project_path_validation(
    cx1: &mut gpui::TestAppContext,
    cx2: &mut gpui::TestAppContext,
    cx3: &mut gpui::TestAppContext,
) {
    let (server, client1) = TestServer::start1(cx1).await;
    let _channel_id = server
        .make_channel("test", None, (&client1, cx1), &mut [])
        .await;

    // Creating a project with a path that does exist should not fail
    let (_dev_server, _) =
        create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;

    cx1.executor().run_until_parked();

    let store = cx1.update(|cx| dev_server_projects::Store::global(cx).clone());

    let resp = store
        .update(cx1, |store, cx| {
            store.create_dev_server("server-2".to_string(), None, cx)
        })
        .await
        .unwrap();

    cx1.executor().run_until_parked();

    let _dev_server = server.create_dev_server(resp.access_token, cx3).await;

    cx1.executor().run_until_parked();

    // Creating a remote project with a path that does not exist should fail
    let result = store
        .update(cx1, |store, cx| {
            store.create_dev_server_project(
                client::DevServerId(resp.dev_server_id),
                "/notfound".to_string(),
                cx,
            )
        })
        .await;

    cx1.executor().run_until_parked();

    let error = result.unwrap_err();
    assert!(matches!(
        error.error_code(),
        ErrorCode::DevServerProjectPathDoesNotExist
    ));
}

#[gpui::test]
async fn test_save_as_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
    let (server, client1) = TestServer::start1(cx1).await;

    // Creating a project with a path that does exist should not fail
    let (dev_server, remote_workspace) =
        create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;

    let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1);

    cx.simulate_keystrokes("cmd-p 1 enter");
    cx.simulate_keystrokes("cmd-shift-s");
    cx.simulate_input("2.txt");
    cx.simulate_keystrokes("enter");

    cx.executor().run_until_parked();

    let title = remote_workspace
        .update(&mut cx, |ws, cx| {
            let active_item = ws.active_item(cx).unwrap();
            active_item.tab_description(0, cx).unwrap()
        })
        .unwrap();

    assert_eq!(title, "2.txt");

    let path = Path::new("/remote/2.txt");
    assert_eq!(
        dev_server.fs().load(path).await.unwrap(),
        "remote\nremote\nremote"
    );
}

#[gpui::test]
async fn test_new_file_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
    let (server, client1) = TestServer::start1(cx1).await;

    // Creating a project with a path that does exist should not fail
    let (dev_server, remote_workspace) =
        create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;

    let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1);

    cx.simulate_keystrokes("cmd-n");
    cx.simulate_input("new!");
    cx.simulate_keystrokes("cmd-shift-s");
    cx.simulate_input("2.txt");
    cx.simulate_keystrokes("enter");

    cx.executor().run_until_parked();

    let title = remote_workspace
        .update(&mut cx, |ws, cx| {
            ws.active_item(cx).unwrap().tab_description(0, cx).unwrap()
        })
        .unwrap();

    assert_eq!(title, "2.txt");

    let path = Path::new("/remote/2.txt");
    assert_eq!(dev_server.fs().load(path).await.unwrap(), "new!");
}

@@ -1978,6 +1978,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
        enabled: false,
        delay_ms: None,
        min_column: None,
        show_commit_summary: false,
    });
    cx_a.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {

@@ -1589,8 +1589,9 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T
        .await;
    let (workspace_b, cx_b) = client_b.join_workspace(channel_id, cx_b).await;

    cx_a.simulate_keystrokes("cmd-p 2 enter");
    cx_a.simulate_keystrokes("cmd-p");
    cx_a.run_until_parked();
    cx_a.simulate_keystrokes("2 enter");

    let editor_a = workspace_a.update(cx_a, |workspace, cx| {
        workspace.active_item_as::<Editor>(cx).unwrap()

@@ -1956,9 +1957,10 @@ async fn test_following_to_channel_notes_without_a_shared_project(
    });
    channel_notes_1_b.update(cx_b, |notes, cx| {
        assert_eq!(notes.channel(cx).unwrap().name, "channel-1");
        let editor = notes.editor.read(cx);
        assert_eq!(editor.text(cx), "Hello from A.");
        assert_eq!(editor.selections.ranges::<usize>(cx), &[3..4]);
        notes.editor.update(cx, |editor, cx| {
            assert_eq!(editor.text(cx), "Hello from A.");
            assert_eq!(editor.selections.ranges::<usize>(cx), &[3..4]);
        })
    });

    // Client A opens the notes for channel 2.

@@ -2041,7 +2043,9 @@ async fn test_following_to_channel_notes_other_workspace(
    share_workspace(&workspace_a, cx_a).await.unwrap();

    // a opens 1.txt
    cx_a.simulate_keystrokes("cmd-p 1 enter");
    cx_a.simulate_keystrokes("cmd-p");
    cx_a.run_until_parked();
    cx_a.simulate_keystrokes("1 enter");
    cx_a.run_until_parked();
    workspace_a.update(cx_a, |workspace, cx| {
        let editor = workspace.active_item(cx).unwrap();

@@ -2098,7 +2102,9 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
    share_workspace(&workspace_a, cx_a).await.unwrap();

    // a opens 1.txt
    cx_a.simulate_keystrokes("cmd-p 1 enter");
    cx_a.simulate_keystrokes("cmd-p");
    cx_a.run_until_parked();
    cx_a.simulate_keystrokes("1 enter");
    cx_a.run_until_parked();
    workspace_a.update(cx_a, |workspace, cx| {
        let editor = workspace.active_item(cx).unwrap();

@@ -2118,7 +2124,9 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
    cx_b.simulate_keystrokes("down");

    // a opens a different file while not followed
    cx_a.simulate_keystrokes("cmd-p 2 enter");
    cx_a.simulate_keystrokes("cmd-p");
    cx_a.run_until_parked();
    cx_a.simulate_keystrokes("2 enter");

    workspace_b.update(cx_b, |workspace, cx| {
        let editor = workspace.active_item_as::<Editor>(cx).unwrap();

@@ -2128,7 +2136,9 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
    // a opens a file in a new window
    let (_, cx_a2) = client_a.build_test_workspace(&mut cx_a2).await;
    cx_a2.update(|cx| cx.activate_window());
    cx_a2.simulate_keystrokes("cmd-p 3 enter");
    cx_a2.simulate_keystrokes("cmd-p");
    cx_a2.run_until_parked();
    cx_a2.simulate_keystrokes("3 enter");
    cx_a2.run_until_parked();

    // b starts following a again

@@ -21,8 +21,8 @@ use language::{
    language_settings::{
        AllLanguageSettings, Formatter, FormatterList, PrettierSettings, SelectedFormatter,
    },
    tree_sitter_rust, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig,
    LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, FakeLspAdapter,
    Language, LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
};
use live_kit_client::MacOSDisplay;
use lsp::LanguageServerId;

@@ -4461,7 +4461,7 @@ async fn test_prettier_formatting_buffer(
            },
            ..Default::default()
        },
        Some(tree_sitter_rust::LANGUAGE.into()),
        Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
    )));
    let mut fake_language_servers = client_a.language_registry().register_fake_lsp(
        "TypeScript",

@@ -6575,3 +6575,95 @@ async fn test_context_collaboration_with_reconnect(
        assert!(context.buffer().read(cx).read_only());
    });
}

#[gpui::test]
async fn test_remote_git_branches(
    executor: BackgroundExecutor,
    cx_a: &mut TestAppContext,
    cx_b: &mut TestAppContext,
) {
    let mut server = TestServer::start(executor.clone()).await;
    let client_a = server.create_client(cx_a, "user_a").await;
    let client_b = server.create_client(cx_b, "user_b").await;
    server
        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
        .await;
    let active_call_a = cx_a.read(ActiveCall::global);

    client_a
        .fs()
        .insert_tree("/project", serde_json::json!({ ".git":{} }))
        .await;
    let branches = ["main", "dev", "feature-1"];
    client_a
        .fs()
        .insert_branches(Path::new("/project/.git"), &branches);

    let (project_a, worktree_id) = client_a.build_local_project("/project", cx_a).await;
    let project_id = active_call_a
        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
        .await
        .unwrap();
    let project_b = client_b.join_remote_project(project_id, cx_b).await;

    let root_path = ProjectPath::root_path(worktree_id);
    // Client A sees that a guest has joined.
    executor.run_until_parked();

    let branches_b = cx_b
        .update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx)))
        .await
        .unwrap();

    let new_branch = branches[2];

    let branches_b = branches_b
        .into_iter()
        .map(|branch| branch.name)
        .collect::<Vec<_>>();

    assert_eq!(&branches_b, &branches);

    cx_b.update(|cx| {
        project_b.update(cx, |project, cx| {
            project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx)
        })
    })
    .await
    .unwrap();

    executor.run_until_parked();

    let host_branch = cx_a.update(|cx| {
        project_a.update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store
                    .current_branch(root_path.clone(), cx)
                    .unwrap()
            })
        })
    });

    assert_eq!(host_branch.as_ref(), branches[2]);

    // Also try creating a new branch
    cx_b.update(|cx| {
        project_b.update(cx, |project, cx| {
            project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx)
        })
    })
    .await
    .unwrap();

    executor.run_until_parked();

    let host_branch = cx_a.update(|cx| {
        project_a.update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.current_branch(root_path, cx).unwrap()
            })
        })
    });

    assert_eq!(host_branch.as_ref(), "totally-new-branch");
}

@@ -1,14 +1,27 @@
use crate::tests::TestServer;
use call::ActiveCall;
use collections::HashSet;
use fs::{FakeFs, Fs as _};
use gpui::{Context as _, TestAppContext};
use futures::StreamExt as _;
use gpui::{BackgroundExecutor, Context as _, TestAppContext, UpdateGlobal as _};
use http_client::BlockedHttpClient;
use language::{language_settings::language_settings, LanguageRegistry};
use language::{
    language_settings::{
        language_settings, AllLanguageSettings, Formatter, FormatterList, PrettierSettings,
        SelectedFormatter,
    },
    tree_sitter_typescript, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
    LanguageRegistry,
};
use node_runtime::NodeRuntime;
use project::ProjectPath;
use project::{
    lsp_store::{FormatTarget, FormatTrigger},
    ProjectPath,
};
use remote::SshRemoteClient;
use remote_server::{HeadlessAppState, HeadlessProject};
use serde_json::json;
use settings::SettingsStore;
use std::{path::Path, sync::Arc};

#[gpui::test(iterations = 10)]

@@ -26,7 +39,7 @@ async fn test_sharing_an_ssh_remote_project(
        .await;

    // Set up project on remote FS
    let (port, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
    let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
    let remote_fs = FakeFs::new(server_cx.executor());
    remote_fs
        .insert_tree(

@@ -67,7 +80,7 @@ async fn test_sharing_an_ssh_remote_project(
        )
    });

    let client_ssh = SshRemoteClient::fake_client(port, cx_a).await;
    let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
    let (project_a, worktree_id) = client_a
        .build_ssh_project("/code/project1", client_ssh, cx_a)
        .await;

@@ -174,3 +187,311 @@ async fn test_sharing_an_ssh_remote_project(
        );
    });
}

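// Git branch listing and switching should work for a guest collaborator when the shared
// project is hosted on a headless SSH remote.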
#[gpui::test]
async fn test_ssh_collaboration_git_branches(
    executor: BackgroundExecutor,
    cx_a: &mut TestAppContext,
    cx_b: &mut TestAppContext,
    server_cx: &mut TestAppContext,
) {
    cx_a.set_name("a");
    cx_b.set_name("b");
    server_cx.set_name("server");

    let mut server = TestServer::start(executor.clone()).await;
    let client_a = server.create_client(cx_a, "user_a").await;
    let client_b = server.create_client(cx_b, "user_b").await;
    server
        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
        .await;

    // Set up project on remote FS
    let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
    let remote_fs = FakeFs::new(server_cx.executor());
    remote_fs
        .insert_tree("/project", serde_json::json!({ ".git":{} }))
        .await;

    let branches = ["main", "dev", "feature-1"];
    remote_fs.insert_branches(Path::new("/project/.git"), &branches);

    // User A connects to the remote project via SSH.
    server_cx.update(HeadlessProject::init);
    let remote_http_client = Arc::new(BlockedHttpClient);
    let node = NodeRuntime::unavailable();
    let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
    let headless_project = server_cx.new_model(|cx| {
        client::init_settings(cx);
        HeadlessProject::new(
            HeadlessAppState {
                session: server_ssh,
                fs: remote_fs.clone(),
                http_client: remote_http_client,
                node_runtime: node,
                languages,
            },
            cx,
        )
    });

    let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
    let (project_a, worktree_id) = client_a
        .build_ssh_project("/project", client_ssh, cx_a)
        .await;

    // While the SSH worktree is being scanned, user A shares the remote project.
    let active_call_a = cx_a.read(ActiveCall::global);
    let project_id = active_call_a
        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
        .await
        .unwrap();

    // User B joins the project.
    let project_b = client_b.join_remote_project(project_id, cx_b).await;

    // Give client A some time to see that B has joined, and that the headless server
    // has some git repositories
    executor.run_until_parked();

    let root_path = ProjectPath::root_path(worktree_id);

    let branches_b = cx_b
        .update(|cx| project_b.update(cx, |project, cx| project.branches(root_path.clone(), cx)))
        .await
        .unwrap();

    let new_branch = branches[2];

    let branches_b = branches_b
        .into_iter()
        .map(|branch| branch.name)
        .collect::<Vec<_>>();

    assert_eq!(&branches_b, &branches);

    cx_b.update(|cx| {
        project_b.update(cx, |project, cx| {
            project.update_or_create_branch(root_path.clone(), new_branch.to_string(), cx)
        })
    })
    .await
    .unwrap();

    executor.run_until_parked();

    let server_branch = server_cx.update(|cx| {
        headless_project.update(cx, |headless_project, cx| {
            headless_project
                .worktree_store
                .update(cx, |worktree_store, cx| {
                    worktree_store
                        .current_branch(root_path.clone(), cx)
                        .unwrap()
                })
        })
    });

    assert_eq!(server_branch.as_ref(), branches[2]);

    // Also try creating a new branch
    cx_b.update(|cx| {
        project_b.update(cx, |project, cx| {
            project.update_or_create_branch(root_path.clone(), "totally-new-branch".to_string(), cx)
        })
    })
    .await
    .unwrap();

    executor.run_until_parked();

    let server_branch = server_cx.update(|cx| {
        headless_project.update(cx, |headless_project, cx| {
            headless_project
                .worktree_store
                .update(cx, |worktree_store, cx| {
                    worktree_store.current_branch(root_path, cx).unwrap()
                })
        })
    });

    assert_eq!(server_branch.as_ref(), "totally-new-branch");
}

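// With prettier enabled, format requests from either collaborator should be handled by
// prettier; the fake language server panics if it receives a formatting request.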
#[gpui::test]
async fn test_ssh_collaboration_formatting_with_prettier(
    executor: BackgroundExecutor,
    cx_a: &mut TestAppContext,
    cx_b: &mut TestAppContext,
    server_cx: &mut TestAppContext,
) {
    cx_a.set_name("a");
    cx_b.set_name("b");
    server_cx.set_name("server");

    let mut server = TestServer::start(executor.clone()).await;
    let client_a = server.create_client(cx_a, "user_a").await;
    let client_b = server.create_client(cx_b, "user_b").await;
    server
        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
        .await;

    let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
    let remote_fs = FakeFs::new(server_cx.executor());
    let buffer_text = "let one = \"two\"";
    let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
    remote_fs
        .insert_tree("/project", serde_json::json!({ "a.ts": buffer_text }))
        .await;

    let test_plugin = "test_plugin";
    let ts_lang = Arc::new(Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["ts".to_string()],
                ..LanguageMatcher::default()
            },
            ..LanguageConfig::default()
        },
        Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
    ));
    client_a.language_registry().add(ts_lang.clone());
    client_b.language_registry().add(ts_lang.clone());

    let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
    let mut fake_language_servers = languages.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            prettier_plugins: vec![test_plugin],
            ..Default::default()
        },
    );

    // User A connects to the remote project via SSH.
    server_cx.update(HeadlessProject::init);
    let remote_http_client = Arc::new(BlockedHttpClient);
    let _headless_project = server_cx.new_model(|cx| {
        client::init_settings(cx);
        HeadlessProject::new(
            HeadlessAppState {
                session: server_ssh,
                fs: remote_fs.clone(),
                http_client: remote_http_client,
                node_runtime: NodeRuntime::unavailable(),
                languages,
            },
            cx,
        )
    });

    let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
    let (project_a, worktree_id) = client_a
        .build_ssh_project("/project", client_ssh, cx_a)
        .await;

    // While the SSH worktree is being scanned, user A shares the remote project.
    let active_call_a = cx_a.read(ActiveCall::global);
    let project_id = active_call_a
        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
        .await
        .unwrap();

    // User B joins the project.
    let project_b = client_b.join_remote_project(project_id, cx_b).await;
    executor.run_until_parked();

    // Opens the buffer and formats it
    let buffer_b = project_b
        .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
        .await
        .expect("user B opens buffer for formatting");

    cx_a.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<AllLanguageSettings>(cx, |file| {
                file.defaults.formatter = Some(SelectedFormatter::Auto);
                file.defaults.prettier = Some(PrettierSettings {
                    allowed: true,
                    ..PrettierSettings::default()
                });
            });
        });
    });
    cx_b.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<AllLanguageSettings>(cx, |file| {
                file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
                    vec![Formatter::LanguageServer { name: None }].into(),
                )));
                file.defaults.prettier = Some(PrettierSettings {
                    allowed: true,
                    ..PrettierSettings::default()
                });
            });
        });
    });
    let fake_language_server = fake_language_servers.next().await.unwrap();
    fake_language_server.handle_request::<lsp::request::Formatting, _, _>(|_, _| async move {
        panic!(
            "Unexpected: prettier should be preferred since it's enabled and language supports it"
        )
    });

    project_b
        .update(cx_b, |project, cx| {
            project.format(
                HashSet::from_iter([buffer_b.clone()]),
                true,
                FormatTrigger::Save,
                FormatTarget::Buffer,
                cx,
            )
        })
        .await
        .unwrap();

    executor.run_until_parked();
    assert_eq!(
        buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
        buffer_text.to_string() + "\n" + prettier_format_suffix,
        "Prettier formatting was not applied to client buffer after client's request"
    );

    // User A opens and formats the same buffer too
    let buffer_a = project_a
        .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.ts"), cx))
        .await
        .expect("user A opens buffer for formatting");

    cx_a.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings::<AllLanguageSettings>(cx, |file| {
                file.defaults.formatter = Some(SelectedFormatter::Auto);
                file.defaults.prettier = Some(PrettierSettings {
                    allowed: true,
                    ..PrettierSettings::default()
                });
            });
        });
    });
    project_a
        .update(cx_a, |project, cx| {
            project.format(
                HashSet::from_iter([buffer_a.clone()]),
                true,
                FormatTrigger::Manual,
                FormatTarget::Buffer,
                cx,
            )
        })
        .await
        .unwrap();

    executor.run_until_parked();
    assert_eq!(
        buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
        buffer_text.to_string() + "\n" + prettier_format_suffix + "\n" + prettier_format_suffix,
        "Prettier formatting was not applied to client buffer after host's request"
    );
}

@@ -1,5 +1,4 @@
use crate::{
    auth::split_dev_server_token,
    db::{tests::TestDb, NewUserParams, UserId},
    executor::Executor,
    rpc::{Principal, Server, ZedVersion, CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},

@@ -204,7 +203,7 @@ impl TestServer {
            .override_authenticate(move |cx| {
                cx.spawn(|_| async move {
                    let access_token = "the-token".to_string();
                    Ok(Credentials::User {
                    Ok(Credentials {
                        user_id: user_id.to_proto(),
                        access_token,
                    })

@@ -213,7 +212,7 @@ impl TestServer {
            .override_establish_connection(move |credentials, cx| {
                assert_eq!(
                    credentials,
                    &Credentials::User {
                    &Credentials {
                        user_id: user_id.0 as u64,
                        access_token: "the-token".into()
                    }

@@ -297,7 +296,6 @@ impl TestServer {
            collab_ui::init(&app_state, cx);
            file_finder::init(cx);
            menu::init();
            dev_server_projects::init(client.clone(), cx);
            settings::KeymapFile::load_asset(os_keymap, cx).unwrap();
            language_model::LanguageModelRegistry::test(cx);
            assistant::context_store::init(&client.clone().into());

@@ -319,135 +317,6 @@ impl TestServer {
        client
    }

    pub async fn create_dev_server(
        &self,
        access_token: String,
        cx: &mut TestAppContext,
    ) -> TestClient {
        cx.update(|cx| {
            if cx.has_global::<SettingsStore>() {
                panic!("Same cx used to create two test clients")
            }
            let settings = SettingsStore::test(cx);
            cx.set_global(settings);
            release_channel::init(SemanticVersion::default(), cx);
            client::init_settings(cx);
        });
        let (dev_server_id, _) = split_dev_server_token(&access_token).unwrap();

        let clock = Arc::new(FakeSystemClock::default());
        let http = FakeHttpClient::with_404_response();
        let mut client = cx.update(|cx| Client::new(clock, http.clone(), cx));
        let server = self.server.clone();
        let db = self.app_state.db.clone();
        let connection_killers = self.connection_killers.clone();
        let forbid_connections = self.forbid_connections.clone();
        Arc::get_mut(&mut client)
            .unwrap()
            .set_id(1)
            .set_dev_server_token(client::DevServerToken(access_token.clone()))
            .override_establish_connection(move |credentials, cx| {
                assert_eq!(
                    credentials,
                    &Credentials::DevServer {
                        token: client::DevServerToken(access_token.to_string())
                    }
                );

                let server = server.clone();
                let db = db.clone();
                let connection_killers = connection_killers.clone();
                let forbid_connections = forbid_connections.clone();
                cx.spawn(move |cx| async move {
                    if forbid_connections.load(SeqCst) {
                        Err(EstablishConnectionError::other(anyhow!(
                            "server is forbidding connections"
                        )))
                    } else {
                        let (client_conn, server_conn, killed) =
                            Connection::in_memory(cx.background_executor().clone());
                        let (connection_id_tx, connection_id_rx) = oneshot::channel();
                        let dev_server = db
                            .get_dev_server(dev_server_id)
                            .await
                            .expect("retrieving dev_server failed");
                        cx.background_executor()
                            .spawn(server.handle_connection(
                                server_conn,
                                "dev-server".to_string(),
                                Principal::DevServer(dev_server),
                                ZedVersion(SemanticVersion::new(1, 0, 0)),
                                None,
                                Some(connection_id_tx),
                                Executor::Deterministic(cx.background_executor().clone()),
                            ))
                            .detach();
                        let connection_id = connection_id_rx.await.map_err(|e| {
                            EstablishConnectionError::Other(anyhow!(
                                "{} (is server shutting down?)",
                                e
                            ))
                        })?;
                        connection_killers
                            .lock()
                            .insert(connection_id.into(), killed);
                        Ok(client_conn)
                    }
                })
            });

        let fs = FakeFs::new(cx.executor());
        let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
        let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx));
        let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
        let session = cx.new_model(|cx| AppSession::new(Session::test(), cx));
        let app_state = Arc::new(workspace::AppState {
            client: client.clone(),
            user_store: user_store.clone(),
            workspace_store,
            languages: language_registry,
            fs: fs.clone(),
            build_window_options: |_, _| Default::default(),
            node_runtime: NodeRuntime::unavailable(),
            session,
        });

        cx.update(|cx| {
            theme::init(theme::LoadThemes::JustBase, cx);
            Project::init(&client, cx);
            client::init(&client, cx);
            language::init(cx);
            editor::init(cx);
            workspace::init(app_state.clone(), cx);
            call::init(client.clone(), user_store.clone(), cx);
            channel::init(&client, user_store.clone(), cx);
            notifications::init(client.clone(), user_store, cx);
            collab_ui::init(&app_state, cx);
            file_finder::init(cx);
            menu::init();
            headless::init(
                client.clone(),
                headless::AppState {
                    languages: app_state.languages.clone(),
                    user_store: app_state.user_store.clone(),
                    fs: fs.clone(),
                    node_runtime: app_state.node_runtime.clone(),
                },
                cx,
            )
        })
        .await
        .unwrap();

        TestClient {
            app_state,
            username: "dev-server".to_string(),
            channel_store: cx.read(ChannelStore::global).clone(),
            notification_store: cx.read(NotificationStore::global).clone(),
            state: Default::default(),
        }
    }

    pub fn disconnect_client(&self, peer_id: PeerId) {
        self.connection_killers
            .lock()

@@ -5,7 +5,7 @@ use self::channel_modal::ChannelModal;
use crate::{channel_view::ChannelView, chat_panel::ChatPanel, CollaborationPanelSettings};
use call::ActiveCall;
use channel::{Channel, ChannelEvent, ChannelStore};
use client::{ChannelId, Client, Contact, ProjectId, User, UserStore};
use client::{ChannelId, Client, Contact, User, UserStore};
use contact_finder::ContactFinder;
use db::kvp::KEY_VALUE_STORE;
use editor::{Editor, EditorElement, EditorStyle};

@@ -182,10 +182,6 @@ enum ListEntry {
    ChannelEditor {
        depth: usize,
    },
    HostedProject {
        id: ProjectId,
        name: SharedString,
    },
    Contact {
        contact: Arc<Contact>,
        calling: bool,

@@ -566,7 +562,6 @@ impl CollabPanel {
                }
            }

            let hosted_projects = channel_store.projects_for_id(channel.id);
            let has_children = channel_store
                .channel_at_index(mat.candidate_id + 1)
                .map_or(false, |next_channel| {

@@ -600,10 +595,6 @@ impl CollabPanel {
                    });
                }
            }

            for (name, id) in hosted_projects {
                self.entries.push(ListEntry::HostedProject { id, name });
            }
        }
    }

@@ -1029,40 +1020,6 @@ impl CollabPanel {
            .tooltip(move |cx| Tooltip::text("Open Chat", cx))
    }

    fn render_channel_project(
        &self,
        id: ProjectId,
        name: &SharedString,
        is_selected: bool,
        cx: &mut ViewContext<Self>,
    ) -> impl IntoElement {
        ListItem::new(ElementId::NamedInteger(
            "channel-project".into(),
            id.0 as usize,
        ))
        .indent_level(2)
        .indent_step_size(px(20.))
        .selected(is_selected)
        .on_click(cx.listener(move |this, _, cx| {
            if let Some(workspace) = this.workspace.upgrade() {
                let app_state = workspace.read(cx).app_state().clone();
                workspace::join_hosted_project(id, app_state, cx).detach_and_prompt_err(
                    "Failed to open project",
                    cx,
                    |_, _| None,
                )
            }
        }))
        .start_slot(
            h_flex()
                .relative()
                .gap_1()
                .child(IconButton::new(0, IconName::FileTree)),
        )
        .child(Label::new(name.clone()))
        .tooltip(move |cx| Tooltip::text("Open Project", cx))
    }

    fn has_subchannels(&self, ix: usize) -> bool {
        self.entries.get(ix).map_or(false, |entry| {
            if let ListEntry::Channel { has_children, .. } = entry {

@@ -1538,12 +1495,6 @@ impl CollabPanel {
            ListEntry::ChannelChat { channel_id } => {
                self.join_channel_chat(*channel_id, cx)
            }
            ListEntry::HostedProject {
                id: _id,
                name: _name,
            } => {
                // todo()
            }
            ListEntry::OutgoingRequest(_) => {}
            ListEntry::ChannelEditor { .. } => {}
        }

@@ -2157,10 +2108,6 @@ impl CollabPanel {
            ListEntry::ChannelChat { channel_id } => self
                .render_channel_chat(*channel_id, is_selected, cx)
                .into_any_element(),

            ListEntry::HostedProject { id, name } => self
                .render_channel_project(*id, name, is_selected, cx)
                .into_any_element(),
        }
    }

@@ -2779,7 +2726,7 @@ impl Render for CollabPanel {
            .on_action(cx.listener(CollabPanel::collapse_selected_channel))
            .on_action(cx.listener(CollabPanel::expand_selected_channel))
            .on_action(cx.listener(CollabPanel::start_move_selected_channel))
            .track_focus(&self.focus_handle)
            .track_focus(&self.focus_handle(cx))
            .size_full()
            .child(if self.user_store.read(cx).current_user().is_none() {
                self.render_signed_out(cx)

@@ -2898,11 +2845,6 @@ impl PartialEq for ListEntry {
                    return channel_1.id == channel_2.id;
                }
            }
            ListEntry::HostedProject { id, .. } => {
                if let ListEntry::HostedProject { id: other_id, .. } = other {
                    return id == other_id;
                }
            }
            ListEntry::ChannelNotes { channel_id } => {
                if let ListEntry::ChannelNotes {
                    channel_id: other_id,

Some files were not shown because too many files have changed in this diff.