Compare commits

4 Commits

Author          SHA1        Message                                        Date
Piotr Osiewicz  217fc298ed  Merge branch 'main' into language-toolchains   2024-10-18 14:26:50 +02:00
Piotr Osiewicz  cf304df0dc  WIP                                            2024-10-17 13:27:12 +02:00
Piotr Osiewicz  7b8463b566  💄                                             2024-10-17 11:16:07 +02:00
Piotr Osiewicz  241a73df54  wip                                            2024-10-17 10:58:42 +02:00
301 changed files with 10507 additions and 11058 deletions

@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
fetch-depth: 0

@@ -18,7 +18,7 @@ jobs:
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
ref: ${{ github.event.inputs.branch }}
ssh-key: ${{ secrets.ZED_BOT_DEPLOY_KEY }}

@@ -14,7 +14,6 @@ on:
- "**"
paths-ignore:
- "docs/**"
- ".github/workflows/community_*"
concurrency:
# Allow only one workflow per any non-`main` branch.
@@ -36,7 +35,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0 # fetch full history
@@ -78,26 +77,25 @@ jobs:
- buildjet-8vcpu-ubuntu-2204
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
- name: Run style checks
uses: ./.github/actions/check_style
- name: Check for typos
uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
uses: crate-ci/typos@v1.24.6
with:
config: ./typos.toml
macos_tests:
timeout-minutes: 60
name: (macOS) Run Clippy and tests
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -127,7 +125,6 @@ jobs:
linux_tests:
timeout-minutes: 60
name: (Linux) Run Clippy and tests
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2204
steps:
@@ -135,12 +132,12 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
- name: Cache dependencies
uses: swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "buildjet"
@@ -160,7 +157,6 @@ jobs:
build_remote_server:
timeout-minutes: 60
name: (Linux) Build Remote Server
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2204
steps:
@@ -168,12 +164,12 @@ jobs:
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
- name: Cache dependencies
uses: swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "buildjet"
@@ -188,16 +184,15 @@ jobs:
windows_tests:
timeout-minutes: 60
name: (Windows) Run Clippy and tests
if: github.repository_owner == 'zed-industries'
runs-on: hosted-windows-1
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
- name: Cache dependencies
uses: swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "github"
@@ -233,7 +228,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
# We need to fetch more than one commit so that `script/draft-release-notes`
# is able to diff between the current and previous tag.
@@ -271,20 +266,20 @@ jobs:
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg
- name: Upload app bundle (universal) to workflow run if main branch or specific label
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4
uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
path: target/release/Zed.dmg
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4
uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4
uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
@@ -318,7 +313,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -335,7 +330,7 @@ jobs:
run: script/bundle-linux
- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4
uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
@@ -365,7 +360,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -382,7 +377,7 @@ jobs:
run: script/bundle-linux
- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4
uses: actions/upload-artifact@604373da6381bf24206979c74d06a550515601b9 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
@@ -390,6 +385,7 @@ jobs:
- name: Upload app bundle to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}

@@ -14,10 +14,10 @@ jobs:
stale-issue-message: >
Hi there! 👋
We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and we will keep it open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, we'll close it in 7 days.
We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in 7 days. Feel free to open a new issue if you're seeing this message after the issue has been closed.
Thanks for your help!
close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, please open a new issue with a link to this issue."
close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, feel free to ping a Zed team member to reopen this issue or open a new one."
# We will increase `days-before-stale` to 365 on or after Jan 24th,
# 2024. This date marks one year since migrating issues from
# 'community' to 'zed' repository. The migration added activity to all

@@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:

@@ -13,7 +13,7 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

@@ -17,7 +17,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0
@@ -36,7 +36,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0
@@ -71,7 +71,7 @@ jobs:
run: doctl registry login
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -97,7 +97,7 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

@@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:
@@ -31,7 +31,7 @@ jobs:
}
- name: Check for Typos with Typos-CLI
uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
uses: crate-ci/typos@v1.24.6
with:
config: ./typos.toml
files: ./docs/

@@ -16,12 +16,12 @@ jobs:
- ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
- name: Cache dependencies
uses: swatinem/rust-cache@82a92a6e8fbeee089604da2575dc567ae9ddeaab # v2
uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "github"

@@ -27,7 +27,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false

@@ -23,7 +23,7 @@ jobs:
- test
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
fetch-depth: 0
@@ -44,7 +44,7 @@ jobs:
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -75,7 +75,7 @@ jobs:
node-version: "18"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -109,7 +109,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -149,7 +149,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
clean: false
@@ -182,7 +182,7 @@ jobs:
- bundle-linux-arm
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
with:
fetch-depth: 0

@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- name: Set up uv
uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3
with:

@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4
- name: Set up uv
uses: astral-sh/setup-uv@f3bcaebff5eace81a1c062af9f9011aae482ca9d # v3
with:

Cargo.lock (generated)

@@ -261,9 +261,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4"
[[package]]
name = "anyhow"
version = "1.0.91"
version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"
checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
[[package]]
name = "approx"
@@ -453,11 +453,9 @@ dependencies = [
"anyhow",
"collections",
"derive_more",
"futures 0.3.30",
"gpui",
"language",
"parking_lot",
"pretty_assertions",
"serde",
"serde_json",
"workspace",
@@ -1011,7 +1009,6 @@ dependencies = [
"smol",
"tempfile",
"util",
"which 6.0.3",
"workspace",
]
@@ -1580,7 +1577,7 @@ dependencies = [
"bitflags 2.6.0",
"cexpr",
"clang-sys",
"itertools 0.10.5",
"itertools 0.12.1",
"lazy_static",
"lazycell",
"proc-macro2",
@@ -2550,6 +2547,7 @@ dependencies = [
"ctor",
"dashmap 6.0.1",
"derive_more",
"dev_server_projects",
"editor",
"env_logger",
"envy",
@@ -2560,6 +2558,7 @@ dependencies = [
"git_hosting_providers",
"google_ai",
"gpui",
"headless",
"hex",
"http_client",
"hyper 0.14.30",
@@ -3474,6 +3473,18 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "dev_server_projects"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"gpui",
"rpc",
"serde",
"serde_json",
]
[[package]]
name = "diagnostics"
version = "0.1.0"
@@ -3638,12 +3649,6 @@ version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125"
[[package]]
name = "ec4rs"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acf65d056c7da9c971c2847ce250fd1f0f9659d5718845c3ec0ad95f5668352c"
[[package]]
name = "ecdsa"
version = "0.14.8"
@@ -5259,6 +5264,28 @@ dependencies = [
"http 0.2.12",
]
[[package]]
name = "headless"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"extension",
"fs",
"futures 0.3.30",
"gpui",
"language",
"log",
"node_runtime",
"postage",
"project",
"proto",
"settings",
"shellexpand 2.1.2",
"signal-hook",
"util",
]
[[package]]
name = "heck"
version = "0.3.3"
@@ -5554,7 +5581,7 @@ dependencies = [
"httpdate",
"itoa",
"pin-project-lite",
"socket2 0.4.10",
"socket2 0.5.7",
"tokio",
"tower-service",
"tracing",
@@ -6183,7 +6210,6 @@ dependencies = [
"clock",
"collections",
"ctor",
"ec4rs",
"env_logger",
"futures 0.3.30",
"fuzzy",
@@ -6197,7 +6223,6 @@ dependencies = [
"lsp",
"parking_lot",
"postage",
"pretty_assertions",
"pulldown-cmark 0.12.1",
"rand 0.8.5",
"regex",
@@ -6439,7 +6464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
"windows-targets 0.48.5",
"windows-targets 0.52.6",
]
[[package]]
@@ -8406,6 +8431,7 @@ dependencies = [
"client",
"clock",
"collections",
"dev_server_projects",
"env_logger",
"fs",
"futures 0.3.30",
@@ -8477,7 +8503,6 @@ dependencies = [
"serde_derive",
"serde_json",
"settings",
"smallvec",
"theme",
"ui",
"util",
@@ -8943,6 +8968,8 @@ version = "0.1.0"
dependencies = [
"anyhow",
"auto_update",
"client",
"dev_server_projects",
"editor",
"file_finder",
"futures 0.3.30",
@@ -8951,7 +8978,6 @@ dependencies = [
"itertools 0.13.0",
"language",
"log",
"markdown",
"menu",
"ordered-float 2.10.1",
"paths",
@@ -8959,13 +8985,14 @@ dependencies = [
"project",
"release_channel",
"remote",
"rpc",
"schemars",
"serde",
"serde_json",
"settings",
"smol",
"task",
"theme",
"terminal_view",
"ui",
"util",
"workspace",
@@ -9092,7 +9119,6 @@ name = "remote"
version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
"collections",
"fs",
"futures 0.3.30",
@@ -9122,7 +9148,6 @@ dependencies = [
"client",
"clock",
"env_logger",
"fork",
"fs",
"futures 0.3.30",
"git",
@@ -9131,7 +9156,6 @@ dependencies = [
"http_client",
"language",
"languages",
"libc",
"log",
"lsp",
"node_runtime",
@@ -9279,7 +9303,6 @@ dependencies = [
"system-configuration 0.6.1",
"tokio",
"tokio-rustls 0.26.0",
"tokio-socks",
"tokio-util",
"tower-service",
"url",
@@ -10277,7 +10300,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
"ec4rs",
"fs",
"futures 0.3.30",
"gpui",
@@ -11870,6 +11892,7 @@ dependencies = [
"client",
"collections",
"command_palette",
"dev_server_projects",
"editor",
"extensions_ui",
"feature_flags",
@@ -11978,7 +12001,6 @@ dependencies = [
"futures-io",
"futures-util",
"thiserror",
"tokio",
]
[[package]]
@@ -12403,7 +12425,7 @@ checksum = "2545046bd1473dac6c626659cc2567c6c0ff302fc8b84a56c4243378276f7f57"
[[package]]
name = "tree-sitter-md"
version = "0.3.2"
source = "git+https://github.com/tree-sitter-grammars/tree-sitter-markdown?rev=9a23c1a96c0513d8fc6520972beedd419a973539#9a23c1a96c0513d8fc6520972beedd419a973539"
source = "git+https://github.com/zed-industries/tree-sitter-markdown?rev=4cfa6aad6b75052a5077c80fd934757d9267d81b#4cfa6aad6b75052a5077c80fd934757d9267d81b"
dependencies = [
"cc",
"tree-sitter-language",
@@ -13387,9 +13409,9 @@ dependencies = [
[[package]]
name = "wasmtime-wasi"
version = "24.0.1"
version = "24.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fda03f5bfd5c4cc09f75c7e44846663f25f2c48a2d688fbfb5c7a33af6cf34f5"
checksum = "545ae8298ffce025604f7480f9c7d6948c985bef7ce9aee249ef79307813e83c"
dependencies = [
"anyhow",
"async-trait",
@@ -13642,9 +13664,9 @@ dependencies = [
[[package]]
name = "wiggle"
version = "24.0.1"
version = "24.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d3b31bd2b4d2d82a4b747b8dbc45f566214214a4ffdc5690429a73bc221dc8a"
checksum = "cc850ca3c02c5835934d23f28cec4c5a3fb66fe0b4ecd968bbb35609dda5ddc0"
dependencies = [
"anyhow",
"async-trait",
@@ -13657,9 +13679,9 @@ dependencies = [
[[package]]
name = "wiggle-generate"
version = "24.0.1"
version = "24.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2c6136b195fc12067aa9d4e7a5baf118729394df7bc7cbf8c63119bc9f2a7cd"
checksum = "634b8804a67200bcb43ea8af5f7c53e862439a086b68b16fd333454bc74d5aab"
dependencies = [
"anyhow",
"heck 0.4.1",
@@ -13672,9 +13694,9 @@ dependencies = [
[[package]]
name = "wiggle-macro"
version = "24.0.1"
version = "24.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a41eaceee468da976ac43b85c4eb82e482f828d5e8e56f49f90dfac2d9bc3b4"
checksum = "474b7cbdb942c74031e619d66c600bba7f73867c5800fc2c2306cf307649be2f"
dependencies = [
"proc-macro2",
"quote",
@@ -13704,7 +13726,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.48.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -14266,6 +14288,7 @@ dependencies = [
"collections",
"db",
"derive_more",
"dev_server_projects",
"env_logger",
"fs",
"futures 0.3.30",
@@ -14560,7 +14583,7 @@ dependencies = [
[[package]]
name = "zed"
version = "0.160.0"
version = "0.159.0"
dependencies = [
"activity_indicator",
"anyhow",
@@ -14584,6 +14607,7 @@ dependencies = [
"command_palette_hooks",
"copilot",
"db",
"dev_server_projects",
"diagnostics",
"editor",
"env_logger",
@@ -14599,6 +14623,7 @@ dependencies = [
"git_hosting_providers",
"go_to_line",
"gpui",
"headless",
"http_client",
"image_viewer",
"inline_completion_button",
@@ -14666,6 +14691,7 @@ dependencies = [
"winresource",
"workspace",
"zed_actions",
"zstd",
]
[[package]]
@@ -14714,7 +14740,7 @@ dependencies = [
[[package]]
name = "zed_elixir"
version = "0.1.1"
version = "0.1.0"
dependencies = [
"zed_extension_api 0.1.0",
]
@@ -14797,7 +14823,7 @@ dependencies = [
[[package]]
name = "zed_php"
version = "0.2.2"
version = "0.2.1"
dependencies = [
"zed_extension_api 0.1.0",
]
@@ -14838,6 +14864,13 @@ dependencies = [
"zed_extension_api 0.1.0",
]
[[package]]
name = "zed_svelte"
version = "0.2.0"
dependencies = [
"zed_extension_api 0.1.0",
]
[[package]]
name = "zed_terraform"
version = "0.1.1"
@@ -14866,6 +14899,14 @@ dependencies = [
"zed_extension_api 0.1.0",
]
[[package]]
name = "zed_vue"
version = "0.1.0"
dependencies = [
"serde",
"zed_extension_api 0.1.0",
]
[[package]]
name = "zed_zig"
version = "0.3.1"

View File

@@ -23,6 +23,7 @@ members = [
"crates/context_servers",
"crates/copilot",
"crates/db",
"crates/dev_server_projects",
"crates/diagnostics",
"crates/docs_preprocessor",
"crates/editor",
@@ -44,6 +45,7 @@ members = [
"crates/google_ai",
"crates/gpui",
"crates/gpui_macros",
"crates/headless",
"crates/html_to_markdown",
"crates/http_client",
"crates/image_viewer",
@@ -156,10 +158,12 @@ members = [
"extensions/ruff",
"extensions/slash-commands-example",
"extensions/snippets",
"extensions/svelte",
"extensions/terraform",
"extensions/test-extension",
"extensions/toml",
"extensions/uiua",
"extensions/vue",
"extensions/zig",
#
@@ -199,6 +203,7 @@ command_palette_hooks = { path = "crates/command_palette_hooks" }
context_servers = { path = "crates/context_servers" }
copilot = { path = "crates/copilot" }
db = { path = "crates/db" }
dev_server_projects = { path = "crates/dev_server_projects" }
diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" }
extension = { path = "crates/extension" }
@@ -216,6 +221,7 @@ go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui", default-features = false, features = ["http_client"]}
gpui_macros = { path = "crates/gpui_macros" }
headless = { path = "crates/headless" }
html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" }
image_viewer = { path = "crates/image_viewer" }
@@ -343,7 +349,6 @@ ctor = "0.2.6"
dashmap = "6.0"
derive_more = "0.99.17"
dirs = "4.0"
ec4rs = "1.1"
emojis = "0.6.1"
env_logger = "0.11"
exec = "0.3.1"
@@ -386,14 +391,7 @@ pulldown-cmark = { version = "0.12.0", default-features = false }
rand = "0.8.5"
regex = "1.5"
repair_json = "0.1.0"
reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29", default-features = false, features = [
"charset",
"http2",
"macos-system-configuration",
"rustls-tls-native-roots",
"socks",
"stream",
] }
reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f6998da16bbca97b6dddda9be7827c50e29", default-features = false, features = ["charset", "http2", "macos-system-configuration", "rustls-tls-native-roots", "stream"]}
rsa = "0.9.6"
runtimelib = { version = "0.15", default-features = false, features = [
"async-dispatcher-runtime",
@@ -455,7 +453,7 @@ tree-sitter-diff = "0.1.0"
tree-sitter-html = "0.20"
tree-sitter-jsdoc = "0.23"
tree-sitter-json = "0.23"
tree-sitter-md = { git = "https://github.com/tree-sitter-grammars/tree-sitter-markdown", rev = "9a23c1a96c0513d8fc6520972beedd419a973539" }
tree-sitter-md = { git = "https://github.com/zed-industries/tree-sitter-markdown", rev = "4cfa6aad6b75052a5077c80fd934757d9267d81b" }
tree-sitter-python = "0.23"
tree-sitter-regex = "0.23"
tree-sitter-ruby = "0.23"

View File

@@ -65,7 +65,6 @@
"h": "c",
"handlebars": "code",
"hbs": "template",
"hcl": "hcl",
"heex": "elixir",
"heic": "image",
"heif": "image",
@@ -90,7 +89,6 @@
"json": "storage",
"jsonc": "storage",
"jsx": "react",
"julia": "julia",
"jxl": "image",
"kt": "kotlin",
"ldf": "storage",
@@ -118,7 +116,6 @@
"myd": "storage",
"myi": "storage",
"nim": "nim",
"nix": "nix",
"nu": "terminal",
"odp": "document",
"ods": "document",
@@ -131,7 +128,6 @@
"php": "php",
"plist": "template",
"png": "image",
"postcss": "css",
"ppt": "document",
"pptx": "document",
"prettierignore": "prettier",
@@ -146,15 +142,12 @@
"rb": "ruby",
"rebar.config": "erlang",
"rkt": "code",
"roc": "roc",
"rs": "rust",
"rtf": "document",
"sass": "sass",
"sav": "storage",
"sc": "scala",
"scala": "scala",
"scm": "code",
"scss": "sass",
"sdf": "storage",
"sh": "terminal",
"sql": "storage",
@@ -188,7 +181,6 @@
"yaml": "settings",
"yml": "settings",
"yrl": "erlang",
"zig": "zig",
"zlogin": "terminal",
"zsh": "terminal",
"zsh_aliases": "terminal",
@@ -273,9 +265,6 @@
"haskell": {
"icon": "icons/file_icons/haskell.svg"
},
"hcl": {
"icon": "icons/file_icons/hcl.svg"
},
"heroku": {
"icon": "icons/file_icons/heroku.svg"
},
@@ -288,9 +277,6 @@
"javascript": {
"icon": "icons/file_icons/javascript.svg"
},
"julia": {
"icon": "icons/file_icons/julia.svg"
},
"kotlin": {
"icon": "icons/file_icons/kotlin.svg"
},
@@ -306,9 +292,6 @@
"nim": {
"icon": "icons/file_icons/nim.svg"
},
"nix": {
"icon": "icons/file_icons/nix.svg"
},
"ocaml": {
"icon": "icons/file_icons/ocaml.svg"
},
@@ -333,18 +316,12 @@
"react": {
"icon": "icons/file_icons/react.svg"
},
"roc": {
"icon": "icons/file_icons/roc.svg"
},
"ruby": {
"icon": "icons/file_icons/ruby.svg"
},
"rust": {
"icon": "icons/file_icons/rust.svg"
},
"sass": {
"icon": "icons/file_icons/sass.svg"
},
"scala": {
"icon": "icons/file_icons/scala.svg"
},
@@ -383,9 +360,6 @@
},
"vue": {
"icon": "icons/file_icons/vue.svg"
},
"zig": {
"icon": "icons/file_icons/zig.svg"
}
}
}

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.11466 3.11809C7.21859 3.37393 7.09545 3.66558 6.83961 3.76952L4.31181 4.79643C4.1233 4.87302 4 5.05619 4 5.25967V11.5C4 11.7761 3.77614 12 3.5 12H2.5C2.22386 12 2 11.7761 2 11.5V4.41827C2 3.90959 2.30825 3.45164 2.77953 3.26018L6.08686 1.91658C6.34269 1.81265 6.63434 1.93579 6.73828 2.19163L7.11466 3.11809ZM10.5 1.99999C10.7761 1.99999 11 2.22384 11 2.49999V10.5C11 10.7761 10.7761 11 10.5 11H9.5C9.22386 11 9 10.7761 9 10.5V9.49999C9 9.22384 8.77614 8.99999 8.5 8.99999H7.5C7.22386 8.99999 7 9.22384 7 9.49999V13.5C7 13.7761 6.77614 14 6.5 14H5.5C5.22386 14 5 13.7761 5 13.5V5.53124C5 5.25509 5.22386 5.03124 5.5 5.03124H6.5C6.77614 5.03124 7 5.25509 7 5.53124V6.49999C7 6.77613 7.22386 6.99999 7.5 6.99999H8.5C8.77614 6.99999 9 6.77613 9 6.49999V2.49999C9 2.22384 9.22386 1.99999 9.5 1.99999H10.5ZM13.5 4.03124C13.7761 4.03124 14 4.2551 14 4.53124L14 11.5847C14 12.0859 13.7006 12.5386 13.2394 12.7349L9.99399 14.1159C9.7399 14.224 9.44626 14.1057 9.33813 13.8516L8.94658 12.9315C8.83845 12.6774 8.95678 12.3837 9.21087 12.2756L11.6958 11.2182C11.8802 11.1397 12 10.9586 12 10.7581L12 4.53124C12 4.2551 12.2238 4.03124 12.5 4.03124L13.5 4.03124Z" fill="black"/>
</svg>

@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="8" cy="5" r="2.75" fill="black"/>
<circle cx="4.75" cy="11" r="2.75" fill="black" fill-opacity="0.5"/>
<circle cx="11.25" cy="11" r="2.75" fill="black" fill-opacity="0.75"/>
</svg>

@@ -1,8 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6.00005 4.76556L4.76569 2.74996M6.00005 4.76556L3.75 4.76563M6.00005 4.76556L7.25006 4.7656" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<path d="M10.0232 11.2311L11.2675 13.2406M10.0232 11.2311L12.2732 11.2199M10.0232 11.2311L8.7732 11.2373" stroke="black" stroke-opacity="0.5" stroke-width="1.5" stroke-linecap="round"/>
<path d="M9.99025 4.91551L10.9985 2.77781M9.99025 4.91551L8.75599 3.03419M9.99025 4.91551L10.6759 5.9607" stroke="black" stroke-opacity="0.5" stroke-width="1.5" stroke-linecap="round"/>
<path d="M6.0323 11.1009L5.03465 13.2436M6.0323 11.1009L7.27585 12.9761M6.0323 11.1009L5.34151 10.0592" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<path d="M11.883 8.19023L14.2466 8.19287M11.883 8.19023L13.0602 6.27268M11.883 8.19023L11.229 9.25547" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
<path d="M4.12354 7.8356L1.76002 7.84465M4.12354 7.8356L2.95585 9.75894M4.12354 7.8356L4.7723 6.76713" stroke="black" stroke-opacity="0.5" stroke-width="1.5" stroke-linecap="round"/>
</svg>

@@ -1,7 +0,0 @@
<svg width="17" height="16" viewBox="0 0 17 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.51497 2.02702L1.92042 1.95067C1.69543 1.94589 1.57917 2.21756 1.73796 2.37702L6.24865 6.9068C6.42388 7.08277 6.72071 6.92326 6.67067 6.68002L5.75454 2.22659C5.73103 2.11231 5.63161 2.02949 5.51497 2.02702Z" fill="black" fill-opacity="0.5"/>
<path d="M8.05816 7.38492L12.1366 8.02844C12.3704 8.06532 12.5198 7.78697 12.3599 7.61255L7.30439 2.09814C7.13336 1.91159 6.82522 2.06811 6.87499 2.31624L7.852 7.18714C7.87257 7.28971 7.95483 7.36862 8.05816 7.38492Z" fill="black"/>
<path d="M9.0952 10.9797L11.3824 9.35081C11.564 9.22151 11.4983 8.93722 11.2785 8.90058L8.496 8.43683C8.31974 8.40746 8.17047 8.56712 8.21162 8.74101L8.70689 10.8337C8.74777 11.0064 8.95062 11.0827 9.0952 10.9797Z" fill="black" fill-opacity="0.5"/>
<path d="M5.10282 13.9632L7.59108 12.4532C7.68331 12.3972 7.72923 12.2884 7.70498 12.1832L6.75736 8.07484C6.699 7.8218 6.34133 7.81448 6.27266 8.06491L4.73201 13.6834C4.67223 13.9014 4.90954 14.0805 5.10282 13.9632Z" fill="black"/>
<path d="M11.3183 4.89351L13.1588 7.03149L15.535 6.14302C15.7099 6.07761 15.754 5.85043 15.6161 5.72438L13.7222 3.99219L11.4546 4.48614C11.2695 4.52645 11.1947 4.74995 11.3183 4.89351Z" fill="black"/>
</svg>

@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.92096 7.00668C7.87408 7.83549 10.0987 7.48203 10.9376 7.06254C12.8751 6.09381 13.9407 4.39379 12.6407 2.90629C11.0157 1.04692 6.24221 2.49998 4.89844 3.40625C3.55467 4.31252 2.67972 5.53126 2.89071 7.1719C3.1017 8.81254 4.68758 9.7422 6.03128 10.3203C5.38786 10.5616 3.8517 11.0388 3.3125 11.7188C2.71341 12.4742 3.04343 14 4.51577 14C7.15639 14 7.59539 11.1486 7.14847 10.4375C7.88773 10.1295 8.49597 9.96169 9.40138 9.77081C9.63831 9.72087 9.65457 9.46395 9.41295 9.44827C8.80252 9.40864 7.30567 9.8489 6.92096 9.97657C5.78909 9.35157 4.51016 7.93818 4.59378 6.87501C4.68676 5.6928 5.27676 5.07603 6.84508 4.21876C8.01705 3.57813 10.258 3.10695 11.25 3.62501C12.6563 4.35936 10.7875 5.75599 9.92969 6.32031C9.28179 6.74656 8.21971 6.77513 7.22979 6.61435C6.99371 6.576 6.74048 6.84974 6.92096 7.00668ZM5.6719 12.4643C6.35508 11.9894 6.45471 11.1076 6.29955 10.8844C5.76663 11.0874 4.36593 11.9102 4.75111 12.4643C4.90628 12.6875 5.31358 12.7134 5.6719 12.4643Z" fill="black"/>
</svg>

@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M14.25 12H11C10.794 12 10.6764 11.7648 10.8 11.6L11.925 10.1C11.9722 10.037 12.0463 10 12.125 10H12.75C12.8881 10 13 9.88807 13 9.75V6.25C13 6.11193 12.8881 6 12.75 6H12.4045C12.2187 6 12.0978 5.80442 12.1809 5.6382L12.9309 4.1382C12.9732 4.0535 13.0598 4 13.1545 4H14.25C14.3881 4 14.5 4.11193 14.5 4.25V11.75C14.5 11.8881 14.3881 12 14.25 12Z" fill="black"/>
<path d="M1.75 4H5C5.20601 4 5.32361 4.23519 5.2 4.4L4.075 5.9C4.02779 5.96295 3.95369 6 3.875 6H3.25C3.11193 6 3 6.11193 3 6.25V9.75C3 9.88807 3.11193 10 3.25 10H3.59549C3.78134 10 3.90221 10.1956 3.8191 10.3618L3.0691 11.8618C3.02675 11.9465 2.94018 12 2.84549 12H1.75C1.61193 12 1.5 11.8881 1.5 11.75V4.25C1.5 4.11193 1.61193 4 1.75 4Z" fill="black"/>
<path d="M7.55748 6H5.95006C5.74177 6 5.62482 5.76022 5.75306 5.59609L6.92493 4.09609C6.97231 4.03544 7.04498 4 7.12194 4H9.93075C9.97607 4 10.0205 3.98769 10.0594 3.96437L11.6408 3.0155C11.8641 2.88154 12.1179 3.13555 11.9837 3.3587L8.22612 9.6083C8.12629 9.77433 8.24508 9.98591 8.43881 9.98712L10.0039 9.9969C10.2092 9.99818 10.3255 10.2327 10.2023 10.3969L9.075 11.9C9.02779 11.963 8.95369 12 8.875 12H6.55383C6.51835 12 6.48328 12.0076 6.45094 12.0222L4.32473 12.9824C4.10122 13.0833 3.88113 12.8356 4.00771 12.6255L7.77161 6.37903C7.87201 6.2124 7.75202 6 7.55748 6Z" fill="black"/>
</svg>

@@ -313,15 +313,6 @@
"ctrl-k ctrl-l": "editor::ToggleFold",
"ctrl-k ctrl-[": "editor::FoldRecursive",
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
"ctrl-k ctrl-1": ["editor::FoldAtLevel", { "level": 1 }],
"ctrl-k ctrl-2": ["editor::FoldAtLevel", { "level": 2 }],
"ctrl-k ctrl-3": ["editor::FoldAtLevel", { "level": 3 }],
"ctrl-k ctrl-4": ["editor::FoldAtLevel", { "level": 4 }],
"ctrl-k ctrl-5": ["editor::FoldAtLevel", { "level": 5 }],
"ctrl-k ctrl-6": ["editor::FoldAtLevel", { "level": 6 }],
"ctrl-k ctrl-7": ["editor::FoldAtLevel", { "level": 7 }],
"ctrl-k ctrl-8": ["editor::FoldAtLevel", { "level": 8 }],
"ctrl-k ctrl-9": ["editor::FoldAtLevel", { "level": 9 }],
"ctrl-k ctrl-0": "editor::FoldAll",
"ctrl-k ctrl-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions",
@@ -514,13 +505,6 @@
"ctrl-enter": "assistant::InlineAssist"
}
},
{
"context": "ProposedChangesEditor",
"bindings": {
"ctrl-shift-y": "editor::ApplyDiffHunk",
"ctrl-alt-a": "editor::ApplyAllDiffHunks"
}
},
{
"context": "Editor && jupyter && !ContextEditor",
"bindings": {

@@ -349,15 +349,7 @@
"alt-cmd-]": "editor::UnfoldLines",
"cmd-k cmd-l": "editor::ToggleFold",
"cmd-k cmd-[": "editor::FoldRecursive",
"cmd-k cmd-1": ["editor::FoldAtLevel", { "level": 1 }],
"cmd-k cmd-2": ["editor::FoldAtLevel", { "level": 2 }],
"cmd-k cmd-3": ["editor::FoldAtLevel", { "level": 3 }],
"cmd-k cmd-4": ["editor::FoldAtLevel", { "level": 4 }],
"cmd-k cmd-5": ["editor::FoldAtLevel", { "level": 5 }],
"cmd-k cmd-6": ["editor::FoldAtLevel", { "level": 6 }],
"cmd-k cmd-7": ["editor::FoldAtLevel", { "level": 7 }],
"cmd-k cmd-8": ["editor::FoldAtLevel", { "level": 8 }],
"cmd-k cmd-9": ["editor::FoldAtLevel", { "level": 9 }],
"cmd-k cmd-]": "editor::UnfoldRecursive",
"cmd-k cmd-0": "editor::FoldAll",
"cmd-k cmd-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions",
@@ -546,13 +538,6 @@
"ctrl-enter": "assistant::InlineAssist"
}
},
{
"context": "ProposedChangesEditor",
"bindings": {
"cmd-shift-y": "editor::ApplyDiffHunk",
"cmd-shift-a": "editor::ApplyAllDiffHunks"
}
},
{
"context": "PromptEditor",
"bindings": {

@@ -34,7 +34,7 @@
"cmd-]": "pane::GoForward",
"alt-f7": "editor::FindAllReferences",
"cmd-alt-f7": "editor::FindAllReferences",
"cmd-b": "editor::GoToDefinition", // Conflicts with workspace::ToggleLeftDock
"cmd-b": "editor::GoToDefinition",
"cmd-alt-b": "editor::GoToDefinitionSplit",
"cmd-shift-b": "editor::GoToTypeDefinition",
"cmd-alt-shift-b": "editor::GoToTypeDefinitionSplit",
@@ -64,8 +64,7 @@
"cmd-shift-o": "file_finder::Toggle",
"cmd-shift-a": "command_palette::Toggle",
"shift shift": "command_palette::Toggle",
"cmd-alt-o": "project_symbols::Toggle", // JetBrains: Go to Symbol
"cmd-o": "project_symbols::Toggle", // JetBrains: Go to Class
"cmd-alt-o": "project_symbols::Toggle",
"cmd-1": "workspace::ToggleLeftDock",
"cmd-6": "diagnostics::Deploy"
}

@@ -88,6 +88,7 @@ origin: (f64, f64),
<edit>
<path>src/shapes/rectangle.rs</path>
<description>Update the Rectangle's new function to take an origin parameter</description>
<operation>update</operation>
<old_text>
fn new(width: f64, height: f64) -> Self {
@@ -116,6 +117,7 @@ pub struct Circle {
<edit>
<path>src/shapes/circle.rs</path>
<description>Update the Circle's new function to take an origin parameter</description>
<operation>update</operation>
<old_text>
fn new(radius: f64) -> Self {
@@ -132,6 +134,7 @@ fn new(origin: (f64, f64), radius: f64) -> Self {
<edit>
<path>src/shapes/rectangle.rs</path>
<description>Add an import for the std::fmt module</description>
<operation>insert_before</operation>
<old_text>
struct Rectangle {
@@ -144,10 +147,7 @@ use std::fmt;
<edit>
<path>src/shapes/rectangle.rs</path>
<description>
Add a manual Display implementation for Rectangle.
Currently, this is the same as a derived Display implementation.
</description>
<description>Add a Display implementation for Rectangle</description>
<operation>insert_after</operation>
<old_text>
Rectangle { width, height }
@@ -169,6 +169,7 @@ impl fmt::Display for Rectangle {
<edit>
<path>src/shapes/circle.rs</path>
<description>Add an import for the `std::fmt` module</description>
<operation>insert_before</operation>
<old_text>
struct Circle {
@@ -180,6 +181,7 @@ use std::fmt;
<edit>
<path>src/shapes/circle.rs</path>
<description>Add a Display implementation for Circle</description>
<operation>insert_after</operation>
<old_text>
Circle { radius }

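Note: the template above tags each <edit> with an <operation> value (update, insert_before, insert_after). A minimal Rust sketch of how such operation strings could map onto an enum follows; the names are illustrative, not Zed's actual parser.

    #[derive(Debug, PartialEq)]
    enum Operation {
        Update,
        InsertBefore,
        InsertAfter,
    }

    // Map the <operation> string from the template onto a typed value.
    fn parse_operation(s: &str) -> Option<Operation> {
        match s {
            "update" => Some(Operation::Update),
            "insert_before" => Some(Operation::InsertBefore),
            "insert_after" => Some(Operation::InsertAfter),
            _ => None,
        }
    }

    fn main() {
        assert_eq!(parse_operation("insert_after"), Some(Operation::InsertAfter));
        assert_eq!(parse_operation("bogus"), None);
    }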

@@ -346,8 +346,6 @@
"git_status": true,
// Amount of indentation for nested items.
"indent_size": 20,
// Whether to show indent guides in the project panel.
"indent_guides": true,
// Whether to reveal it in the project panel automatically,
// when a corresponding project entry becomes active.
// Gitignored entries are never auto revealed.
@@ -805,7 +803,7 @@
/// You can override this to use a version of node that is not in $PATH with:
/// {
/// "node": {
/// "path": "/path/to/node"
/// "node_path": "/path/to/node"
/// "npm_path": "/path/to/npm" (defaults to node_path/../npm)
/// }
/// }
@@ -1101,13 +1099,13 @@
// }
"command_aliases": {},
// ssh_connections is an array of ssh connections.
// By default this setting is null, which disables the direct ssh connection support.
// You can configure these from `project: Open Remote` in the command palette.
// Zed's ssh support will pull configuration from your ~/.ssh too.
// Examples:
// [
// {
// "host": "example-box",
// // "port": 22, "username": "test", "args": ["-i", "/home/user/.ssh/id_rsa"]
// "projects": [
// {
// "paths": ["/home/user/code/zed"]
@@ -1115,7 +1113,7 @@
// ]
// }
// ]
"ssh_connections": [],
"ssh_connections": null,
// Configures the Context Server Protocol binaries
//
// Examples:

@@ -29,13 +29,13 @@ pub struct AnthropicModelCacheConfiguration {
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
#[default]
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-20240620")]
Claude3_5Sonnet,
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-20240229")]
Claude3Opus,
#[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")]
#[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-20240229")]
Claude3Sonnet,
#[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-latest")]
#[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-20240307")]
Claude3Haiku,
#[serde(rename = "custom")]
Custom {
@@ -69,10 +69,10 @@ impl Model {
pub fn id(&self) -> &str {
match self {
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
Model::Claude3Opus => "claude-3-opus-latest",
Model::Claude3Sonnet => "claude-3-sonnet-latest",
Model::Claude3Haiku => "claude-3-haiku-latest",
Model::Claude3_5Sonnet => "claude-3-5-sonnet-20240620",
Model::Claude3Opus => "claude-3-opus-20240229",
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
Model::Claude3Haiku => "claude-3-haiku-20240307",
Self::Custom { name, .. } => name,
}
}

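Note: the hunk above switches the serde aliases and id() strings between the dated Anthropic model IDs and the -latest names. A small self-contained sketch of the serde behavior involved (rename plus alias on an enum variant), assuming serde and serde_json as dependencies:

    use serde::Deserialize;

    #[derive(Debug, Deserialize, PartialEq)]
    enum Model {
        // Both the canonical name and the alias deserialize to this variant.
        #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
        Claude3_5Sonnet,
    }

    fn main() {
        let a: Model = serde_json::from_str("\"claude-3-5-sonnet\"").unwrap();
        let b: Model = serde_json::from_str("\"claude-3-5-sonnet-latest\"").unwrap();
        assert_eq!(a, b); // either spelling yields the same variant
    }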

@@ -26,8 +26,8 @@ use collections::{BTreeSet, HashMap, HashSet};
use editor::{
actions::{FoldAt, MoveToEndOfLine, Newline, ShowCompletions, UnfoldAt},
display_map::{
BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata,
CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, Crease,
CreaseMetadata, CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
},
scroll::{Autoscroll, AutoscrollStrategy},
Anchor, Editor, EditorEvent, ProposedChangeLocation, ProposedChangesEditor, RowExt,
@@ -356,10 +356,8 @@ impl AssistantPanel {
let project = workspace.project().clone();
pane.set_custom_drop_handle(cx, move |_, dropped_item, cx| {
let action = maybe!({
if project.read(cx).is_local() {
if let Some(paths) = dropped_item.downcast_ref::<ExternalPaths>() {
return Some(InsertDraggedFiles::ExternalFiles(paths.paths().to_vec()));
}
if let Some(paths) = dropped_item.downcast_ref::<ExternalPaths>() {
return Some(InsertDraggedFiles::ExternalFiles(paths.paths().to_vec()));
}
let project_paths = if let Some(tab) = dropped_item.downcast_ref::<DraggedTab>()
@@ -963,7 +961,7 @@ impl AssistantPanel {
fn new_context(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ContextEditor>> {
let project = self.project.read(cx);
if project.is_via_collab() {
if project.is_via_collab() && project.dev_server_project_id().is_none() {
let task = self
.context_store
.update(cx, |store, cx| store.create_remote_context(cx));
@@ -2009,12 +2007,13 @@ impl ContextEditor {
})
.map(|(command, error_message)| BlockProperties {
style: BlockStyle::Fixed,
height: 1,
placement: BlockPlacement::Below(Anchor {
position: Anchor {
buffer_id: Some(buffer_id),
excerpt_id,
text_anchor: command.source_range.start,
}),
},
height: 1,
disposition: BlockDisposition::Below,
render: slash_command_error_block_renderer(error_message),
priority: 0,
}),
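Note: this file's hunks contrast two ways of expressing where a block goes: a single placement enum that carries the anchor (BlockPlacement::Above/Below) versus a separate position field plus a disposition flag. A simplified illustration of the trade-off, using stand-in types rather than Zed's real ones:

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Anchor(usize); // stand-in for an editor anchor

    // Shape A: two fields that the caller must keep consistent by hand.
    #[allow(dead_code)]
    struct BlockWithDisposition {
        position: Anchor,
        above: bool, // stand-in for a BlockDisposition-style flag
    }

    // Shape B: one enum, so the anchor and the above/below choice travel together.
    enum Placement {
        Above(Anchor),
        Below(Anchor),
    }

    fn anchor_of(placement: &Placement) -> Anchor {
        match placement {
            Placement::Above(a) | Placement::Below(a) => *a,
        }
    }

    fn main() {
        let placement = Placement::Below(Anchor(42));
        assert_eq!(anchor_of(&placement), Anchor(42));
    }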
@@ -2220,7 +2219,6 @@ impl ContextEditor {
merge_adjacent: false,
};
let should_refold;
if let Some(state) = self.patches.get_mut(&range) {
replaced_blocks.insert(state.footer_block_id, render_block);
if let Some(editor_state) = &state.editor {
@@ -2235,16 +2233,14 @@ impl ContextEditor {
});
}
}
should_refold =
snapshot.intersects_fold(patch_start.to_offset(&snapshot.buffer_snapshot));
} else {
let block_ids = editor.insert_blocks(
[BlockProperties {
position: patch_start,
height: path_count as u32 + 1,
style: BlockStyle::Flex,
render: render_block,
placement: BlockPlacement::Below(patch_start),
disposition: BlockDisposition::Below,
priority: 0,
}],
None,
@@ -2270,14 +2266,10 @@ impl ContextEditor {
update_task: None,
},
);
should_refold = true;
}
if should_refold {
editor.unfold_ranges([patch_start..patch_end], true, false, cx);
editor.fold_ranges([(patch_start..patch_end, header_placeholder)], false, cx);
}
editor.unfold_ranges([patch_start..patch_end], true, false, cx);
editor.fold_ranges([(patch_start..patch_end, header_placeholder)], false, cx);
}
editor.remove_creases(removed_crease_ids, cx);
@@ -2729,13 +2721,12 @@ impl ContextEditor {
})
};
let create_block_properties = |message: &Message| BlockProperties {
position: buffer
.anchor_in_excerpt(excerpt_id, message.anchor_range.start)
.unwrap(),
height: 2,
style: BlockStyle::Sticky,
placement: BlockPlacement::Above(
buffer
.anchor_in_excerpt(excerpt_id, message.anchor_range.start)
.unwrap(),
),
disposition: BlockDisposition::Above,
priority: usize::MAX,
render: render_block(MessageMetadata::from(message)),
};
@@ -3371,7 +3362,7 @@ impl ContextEditor {
let anchor = buffer.anchor_in_excerpt(excerpt_id, anchor).unwrap();
let image = render_image.clone();
anchor.is_valid(&buffer).then(|| BlockProperties {
placement: BlockPlacement::Above(anchor),
position: anchor,
height: MAX_HEIGHT_IN_LINES,
style: BlockStyle::Sticky,
render: Box::new(move |cx| {
@@ -3392,6 +3383,8 @@ impl ContextEditor {
)
.into_any_element()
}),
disposition: BlockDisposition::Above,
priority: 0,
})
})
@@ -3946,7 +3939,7 @@ impl Render for ContextEditor {
.bg(cx.theme().colors().editor_background)
.child(
h_flex()
.gap_1()
.gap_2()
.child(render_inject_context_menu(cx.view().downgrade(), cx))
.child(
IconButton::new("quote-button", IconName::Quote)
@@ -4246,11 +4239,11 @@ fn render_inject_context_menu(
slash_command_picker::SlashCommandSelector::new(
commands.clone(),
active_context_editor,
Button::new("trigger", "Add Context")
.icon(IconName::Plus)
IconButton::new("trigger", IconName::SlashSquare)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.tooltip(|cx| Tooltip::text("Type / to insert via keyboard", cx)),
.tooltip(|cx| {
Tooltip::with_meta("Insert Context", None, "Type / to insert via keyboard", cx)
}),
)
}

@@ -7,7 +7,7 @@ use crate::{
};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry, SlashCommandResult,
SlashCommandOutput, SlashCommandOutputSection, SlashCommandRegistry,
};
use assistant_tool::ToolRegistry;
use client::{self, proto, telemetry::Telemetry};
@@ -1677,7 +1677,7 @@ impl Context {
pub fn insert_command_output(
&mut self,
command_range: Range<language::Anchor>,
output: Task<SlashCommandResult>,
output: Task<Result<SlashCommandOutput>>,
ensure_trailing_newline: bool,
expand_result: bool,
cx: &mut ModelContext<Self>,
@@ -1688,13 +1688,19 @@ impl Context {
let command_range = command_range.clone();
async move {
let output = output.await;
let output = match output {
Ok(output) => SlashCommandOutput::from_event_stream(output).await,
Err(err) => Err(err),
};
this.update(&mut cx, |this, cx| match output {
Ok(mut output) => {
output.ensure_valid_section_ranges();
// Ensure section ranges are valid.
for section in &mut output.sections {
section.range.start = section.range.start.min(output.text.len());
section.range.end = section.range.end.min(output.text.len());
while !output.text.is_char_boundary(section.range.start) {
section.range.start -= 1;
}
while !output.text.is_char_boundary(section.range.end) {
section.range.end += 1;
}
}
// Ensure there is a newline after the last section.
if ensure_trailing_newline {
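Note: one side of the hunk above clamps each slash-command section range by hand, capping both ends at text.len() and then walking start down and end up to the nearest UTF-8 char boundaries (the other side folds this into ensure_valid_section_ranges). A standalone sketch of that clamping step:

    // Clamp a byte range into `text` so that slicing with it cannot panic on
    // a multi-byte character.
    fn clamp_to_char_boundaries(text: &str, mut start: usize, mut end: usize) -> (usize, usize) {
        start = start.min(text.len());
        end = end.min(text.len());
        while !text.is_char_boundary(start) {
            start -= 1; // step back to the start of the current character
        }
        while !text.is_char_boundary(end) {
            end += 1; // step forward past the current character
        }
        (start, end)
    }

    fn main() {
        let text = "héllo"; // "é" occupies bytes 1..3, so byte 2 is not a boundary
        assert_eq!(clamp_to_char_boundaries(text, 2, 10), (1, 6));
        assert_eq!(&text[1..6], "éllo");
    }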
@@ -2481,8 +2487,7 @@ impl Context {
request.messages.push(LanguageModelRequestMessage {
role: Role::User,
content: vec![
"Generate a concise 3-7 word title for this conversation, omitting punctuation"
.into(),
"Summarize the context into a short title without punctuation.".into(),
],
cache: false,
});

@@ -6,7 +6,7 @@ use crate::{
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandRegistry, SlashCommandResult,
SlashCommandRegistry,
};
use collections::HashSet;
use fs::FakeFs;
@@ -636,7 +636,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn one".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -690,7 +690,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -754,7 +754,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -798,7 +798,7 @@ async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
kind: AssistantEditKind::InsertAfter {
old_text: "fn zero".into(),
new_text: "fn two() {}".into(),
description: Some("add a `two` function".into()),
description: "add a `two` function".into(),
},
}]],
cx,
@@ -1097,8 +1097,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std
text: output_text,
sections,
run_commands_in_text: false,
}
.to_event_stream())),
})),
true,
false,
cx,
@@ -1417,12 +1416,11 @@ impl SlashCommand for FakeSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
Task::ready(Ok(SlashCommandOutput {
text: format!("Executed fake command: {}", self.0),
sections: vec![],
run_commands_in_text: false,
}
.to_event_stream()))
}))
}
}

@@ -9,7 +9,7 @@ use collections::{hash_map, HashMap, HashSet, VecDeque};
use editor::{
actions::{MoveDown, MoveUp, SelectAll},
display_map::{
BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
ToDisplayPoint,
},
Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorElement, EditorEvent, EditorMode,
@@ -54,7 +54,7 @@ use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
use terminal_view::terminal_panel::TerminalPanel;
use text::{OffsetRangeExt, ToPoint as _};
use theme::ThemeSettings;
use ui::{prelude::*, text_for_action, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
use util::{RangeExt, ResultExt};
use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace};
@@ -446,14 +446,15 @@ impl InlineAssistant {
let assist_blocks = vec![
BlockProperties {
style: BlockStyle::Sticky,
placement: BlockPlacement::Above(range.start),
position: range.start,
height: prompt_editor_height,
render: build_assist_editor_renderer(prompt_editor),
disposition: BlockDisposition::Above,
priority: 0,
},
BlockProperties {
style: BlockStyle::Sticky,
placement: BlockPlacement::Below(range.end),
position: range.end,
height: 0,
render: Box::new(|cx| {
v_flex()
@@ -463,6 +464,7 @@ impl InlineAssistant {
.border_color(cx.theme().status().info_border)
.into_any_element()
}),
disposition: BlockDisposition::Below,
priority: 0,
},
];
@@ -1177,7 +1179,7 @@ impl InlineAssistant {
let height =
deleted_lines_editor.update(cx, |editor, cx| editor.max_point(cx).row().0 + 1);
new_blocks.push(BlockProperties {
placement: BlockPlacement::Above(new_row),
position: new_row,
height,
style: BlockStyle::Flex,
render: Box::new(move |cx| {
@@ -1189,6 +1191,7 @@ impl InlineAssistant {
.child(deleted_lines_editor.clone())
.into_any_element()
}),
disposition: BlockDisposition::Above,
priority: 0,
});
}
@@ -1596,7 +1599,7 @@ impl PromptEditor {
// always show the cursor (even when it isn't focused) because
// typing in one will make what you typed appear in all of them.
editor.set_show_cursor_when_unfocused(true, cx);
editor.set_placeholder_text(Self::placeholder_text(codegen.read(cx), cx), cx);
editor.set_placeholder_text("Add a prompt…", cx);
editor
});
@@ -1653,7 +1656,6 @@ impl PromptEditor {
self.editor = cx.new_view(|cx| {
let mut editor = Editor::auto_height(Self::MAX_LINES as usize, cx);
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx);
editor.set_placeholder_text(Self::placeholder_text(self.codegen.read(cx), cx), cx);
editor.set_placeholder_text("Add a prompt…", cx);
editor.set_text(prompt, cx);
if focus {
@@ -1664,20 +1666,6 @@ impl PromptEditor {
self.subscribe_to_editor(cx);
}
fn placeholder_text(codegen: &Codegen, cx: &WindowContext) -> String {
let context_keybinding = text_for_action(&crate::ToggleFocus, cx)
.map(|keybinding| format!("{keybinding} for context"))
.unwrap_or_default();
let action = if codegen.is_insertion {
"Generate"
} else {
"Transform"
};
format!("{action}{context_keybinding} • ↓↑ for history")
}
fn prompt(&self, cx: &AppContext) -> String {
self.editor.read(cx).text(cx)
}
@@ -2268,14 +2256,12 @@ pub enum CodegenEvent {
pub struct Codegen {
alternatives: Vec<Model<CodegenAlternative>>,
active_alternative: usize,
seen_alternatives: HashSet<usize>,
subscriptions: Vec<Subscription>,
buffer: Model<MultiBuffer>,
range: Range<Anchor>,
initial_transaction_id: Option<TransactionId>,
telemetry: Option<Arc<Telemetry>>,
builder: Arc<PromptBuilder>,
is_insertion: bool,
}
impl Codegen {
@@ -2298,10 +2284,8 @@ impl Codegen {
)
});
let mut this = Self {
is_insertion: range.to_offset(&buffer.read(cx).snapshot(cx)).is_empty(),
alternatives: vec![codegen],
active_alternative: 0,
seen_alternatives: HashSet::default(),
subscriptions: Vec::new(),
buffer,
range,
@@ -2354,7 +2338,6 @@ impl Codegen {
fn activate(&mut self, index: usize, cx: &mut ModelContext<Self>) {
self.active_alternative()
.update(cx, |codegen, cx| codegen.set_active(false, cx));
self.seen_alternatives.insert(index);
self.active_alternative = index;
self.active_alternative()
.update(cx, |codegen, cx| codegen.set_active(true, cx));
@@ -2484,8 +2467,6 @@ pub struct CodegenAlternative {
active: bool,
edits: Vec<(Range<Anchor>, String)>,
line_operations: Vec<LineOperation>,
request: Option<LanguageModelRequest>,
elapsed_time: Option<f64>,
}
enum CodegenStatus {
@@ -2557,8 +2538,6 @@ impl CodegenAlternative {
edits: Vec::new(),
line_operations: Vec::new(),
range,
request: None,
elapsed_time: None,
}
}
@@ -2655,7 +2634,6 @@ impl CodegenAlternative {
async { Ok(stream::empty().boxed()) }.boxed_local()
} else {
let request = self.build_request(user_prompt, assistant_panel_context, cx)?;
self.request = Some(request.clone());
let chunks = cx
.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await });
@@ -2700,7 +2678,7 @@ impl CodegenAlternative {
let prompt = self
.builder
.generate_inline_transformation_prompt(user_prompt, language_name, buffer, range)
.generate_content_prompt(user_prompt, language_name, buffer, range)
.map_err(|e| anyhow::anyhow!("Failed to generate content prompt: {}", e))?;
let mut messages = Vec::new();
@@ -2729,7 +2707,6 @@ impl CodegenAlternative {
stream: impl 'static + Future<Output = Result<BoxStream<'static, Result<String>>>>,
cx: &mut ModelContext<Self>,
) {
let start_time = Instant::now();
let snapshot = self.snapshot.clone();
let selected_text = snapshot
.text_for_range(self.range.start..self.range.end)
@@ -2946,8 +2923,6 @@ impl CodegenAlternative {
};
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
codegen
.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
@@ -2956,7 +2931,6 @@ impl CodegenAlternative {
} else {
this.status = CodegenStatus::Done;
}
this.elapsed_time = Some(elapsed_time);
cx.emit(CodegenEvent::Finished);
cx.notify();
})
@@ -3303,10 +3277,6 @@ impl CodeActionProvider for AssistantCodeActionProvider {
range: Range<text::Anchor>,
cx: &mut WindowContext,
) -> Task<Result<Vec<CodeAction>>> {
if !AssistantSettings::get_global(cx).enabled {
return Task::ready(Ok(Vec::new()));
}
let snapshot = buffer.read(cx).snapshot();
let mut range = range.to_point(&snapshot);

@@ -158,34 +158,39 @@ impl PickerDelegate for ModelPickerDelegate {
.spacing(ListItemSpacing::Sparse)
.selected(selected)
.start_slot(
div().pr_0p5().child(
div().pr_1().child(
Icon::new(model_info.icon)
.color(Color::Muted)
.size(IconSize::Medium),
),
)
.child(
h_flex().w_full().justify_between().min_w(px(200.)).child(
h_flex()
.gap_1p5()
.child(Label::new(model_info.model.name().0.clone()))
.child(
Label::new(provider_name)
.size(LabelSize::XSmall)
.color(Color::Muted),
)
.children(match model_info.availability {
LanguageModelAvailability::Public => None,
LanguageModelAvailability::RequiresPlan(Plan::Free) => None,
LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => {
show_badges.then(|| {
Label::new("Pro")
.size(LabelSize::XSmall)
.color(Color::Muted)
})
}
}),
),
h_flex()
.w_full()
.justify_between()
.font_buffer(cx)
.min_w(px(240.))
.child(
h_flex()
.gap_2()
.child(Label::new(model_info.model.name().0.clone()))
.child(
Label::new(provider_name)
.size(LabelSize::XSmall)
.color(Color::Muted),
)
.children(match model_info.availability {
LanguageModelAvailability::Public => None,
LanguageModelAvailability::RequiresPlan(Plan::Free) => None,
LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => {
show_badges.then(|| {
Label::new("Pro")
.size(LabelSize::XSmall)
.color(Color::Muted)
})
}
}),
),
)
.end_slot(div().when(model_info.is_selected, |this| {
this.child(
@@ -207,7 +212,7 @@ impl PickerDelegate for ModelPickerDelegate {
h_flex()
.w_full()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.border_color(cx.theme().colors().border)
.p_1()
.gap_4()
.justify_between()

View File

@@ -33,21 +33,21 @@ pub enum AssistantEditKind {
Update {
old_text: String,
new_text: String,
description: Option<String>,
description: String,
},
Create {
new_text: String,
description: Option<String>,
description: String,
},
InsertBefore {
old_text: String,
new_text: String,
description: Option<String>,
description: String,
},
InsertAfter {
old_text: String,
new_text: String,
description: Option<String>,
description: String,
},
Delete {
old_text: String,
@@ -86,37 +86,19 @@ enum SearchDirection {
Diagonal,
}
// A measure of the current quality of an in-progress fuzzy search.
//
// Uses 60 bits to store a numeric cost, and 4 bits to store the preceding
// operation in the search.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct SearchState {
cost: u32,
score: u32,
direction: SearchDirection,
}
impl SearchState {
fn new(cost: u32, direction: SearchDirection) -> Self {
Self { cost, direction }
}
}
struct SearchMatrix {
cols: usize,
data: Vec<SearchState>,
}
impl SearchMatrix {
fn new(rows: usize, cols: usize) -> Self {
SearchMatrix {
cols,
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
}
}
fn get(&self, row: usize, col: usize) -> SearchState {
self.data[row * self.cols + col]
}
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
self.data[row * self.cols + col] = cost;
fn new(score: u32, direction: SearchDirection) -> Self {
Self { score, direction }
}
}
@@ -164,28 +146,12 @@ impl ResolvedEdit {
return false;
}
let other_offset_range = other_range.to_offset(buffer);
let offset_range = range.to_offset(buffer);
// If the other range is empty at the start of this edit's range, combine the new text
if other_offset_range.is_empty() && other_offset_range.start == offset_range.start {
self.new_text = format!("{}\n{}", other.new_text, self.new_text);
self.range.start = other_range.start;
if let Some((description, other_description)) =
self.description.as_mut().zip(other.description.as_ref())
{
*description = format!("{}\n{}", other_description, description)
}
} else {
if let Some((description, other_description)) =
self.description.as_mut().zip(other.description.as_ref())
{
if let Some(description) = &mut self.description {
if let Some(other_description) = &other.description {
description.push('\n');
description.push_str(other_description);
}
}
true
}
}
@@ -205,23 +171,23 @@ impl AssistantEdit {
"update" => AssistantEditKind::Update {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
},
"insert_before" => AssistantEditKind::InsertBefore {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
},
"insert_after" => AssistantEditKind::InsertAfter {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
},
"delete" => AssistantEditKind::Delete {
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
},
"create" => AssistantEditKind::Create {
description,
description: description.ok_or_else(|| anyhow!("missing description"))?,
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
},
_ => Err(anyhow!("unknown operation {operation:?}"))?,
@@ -282,7 +248,7 @@ impl AssistantEditKind {
ResolvedEdit {
range,
new_text,
description,
description: Some(description),
}
}
Self::Create {
@@ -290,7 +256,7 @@ impl AssistantEditKind {
description,
} => ResolvedEdit {
range: text::Anchor::MIN..text::Anchor::MAX,
description,
description: Some(description),
new_text,
},
Self::InsertBefore {
@@ -303,7 +269,7 @@ impl AssistantEditKind {
ResolvedEdit {
range: range.start..range.start,
new_text,
description,
description: Some(description),
}
}
Self::InsertAfter {
@@ -316,7 +282,7 @@ impl AssistantEditKind {
ResolvedEdit {
range: range.end..range.end,
new_text,
description,
description: Some(description),
}
}
Self::Delete { old_text } => {
@@ -332,29 +298,44 @@ impl AssistantEditKind {
fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range<text::Anchor> {
const INSERTION_COST: u32 = 3;
const DELETION_COST: u32 = 10;
const WHITESPACE_INSERTION_COST: u32 = 1;
const DELETION_COST: u32 = 3;
const WHITESPACE_DELETION_COST: u32 = 1;
const EQUALITY_BONUS: u32 = 5;
struct Matrix {
cols: usize,
data: Vec<SearchState>,
}
impl Matrix {
fn new(rows: usize, cols: usize) -> Self {
Matrix {
cols,
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
}
}
fn get(&self, row: usize, col: usize) -> SearchState {
self.data[row * self.cols + col]
}
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
self.data[row * self.cols + col] = cost;
}
}
let buffer_len = buffer.len();
let query_len = search_query.len();
let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1);
let mut leading_deletion_cost = 0_u32;
let mut matrix = Matrix::new(query_len + 1, buffer_len + 1);
for (row, query_byte) in search_query.bytes().enumerate() {
let deletion_cost = if query_byte.is_ascii_whitespace() {
WHITESPACE_DELETION_COST
} else {
DELETION_COST
};
leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost);
matrix.set(
row + 1,
0,
SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
);
for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() {
let deletion_cost = if query_byte.is_ascii_whitespace() {
WHITESPACE_DELETION_COST
} else {
DELETION_COST
};
let insertion_cost = if buffer_byte.is_ascii_whitespace() {
WHITESPACE_INSERTION_COST
} else {
@@ -362,35 +343,38 @@ impl AssistantEditKind {
};
let up = SearchState::new(
matrix.get(row, col + 1).cost.saturating_add(deletion_cost),
matrix.get(row, col + 1).score.saturating_sub(deletion_cost),
SearchDirection::Up,
);
let left = SearchState::new(
matrix.get(row + 1, col).cost.saturating_add(insertion_cost),
matrix
.get(row + 1, col)
.score
.saturating_sub(insertion_cost),
SearchDirection::Left,
);
let diagonal = SearchState::new(
if query_byte == *buffer_byte {
matrix.get(row, col).cost
matrix.get(row, col).score.saturating_add(EQUALITY_BONUS)
} else {
matrix
.get(row, col)
.cost
.saturating_add(deletion_cost + insertion_cost)
.score
.saturating_sub(deletion_cost + insertion_cost)
},
SearchDirection::Diagonal,
);
matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
matrix.set(row + 1, col + 1, up.max(left).max(diagonal));
}
}
// Traceback to find the best match
let mut best_buffer_end = buffer_len;
let mut best_cost = u32::MAX;
let mut best_score = 0;
for col in 1..=buffer_len {
let cost = matrix.get(query_len, col).cost;
if cost < best_cost {
best_cost = cost;
let score = matrix.get(query_len, col).score;
if score > best_score {
best_score = score;
best_buffer_end = col;
}
}
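The two sides of this hunk differ in how the fuzzy matcher keeps score: one minimizes an accumulated cost, the other maximizes a score with an EQUALITY_BONUS for exact byte matches and saturating penalties for insertions and deletions. A rough standalone sketch of the score-maximizing formulation, using the constants shown above but simplified to plain strings and returning only the end offset of the best match (the real code also seeds the first column with leading deletion penalties and walks the stored directions back to find the match start), might look like this:

#[derive(Copy, Clone)]
enum Dir {
    Up,
    Left,
    Diagonal,
}

#[derive(Copy, Clone)]
struct State {
    score: u32,
    direction: Dir,
}

struct Matrix {
    cols: usize,
    data: Vec<State>,
}

impl Matrix {
    fn new(rows: usize, cols: usize) -> Self {
        Matrix {
            cols,
            data: vec![State { score: 0, direction: Dir::Diagonal }; rows * cols],
        }
    }

    fn get(&self, row: usize, col: usize) -> State {
        self.data[row * self.cols + col]
    }

    fn set(&mut self, row: usize, col: usize, state: State) {
        self.data[row * self.cols + col] = state;
    }
}

// Returns the byte offset at which the best fuzzy occurrence of `query` ends in `haystack`.
fn best_match_end(query: &str, haystack: &str) -> usize {
    const INSERTION_COST: u32 = 3;
    const WHITESPACE_INSERTION_COST: u32 = 1;
    const DELETION_COST: u32 = 3;
    const WHITESPACE_DELETION_COST: u32 = 1;
    const EQUALITY_BONUS: u32 = 5;

    let mut matrix = Matrix::new(query.len() + 1, haystack.len() + 1);
    for (row, query_byte) in query.bytes().enumerate() {
        let deletion_cost = if query_byte.is_ascii_whitespace() {
            WHITESPACE_DELETION_COST
        } else {
            DELETION_COST
        };
        for (col, buffer_byte) in haystack.bytes().enumerate() {
            let insertion_cost = if buffer_byte.is_ascii_whitespace() {
                WHITESPACE_INSERTION_COST
            } else {
                INSERTION_COST
            };
            // Three moves: skip a query byte (up), skip a buffer byte (left),
            // or consume one byte from each (diagonal).
            let up = matrix.get(row, col + 1).score.saturating_sub(deletion_cost);
            let left = matrix.get(row + 1, col).score.saturating_sub(insertion_cost);
            let diagonal = if query_byte == buffer_byte {
                matrix.get(row, col).score.saturating_add(EQUALITY_BONUS)
            } else {
                matrix
                    .get(row, col)
                    .score
                    .saturating_sub(deletion_cost + insertion_cost)
            };
            let (score, direction) = [(up, Dir::Up), (left, Dir::Left), (diagonal, Dir::Diagonal)]
                .into_iter()
                .max_by_key(|(score, _)| *score)
                .unwrap();
            matrix.set(row + 1, col + 1, State { score, direction });
        }
    }

    // The match ends wherever the last row's score peaks; a full implementation
    // then traces the stored directions backwards to recover the match start.
    (1..=haystack.len())
        .max_by_key(|&col| matrix.get(query.len(), col).score)
        .unwrap_or(haystack.len())
}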
@@ -560,84 +544,89 @@ mod tests {
language_settings::AllLanguageSettings, Language, LanguageConfig, LanguageMatcher,
};
use settings::SettingsStore;
use text::{OffsetRangeExt, Point};
use ui::BorrowAppContext;
use unindent::Unindent as _;
use util::test::{generate_marked_text, marked_text_ranges};
#[gpui::test]
fn test_resolve_location(cx: &mut AppContext) {
assert_location_resolution(
concat!(
" Lorem\n",
"« ipsum\n",
" dolor sit amet»\n",
" consecteur",
),
"ipsum\ndolor",
cx,
);
{
let buffer = cx.new_model(|cx| {
Buffer::local(
concat!(
" Lorem\n",
" ipsum\n",
" dolor sit amet\n",
" consecteur",
),
cx,
)
});
let snapshot = buffer.read(cx).snapshot();
assert_eq!(
AssistantEditKind::resolve_location(&snapshot, "ipsum\ndolor").to_point(&snapshot),
Point::new(1, 0)..Point::new(2, 18)
);
}
assert_location_resolution(
&"
«fn foo1(a: usize) -> usize {
40
{
let buffer = cx.new_model(|cx| {
Buffer::local(
concat!(
"fn foo1(a: usize) -> usize {\n",
" 40\n",
"}\n",
"\n",
"fn foo2(b: usize) -> usize {\n",
" 42\n",
"}\n",
),
cx,
)
});
let snapshot = buffer.read(cx).snapshot();
assert_eq!(
AssistantEditKind::resolve_location(&snapshot, "fn foo1(b: usize) {\n40\n}")
.to_point(&snapshot),
Point::new(0, 0)..Point::new(2, 1)
);
}
fn foo2(b: usize) -> usize {
42
}
"
.unindent(),
"fn foo1(b: usize) {\n40\n}",
cx,
);
assert_location_resolution(
&"
fn main() {
« Foo
.bar()
.baz()
.qux()»
}
fn foo2(b: usize) -> usize {
42
}
"
.unindent(),
"Foo.bar.baz.qux()",
cx,
);
assert_location_resolution(
&"
class Something {
one() { return 1; }
« two() { return 2222; }
three() { return 333; }
four() { return 4444; }
five() { return 5555; }
six() { return 6666; }
» seven() { return 7; }
eight() { return 8; }
}
"
.unindent(),
&"
two() { return 2222; }
four() { return 4444; }
five() { return 5555; }
six() { return 6666; }
"
.unindent(),
cx,
);
{
let buffer = cx.new_model(|cx| {
Buffer::local(
concat!(
"fn main() {\n",
" Foo\n",
" .bar()\n",
" .baz()\n",
" .qux()\n",
"}\n",
"\n",
"fn foo2(b: usize) -> usize {\n",
" 42\n",
"}\n",
),
cx,
)
});
let snapshot = buffer.read(cx).snapshot();
assert_eq!(
AssistantEditKind::resolve_location(&snapshot, "Foo.bar.baz.qux()")
.to_point(&snapshot),
Point::new(1, 0)..Point::new(4, 14)
);
}
}
#[gpui::test]
fn test_resolve_edits(cx: &mut AppContext) {
init_test(cx);
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
cx.update_global::<SettingsStore, _>(|settings, cx| {
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
});
assert_edits(
"
@@ -670,7 +659,7 @@ mod tests {
last_name: String,
"
.unindent(),
description: None,
description: "".into(),
},
AssistantEditKind::Update {
old_text: "
@@ -685,7 +674,7 @@ mod tests {
}
"
.unindent(),
description: None,
description: "".into(),
},
],
"
@@ -710,219 +699,6 @@ mod tests {
.unindent(),
cx,
);
// Ensure InsertBefore merges correctly with Update of the same text
assert_edits(
"
fn foo() {
}
"
.unindent(),
vec![
AssistantEditKind::InsertBefore {
old_text: "
fn foo() {"
.unindent(),
new_text: "
fn bar() {
qux();
}"
.unindent(),
description: Some("implement bar".into()),
},
AssistantEditKind::Update {
old_text: "
fn foo() {
}"
.unindent(),
new_text: "
fn foo() {
bar();
}"
.unindent(),
description: Some("call bar in foo".into()),
},
AssistantEditKind::InsertAfter {
old_text: "
fn foo() {
}
"
.unindent(),
new_text: "
fn qux() {
// todo
}
"
.unindent(),
description: Some("implement qux".into()),
},
],
"
fn bar() {
qux();
}
fn foo() {
bar();
}
fn qux() {
// todo
}
"
.unindent(),
cx,
);
// Correctly indent new text when replacing multiple adjacent indented blocks.
assert_edits(
"
impl Numbers {
fn one() {
1
}
fn two() {
2
}
fn three() {
3
}
}
"
.unindent(),
vec![
AssistantEditKind::Update {
old_text: "
fn one() {
1
}
"
.unindent(),
new_text: "
fn one() {
101
}
"
.unindent(),
description: None,
},
AssistantEditKind::Update {
old_text: "
fn two() {
2
}
"
.unindent(),
new_text: "
fn two() {
102
}
"
.unindent(),
description: None,
},
AssistantEditKind::Update {
old_text: "
fn three() {
3
}
"
.unindent(),
new_text: "
fn three() {
103
}
"
.unindent(),
description: None,
},
],
"
impl Numbers {
fn one() {
101
}
fn two() {
102
}
fn three() {
103
}
}
"
.unindent(),
cx,
);
assert_edits(
"
impl Person {
fn set_name(&mut self, name: String) {
self.name = name;
}
fn name(&self) -> String {
return self.name;
}
}
"
.unindent(),
vec![
AssistantEditKind::Update {
old_text: "self.name = name;".unindent(),
new_text: "self._name = name;".unindent(),
description: None,
},
AssistantEditKind::Update {
old_text: "return self.name;\n".unindent(),
new_text: "return self._name;\n".unindent(),
description: None,
},
],
"
impl Person {
fn set_name(&mut self, name: String) {
self._name = name;
}
fn name(&self) -> String {
return self._name;
}
}
"
.unindent(),
cx,
);
}
fn init_test(cx: &mut AppContext) {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
cx.update_global::<SettingsStore, _>(|settings, cx| {
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
});
}
#[track_caller]
fn assert_location_resolution(
text_with_expected_range: &str,
query: &str,
cx: &mut AppContext,
) {
let (text, _) = marked_text_ranges(text_with_expected_range, false);
let buffer = cx.new_model(|cx| Buffer::local(text.clone(), cx));
let snapshot = buffer.read(cx).snapshot();
let range = AssistantEditKind::resolve_location(&snapshot, query).to_offset(&snapshot);
let text_with_actual_range = generate_marked_text(&text, &[range], false);
pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
}
#[track_caller]

View File

@@ -204,7 +204,7 @@ impl PromptBuilder {
Ok(())
}
pub fn generate_inline_transformation_prompt(
pub fn generate_content_prompt(
&self,
user_prompt: String,
language_name: Option<&LanguageName>,

View File

@@ -1,8 +1,7 @@
use super::create_label_for_command;
use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use feature_flags::FeatureFlag;
use futures::StreamExt;
use gpui::{AppContext, AsyncAppContext, Task, WeakView};
@@ -18,8 +17,6 @@ use ui::{BorrowAppContext, WindowContext};
use util::ResultExt;
use workspace::Workspace;
use crate::slash_command::create_label_for_command;
pub struct AutoSlashCommandFeatureFlag;
impl FeatureFlag for AutoSlashCommandFeatureFlag {
@@ -95,7 +92,7 @@ impl SlashCommand for AutoCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
@@ -147,8 +144,7 @@ impl SlashCommand for AutoCommand {
text: prompt,
sections: Vec::new(),
run_commands_in_text: true,
}
.to_event_stream())
})
})
}
}

View File

@@ -1,8 +1,6 @@
use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use fs::Fs;
use gpui::{AppContext, Model, Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
@@ -125,7 +123,7 @@ impl SlashCommand for CargoWorkspaceSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let output = workspace.update(cx, |workspace, cx| {
let project = workspace.project().clone();
let fs = workspace.project().read(cx).fs().clone();
@@ -147,8 +145,7 @@ impl SlashCommand for CargoWorkspaceSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream())
})
})
});
output.unwrap_or_else(|error| Task::ready(Err(error)))

View File

@@ -1,7 +1,8 @@
use super::create_label_for_command;
use anyhow::{anyhow, Result};
use assistant_slash_command::{
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput,
SlashCommandOutputSection, SlashCommandResult,
SlashCommandOutputSection,
};
use collections::HashMap;
use context_servers::{
@@ -16,8 +17,6 @@ use text::LineEnding;
use ui::{IconName, SharedString};
use workspace::Workspace;
use crate::slash_command::create_label_for_command;
pub struct ContextServerSlashCommand {
server_id: String,
prompt: Prompt,
@@ -129,7 +128,7 @@ impl SlashCommand for ContextServerSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let server_id = self.server_id.clone();
let prompt_name = self.prompt.name.clone();
@@ -146,28 +145,7 @@ impl SlashCommand for ContextServerSlashCommand {
return Err(anyhow!("Context server not initialized"));
};
let result = protocol.run_prompt(&prompt_name, prompt_args).await?;
// Check that there are only user roles
if result
.messages
.iter()
.any(|msg| !matches!(msg.role, context_servers::types::SamplingRole::User))
{
return Err(anyhow!(
"Prompt contains non-user roles, which is not supported"
));
}
// Extract text from user messages into a single prompt string
let mut prompt = result
.messages
.into_iter()
.filter_map(|msg| match msg.content {
context_servers::types::SamplingContent::Text { text } => Some(text),
_ => None,
})
.collect::<Vec<String>>()
.join("\n\n");
let mut prompt = result.prompt;
// We must normalize the line endings here, since servers might return CR characters.
LineEnding::normalize(&mut prompt);
@@ -185,8 +163,7 @@ impl SlashCommand for ContextServerSlashCommand {
}],
text: prompt,
run_commands_in_text: false,
}
.to_event_stream())
})
})
} else {
Task::ready(Err(anyhow!("Context server not found")))

View File

@@ -1,9 +1,7 @@
use super::{SlashCommand, SlashCommandOutput};
use crate::prompt_library::PromptStore;
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use std::{
@@ -50,7 +48,7 @@ impl SlashCommand for DefaultSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let store = PromptStore::global(cx);
cx.background_executor().spawn(async move {
let store = store.await?;
@@ -78,8 +76,7 @@ impl SlashCommand for DefaultSlashCommand {
}],
text,
run_commands_in_text: true,
}
.to_event_stream())
})
})
}
}

View File

@@ -1,8 +1,7 @@
use crate::slash_command::file_command::{FileCommandMetadata, FileSlashCommand};
use anyhow::{anyhow, Result};
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use collections::HashSet;
use futures::future;
@@ -38,7 +37,7 @@ impl SlashCommand for DeltaSlashCommand {
_workspace: Option<WeakView<Workspace>>,
_cx: &mut WindowContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Err(anyhow!("this command does not require argument")))
unimplemented!()
}
fn run(
@@ -49,7 +48,7 @@ impl SlashCommand for DeltaSlashCommand {
workspace: WeakView<Workspace>,
delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let mut paths = HashSet::default();
let mut file_command_old_outputs = Vec::new();
let mut file_command_new_outputs = Vec::new();
@@ -86,28 +85,25 @@ impl SlashCommand for DeltaSlashCommand {
.zip(file_command_new_outputs)
{
if let Ok(new_output) = new_output {
if let Ok(new_output) = SlashCommandOutput::from_event_stream(new_output).await
{
if let Some(file_command_range) = new_output.sections.first() {
let new_text = &new_output.text[file_command_range.range.clone()];
if old_text.chars().ne(new_text.chars()) {
output.sections.extend(new_output.sections.into_iter().map(
|section| SlashCommandOutputSection {
range: output.text.len() + section.range.start
..output.text.len() + section.range.end,
icon: section.icon,
label: section.label,
metadata: section.metadata,
},
));
output.text.push_str(&new_output.text);
}
if let Some(file_command_range) = new_output.sections.first() {
let new_text = &new_output.text[file_command_range.range.clone()];
if old_text.chars().ne(new_text.chars()) {
output.sections.extend(new_output.sections.into_iter().map(
|section| SlashCommandOutputSection {
range: output.text.len() + section.range.start
..output.text.len() + section.range.end,
icon: section.icon,
label: section.label,
metadata: section.metadata,
},
));
output.text.push_str(&new_output.text);
}
}
}
}
Ok(output.to_event_stream())
Ok(output)
})
}
}

View File

@@ -1,8 +1,6 @@
use super::{create_label_for_command, SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use fuzzy::{PathMatch, StringMatchCandidate};
use gpui::{AppContext, Model, Task, View, WeakView};
use language::{
@@ -21,8 +19,6 @@ use util::paths::PathMatcher;
use util::ResultExt;
use workspace::Workspace;
use crate::slash_command::create_label_for_command;
pub(crate) struct DiagnosticsSlashCommand;
impl DiagnosticsSlashCommand {
@@ -171,7 +167,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow!("workspace was dropped")));
};
@@ -180,11 +176,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
cx.spawn(move |_| async move {
task.await?
.map(|output| output.to_event_stream())
.ok_or_else(|| anyhow!("No diagnostics found"))
})
cx.spawn(move |_| async move { task.await?.ok_or_else(|| anyhow!("No diagnostics found")) })
}
}

View File

@@ -6,7 +6,6 @@ use std::time::Duration;
use anyhow::{anyhow, bail, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use gpui::{AppContext, BackgroundExecutor, Model, Task, WeakView};
use indexed_docs::{
@@ -275,7 +274,7 @@ impl SlashCommand for DocsSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
if arguments.is_empty() {
return Task::ready(Err(anyhow!("missing an argument")));
};
@@ -356,8 +355,7 @@ impl SlashCommand for DocsSlashCommand {
})
.collect(),
run_commands_in_text: false,
}
.to_event_stream())
})
})
}
}

View File

@@ -6,7 +6,6 @@ use std::sync::Arc;
use anyhow::{anyhow, bail, Context, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use futures::AsyncReadExt;
use gpui::{Task, WeakView};
@@ -134,7 +133,7 @@ impl SlashCommand for FetchSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(argument) = arguments.first() else {
return Task::ready(Err(anyhow!("missing URL")));
};
@@ -167,8 +166,7 @@ impl SlashCommand for FetchSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream())
})
})
}
}

View File

@@ -1,16 +1,11 @@
use super::{diagnostics_command::collect_buffer_diagnostics, SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandContent, SlashCommandEvent,
SlashCommandOutput, SlashCommandOutputSection, SlashCommandResult,
};
use futures::channel::mpsc;
use futures::Stream;
use assistant_slash_command::{AfterCompletion, ArgumentCompletion, SlashCommandOutputSection};
use fuzzy::PathMatch;
use gpui::{AppContext, Model, Task, View, WeakView};
use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate};
use project::{PathMatchCandidateSet, Project};
use serde::{Deserialize, Serialize};
use smol::stream::StreamExt;
use std::{
fmt::Write,
ops::{Range, RangeInclusive},
@@ -21,8 +16,6 @@ use ui::prelude::*;
use util::ResultExt;
use workspace::Workspace;
use crate::slash_command::diagnostics_command::collect_buffer_diagnostics;
pub(crate) struct FileSlashCommand;
impl FileSlashCommand {
@@ -188,7 +181,7 @@ impl SlashCommand for FileSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow!("workspace was dropped")));
};
@@ -197,12 +190,7 @@ impl SlashCommand for FileSlashCommand {
return Task::ready(Err(anyhow!("missing path")));
};
Task::ready(Ok(collect_files(
workspace.read(cx).project().clone(),
arguments,
cx,
)
.boxed()))
collect_files(workspace.read(cx).project().clone(), arguments, cx)
}
}
@@ -210,7 +198,7 @@ fn collect_files(
project: Model<Project>,
glob_inputs: &[String],
cx: &mut AppContext,
) -> impl Stream<Item = Result<SlashCommandEvent>> {
) -> Task<Result<SlashCommandOutput>> {
let Ok(matchers) = glob_inputs
.into_iter()
.map(|glob_input| {
@@ -219,7 +207,7 @@ fn collect_files(
})
.collect::<anyhow::Result<Vec<custom_path_matcher::PathMatcher>>>()
else {
return futures::stream::once(async { Err(anyhow!("invalid path")) }).boxed();
return Task::ready(Err(anyhow!("invalid path")));
};
let project_handle = project.downgrade();
@@ -229,11 +217,11 @@ fn collect_files(
.map(|worktree| worktree.read(cx).snapshot())
.collect::<Vec<_>>();
let (events_tx, events_rx) = mpsc::unbounded();
cx.spawn(|mut cx| async move {
let mut output = SlashCommandOutput::default();
for snapshot in snapshots {
let worktree_id = snapshot.id();
let mut directory_stack: Vec<Arc<Path>> = Vec::new();
let mut directory_stack: Vec<(Arc<Path>, String, usize)> = Vec::new();
let mut folded_directory_names_stack = Vec::new();
let mut is_top_level_directory = true;
@@ -249,19 +237,17 @@ fn collect_files(
continue;
}
while let Some(dir) = directory_stack.last() {
while let Some((dir, _, _)) = directory_stack.last() {
if entry.path.starts_with(dir) {
break;
}
directory_stack.pop().unwrap();
events_tx
.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?;
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false,
},
)))?;
let (_, entry_name, start) = directory_stack.pop().unwrap();
output.sections.push(build_entry_output_section(
start..output.text.len().saturating_sub(1),
Some(&PathBuf::from(entry_name)),
true,
None,
));
}
let filename = entry
@@ -293,46 +279,23 @@ fn collect_files(
continue;
}
let prefix_paths = folded_directory_names_stack.drain(..).as_slice().join("/");
let entry_start = output.text.len();
if prefix_paths.is_empty() {
let label = if is_top_level_directory {
if is_top_level_directory {
output
.text
.push_str(&path_including_worktree_name.to_string_lossy());
is_top_level_directory = false;
path_including_worktree_name.to_string_lossy().to_string()
} else {
filename
};
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
icon: IconName::Folder,
label: label.clone().into(),
metadata: None,
}))?;
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: label,
run_commands_in_text: false,
},
)))?;
directory_stack.push(entry.path.clone());
output.text.push_str(&filename);
}
directory_stack.push((entry.path.clone(), filename, entry_start));
} else {
let entry_name = format!("{}/{}", prefix_paths, &filename);
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
icon: IconName::Folder,
label: entry_name.clone().into(),
metadata: None,
}))?;
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: entry_name,
run_commands_in_text: false,
},
)))?;
directory_stack.push(entry.path.clone());
output.text.push_str(&entry_name);
directory_stack.push((entry.path.clone(), entry_name, entry_start));
}
events_tx.unbounded_send(Ok(SlashCommandEvent::Content(
SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false,
},
)))?;
output.text.push('\n');
} else if entry.is_file() {
let Some(open_buffer_task) = project_handle
.update(&mut cx, |project, cx| {
@@ -343,7 +306,6 @@ fn collect_files(
continue;
};
if let Some(buffer) = open_buffer_task.await.log_err() {
let mut output = SlashCommandOutput::default();
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
append_buffer_to_output(
&snapshot,
@@ -351,24 +313,33 @@ fn collect_files(
&mut output,
)
.log_err();
let mut buffer_events = output.to_event_stream();
while let Some(event) = buffer_events.next().await {
events_tx.unbounded_send(event)?;
}
}
}
}
while let Some(_) = directory_stack.pop() {
events_tx.unbounded_send(Ok(SlashCommandEvent::EndSection { metadata: None }))?;
while let Some((dir, entry, start)) = directory_stack.pop() {
if directory_stack.is_empty() {
let mut root_path = PathBuf::new();
root_path.push(snapshot.root_name());
root_path.push(&dir);
output.sections.push(build_entry_output_section(
start..output.text.len(),
Some(&root_path),
true,
None,
));
} else {
output.sections.push(build_entry_output_section(
start..output.text.len(),
Some(&PathBuf::from(entry.as_str())),
true,
None,
));
}
}
}
anyhow::Ok(())
Ok(output)
})
.detach_and_log_err(cx);
events_rx.boxed()
}
pub fn codeblock_fence_for_path(
@@ -553,14 +524,11 @@ pub fn append_buffer_to_output(
#[cfg(test)]
mod test {
use assistant_slash_command::SlashCommandOutput;
use fs::FakeFs;
use gpui::TestAppContext;
use pretty_assertions::assert_eq;
use project::Project;
use serde_json::json;
use settings::SettingsStore;
use smol::stream::StreamExt;
use crate::slash_command::file_command::collect_files;
@@ -601,9 +569,8 @@ mod test {
let project = Project::test(fs, ["/root".as_ref()], cx).await;
let result_1 =
cx.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx));
let result_1 = SlashCommandOutput::from_event_stream(result_1.boxed())
let result_1 = cx
.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx))
.await
.unwrap();
@@ -611,17 +578,17 @@ mod test {
// 4 files + 2 directories
assert_eq!(result_1.sections.len(), 6);
let result_2 =
cx.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx));
let result_2 = SlashCommandOutput::from_event_stream(result_2.boxed())
let result_2 = cx
.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx))
.await
.unwrap();
assert_eq!(result_1, result_2);
let result =
cx.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx).boxed());
let result = SlashCommandOutput::from_event_stream(result).await.unwrap();
let result = cx
.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx))
.await
.unwrap();
assert!(result.text.starts_with("root/dir"));
// 5 files + 2 directories
@@ -664,9 +631,8 @@ mod test {
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
let result =
cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
let result = SlashCommandOutput::from_event_stream(result.boxed())
let result = cx
.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
.await
.unwrap();
@@ -726,9 +692,8 @@ mod test {
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
let result =
cx.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx));
let result = SlashCommandOutput::from_event_stream(result.boxed())
let result = cx
.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
.await
.unwrap();
@@ -751,8 +716,6 @@ mod test {
assert_eq!(result.sections[6].label, "summercamp");
assert_eq!(result.sections[7].label, "zed/assets/themes");
assert_eq!(result.text, "zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n");
// Ensure that the project lasts until after the last await
drop(project);
}
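For context on the streaming variant of collect_files shown in this diff: one side pushes SlashCommandEvents through an unbounded channel from a detached background task and returns the receiving end as the command's event stream. A minimal standalone sketch of that producer pattern, with a placeholder Event type and a plain thread standing in for cx.spawn(...).detach_and_log_err(cx), could look like this:

use anyhow::Result;
use futures::{channel::mpsc, stream::BoxStream, StreamExt};

#[derive(Debug)]
enum Event {
    StartSection { label: String },
    Text(String),
    EndSection,
}

// Emits a StartSection/Text/EndSection triple per entry, mirroring how the
// streaming collect_files reports each directory it visits.
fn stream_entries(names: Vec<String>) -> BoxStream<'static, Result<Event>> {
    let (events_tx, events_rx) = mpsc::unbounded();
    std::thread::spawn(move || {
        for name in names {
            if events_tx
                .unbounded_send(Ok(Event::StartSection { label: name.clone() }))
                .is_err()
            {
                return; // the receiver was dropped; stop producing
            }
            let _ = events_tx.unbounded_send(Ok(Event::Text(format!("{name}\n"))));
            let _ = events_tx.unbounded_send(Ok(Event::EndSection));
        }
        // Dropping events_tx here closes the channel and ends the stream.
    });
    events_rx.boxed()
}

SlashCommandOutput::from_event_stream, which appears later in this diff, collapses such a stream back into a sectioned SlashCommandOutput; the delta and tab commands above rely on that round trip.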

View File

@@ -4,7 +4,6 @@ use std::sync::Arc;
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use chrono::Local;
use gpui::{Task, WeakView};
@@ -49,7 +48,7 @@ impl SlashCommand for NowSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
_cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let now = Local::now();
let text = format!("Today is {now}.", now = now.to_rfc2822());
let range = 0..text.len();
@@ -63,7 +62,6 @@ impl SlashCommand for NowSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream()))
}))
}
}

View File

@@ -4,7 +4,7 @@ use super::{
};
use crate::PromptBuilder;
use anyhow::{anyhow, Result};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection, SlashCommandResult};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use feature_flags::FeatureFlag;
use gpui::{AppContext, Task, WeakView, WindowContext};
use language::{Anchor, CodeLabel, LspAdapterDelegate};
@@ -76,7 +76,7 @@ impl SlashCommand for ProjectSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let model_registry = LanguageModelRegistry::read_global(cx);
let current_model = model_registry.active_model();
let prompt_builder = self.prompt_builder.clone();
@@ -162,8 +162,7 @@ impl SlashCommand for ProjectSlashCommand {
text: output,
sections,
run_commands_in_text: true,
}
.to_event_stream())
})
})
.await
})

View File

@@ -1,9 +1,7 @@
use super::{SlashCommand, SlashCommandOutput};
use crate::prompt_library::PromptStore;
use anyhow::{anyhow, Context, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use std::sync::{atomic::AtomicBool, Arc};
@@ -63,7 +61,7 @@ impl SlashCommand for PromptSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let title = arguments.to_owned().join(" ");
if title.trim().is_empty() {
return Task::ready(Err(anyhow!("missing prompt name")));
@@ -102,8 +100,7 @@ impl SlashCommand for PromptSlashCommand {
metadata: None,
}],
run_commands_in_text: true,
}
.to_event_stream())
})
})
}
}

View File

@@ -1,8 +1,10 @@
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
use super::{
create_label_for_command,
file_command::{build_entry_output_section, codeblock_fence_for_path},
SlashCommand, SlashCommandOutput,
};
use anyhow::Result;
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use feature_flags::FeatureFlag;
use gpui::{AppContext, Task, WeakView};
use language::{CodeLabel, LspAdapterDelegate};
@@ -14,9 +16,6 @@ use std::{
use ui::{prelude::*, IconName};
use workspace::Workspace;
use crate::slash_command::create_label_for_command;
use crate::slash_command::file_command::{build_entry_output_section, codeblock_fence_for_path};
pub(crate) struct SearchSlashCommandFeatureFlag;
impl FeatureFlag for SearchSlashCommandFeatureFlag {
@@ -64,7 +63,7 @@ impl SlashCommand for SearchSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
@@ -130,7 +129,6 @@ impl SlashCommand for SearchSlashCommand {
sections,
run_commands_in_text: false,
}
.to_event_stream()
})
.await;

View File

@@ -1,8 +1,6 @@
use super::{SlashCommand, SlashCommandOutput};
use anyhow::{anyhow, Context as _, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use editor::Editor;
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
@@ -48,7 +46,7 @@ impl SlashCommand for OutlineSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let output = workspace.update(cx, |workspace, cx| {
let Some(active_item) = workspace.active_item(cx) else {
return Task::ready(Err(anyhow!("no active tab")));
@@ -85,8 +83,7 @@ impl SlashCommand for OutlineSlashCommand {
}],
text: outline_text,
run_commands_in_text: false,
}
.to_event_stream())
})
})
});

View File

@@ -1,8 +1,6 @@
use super::{file_command::append_buffer_to_output, SlashCommand, SlashCommandOutput};
use anyhow::{Context, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use collections::{HashMap, HashSet};
use editor::Editor;
use futures::future::join_all;
@@ -16,8 +14,6 @@ use ui::{ActiveTheme, WindowContext};
use util::ResultExt;
use workspace::Workspace;
use crate::slash_command::file_command::append_buffer_to_output;
pub(crate) struct TabSlashCommand;
const ALL_TABS_COMPLETION_ITEM: &str = "all";
@@ -136,7 +132,7 @@ impl SlashCommand for TabSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let tab_items_search = tab_items_for_queries(
Some(workspace),
arguments,
@@ -150,7 +146,7 @@ impl SlashCommand for TabSlashCommand {
for (full_path, buffer, _) in tab_items_search.await? {
append_buffer_to_output(&buffer, full_path.as_deref(), &mut output).log_err();
}
Ok(output.to_event_stream())
Ok(output)
})
}
}

View File

@@ -4,7 +4,6 @@ use std::sync::Arc;
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use gpui::{AppContext, Task, View, WeakView};
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate};
@@ -63,7 +62,7 @@ impl SlashCommand for TerminalSlashCommand {
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let Some(workspace) = workspace.upgrade() else {
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
};
@@ -97,8 +96,7 @@ impl SlashCommand for TerminalSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream()))
}))
}
}

View File

@@ -1,17 +1,17 @@
use std::sync::atomic::AtomicBool;
use crate::prompts::PromptBuilder;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use anyhow::Result;
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
SlashCommandResult,
};
use gpui::{Task, WeakView};
use language::{BufferSnapshot, LspAdapterDelegate};
use ui::prelude::*;
use workspace::Workspace;
use crate::prompts::PromptBuilder;
use workspace::Workspace;
pub(crate) struct WorkflowSlashCommand {
prompt_builder: Arc<PromptBuilder>,
@@ -60,7 +60,7 @@ impl SlashCommand for WorkflowSlashCommand {
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult> {
) -> Task<Result<SlashCommandOutput>> {
let prompt_builder = self.prompt_builder.clone();
cx.spawn(|_cx| async move {
let text = prompt_builder.generate_workflow_prompt()?;
@@ -75,8 +75,7 @@ impl SlashCommand for WorkflowSlashCommand {
metadata: None,
}],
run_commands_in_text: false,
}
.to_event_stream())
})
})
}
}

View File

@@ -178,7 +178,7 @@ impl PickerDelegate for SlashCommandDelegate {
SlashCommandEntry::Info(info) => Some(
ListItem::new(ix)
.inset(true)
.spacing(ListItemSpacing::Dense)
.spacing(ListItemSpacing::Sparse)
.selected(selected)
.child(
h_flex()
@@ -224,7 +224,7 @@ impl PickerDelegate for SlashCommandDelegate {
SlashCommandEntry::Advert { renderer, .. } => Some(
ListItem::new(ix)
.inset(true)
.spacing(ListItemSpacing::Dense)
.spacing(ListItemSpacing::Sparse)
.selected(selected)
.child(renderer(cx)),
),

View File

@@ -15,15 +15,9 @@ path = "src/assistant_slash_command.rs"
anyhow.workspace = true
collections.workspace = true
derive_more.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
parking_lot.workspace = true
serde.workspace = true
serde_json.workspace = true
workspace.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
workspace = { workspace = true, features = ["test-support"] }

View File

@@ -1,8 +1,6 @@
mod slash_command_registry;
use anyhow::Result;
use futures::stream::{self, BoxStream};
use futures::StreamExt;
use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, WindowContext};
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt};
use serde::{Deserialize, Serialize};
@@ -58,8 +56,6 @@ pub struct ArgumentCompletion {
pub replace_previous_arguments: bool,
}
pub type SlashCommandResult = Result<BoxStream<'static, Result<SlashCommandEvent>>>;
pub trait SlashCommand: 'static + Send + Sync {
fn name(&self) -> String;
fn label(&self, _cx: &AppContext) -> CodeLabel {
@@ -91,7 +87,7 @@ pub trait SlashCommand: 'static + Send + Sync {
// perhaps another kind of delegate is needed here.
delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<SlashCommandResult>;
) -> Task<Result<SlashCommandOutput>>;
}
pub type RenderFoldPlaceholder = Arc<
@@ -100,146 +96,13 @@ pub type RenderFoldPlaceholder = Arc<
+ Fn(ElementId, Arc<dyn Fn(&mut WindowContext)>, &mut WindowContext) -> AnyElement,
>;
#[derive(Debug, PartialEq, Eq)]
pub enum SlashCommandContent {
Text {
text: String,
run_commands_in_text: bool,
},
}
#[derive(Debug, PartialEq, Eq)]
pub enum SlashCommandEvent {
StartSection {
icon: IconName,
label: SharedString,
metadata: Option<serde_json::Value>,
},
Content(SlashCommandContent),
EndSection {
metadata: Option<serde_json::Value>,
},
}
#[derive(Debug, Default, PartialEq, Clone)]
#[derive(Debug, Default, PartialEq)]
pub struct SlashCommandOutput {
pub text: String,
pub sections: Vec<SlashCommandOutputSection<usize>>,
pub run_commands_in_text: bool,
}
impl SlashCommandOutput {
pub fn ensure_valid_section_ranges(&mut self) {
for section in &mut self.sections {
section.range.start = section.range.start.min(self.text.len());
section.range.end = section.range.end.min(self.text.len());
while !self.text.is_char_boundary(section.range.start) {
section.range.start -= 1;
}
while !self.text.is_char_boundary(section.range.end) {
section.range.end += 1;
}
}
}
/// Returns this [`SlashCommandOutput`] as a stream of [`SlashCommandEvent`]s.
pub fn to_event_stream(mut self) -> BoxStream<'static, Result<SlashCommandEvent>> {
self.ensure_valid_section_ranges();
let mut events = Vec::new();
let mut last_section_end = 0;
for section in self.sections {
if last_section_end < section.range.start {
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self
.text
.get(last_section_end..section.range.start)
.unwrap_or_default()
.to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
}
events.push(Ok(SlashCommandEvent::StartSection {
icon: section.icon,
label: section.label,
metadata: section.metadata.clone(),
}));
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self
.text
.get(section.range.start..section.range.end)
.unwrap_or_default()
.to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
events.push(Ok(SlashCommandEvent::EndSection {
metadata: section.metadata,
}));
last_section_end = section.range.end;
}
if last_section_end < self.text.len() {
events.push(Ok(SlashCommandEvent::Content(SlashCommandContent::Text {
text: self.text[last_section_end..].to_string(),
run_commands_in_text: self.run_commands_in_text,
})));
}
stream::iter(events).boxed()
}
pub async fn from_event_stream(
mut events: BoxStream<'static, Result<SlashCommandEvent>>,
) -> Result<SlashCommandOutput> {
let mut output = SlashCommandOutput::default();
let mut section_stack = Vec::new();
while let Some(event) = events.next().await {
match event? {
SlashCommandEvent::StartSection {
icon,
label,
metadata,
} => {
let start = output.text.len();
section_stack.push(SlashCommandOutputSection {
range: start..start,
icon,
label,
metadata,
});
}
SlashCommandEvent::Content(SlashCommandContent::Text {
text,
run_commands_in_text,
}) => {
output.text.push_str(&text);
output.run_commands_in_text = run_commands_in_text;
if let Some(section) = section_stack.last_mut() {
section.range.end = output.text.len();
}
}
SlashCommandEvent::EndSection { metadata } => {
if let Some(mut section) = section_stack.pop() {
section.metadata = metadata;
output.sections.push(section);
}
}
}
}
while let Some(section) = section_stack.pop() {
output.sections.push(section);
}
Ok(output)
}
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct SlashCommandOutputSection<T> {
pub range: Range<T>,
@@ -253,243 +116,3 @@ impl SlashCommandOutputSection<language::Anchor> {
self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty()
}
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
use serde_json::json;
use super::*;
#[gpui::test]
async fn test_slash_command_output_to_events_round_trip() {
// Test basic output consisting of a single section.
{
let text = "Hello, world!".to_string();
let range = 0..text.len();
let output = SlashCommandOutput {
text,
sections: vec![SlashCommandOutputSection {
range,
icon: IconName::Code,
label: "Section 1".into(),
metadata: None,
}],
run_commands_in_text: false,
};
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
.collect::<Vec<_>>();
assert_eq!(
events,
vec![
SlashCommandEvent::StartSection {
icon: IconName::Code,
label: "Section 1".into(),
metadata: None
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Hello, world!".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection { metadata: None }
]
);
let new_output =
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
.await
.unwrap();
assert_eq!(new_output, output);
}
// Test output where the sections do not comprise all of the text.
{
let text = "Apple\nCucumber\nBanana\n".to_string();
let output = SlashCommandOutput {
text,
sections: vec![
SlashCommandOutputSection {
range: 0..6,
icon: IconName::Check,
label: "Fruit".into(),
metadata: None,
},
SlashCommandOutputSection {
range: 15..22,
icon: IconName::Check,
label: "Fruit".into(),
metadata: None,
},
],
run_commands_in_text: false,
};
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
.collect::<Vec<_>>();
assert_eq!(
events,
vec![
SlashCommandEvent::StartSection {
icon: IconName::Check,
label: "Fruit".into(),
metadata: None
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Apple\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection { metadata: None },
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Cucumber\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::Check,
label: "Fruit".into(),
metadata: None
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Banana\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection { metadata: None }
]
);
let new_output =
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
.await
.unwrap();
assert_eq!(new_output, output);
}
// Test output consisting of multiple sections.
{
let text = "Line 1\nLine 2\nLine 3\nLine 4\n".to_string();
let output = SlashCommandOutput {
text,
sections: vec![
SlashCommandOutputSection {
range: 0..6,
icon: IconName::FileCode,
label: "Section 1".into(),
metadata: Some(json!({ "a": true })),
},
SlashCommandOutputSection {
range: 7..13,
icon: IconName::FileDoc,
label: "Section 2".into(),
metadata: Some(json!({ "b": true })),
},
SlashCommandOutputSection {
range: 14..20,
icon: IconName::FileGit,
label: "Section 3".into(),
metadata: Some(json!({ "c": true })),
},
SlashCommandOutputSection {
range: 21..27,
icon: IconName::FileToml,
label: "Section 4".into(),
metadata: Some(json!({ "d": true })),
},
],
run_commands_in_text: false,
};
let events = output.clone().to_event_stream().collect::<Vec<_>>().await;
let events = events
.into_iter()
.filter_map(|event| event.ok())
.collect::<Vec<_>>();
assert_eq!(
events,
vec![
SlashCommandEvent::StartSection {
icon: IconName::FileCode,
label: "Section 1".into(),
metadata: Some(json!({ "a": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 1".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "a": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::FileDoc,
label: "Section 2".into(),
metadata: Some(json!({ "b": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 2".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "b": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::FileGit,
label: "Section 3".into(),
metadata: Some(json!({ "c": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 3".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "c": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
SlashCommandEvent::StartSection {
icon: IconName::FileToml,
label: "Section 4".into(),
metadata: Some(json!({ "d": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "Line 4".into(),
run_commands_in_text: false
}),
SlashCommandEvent::EndSection {
metadata: Some(json!({ "d": true }))
},
SlashCommandEvent::Content(SlashCommandContent::Text {
text: "\n".into(),
run_commands_in_text: false
}),
]
);
let new_output =
SlashCommandOutput::from_event_stream(output.clone().to_event_stream())
.await
.unwrap();
assert_eq!(new_output, output);
}
}
}
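As a small aside on ensure_valid_section_ranges above: clamping a section range to the text length can leave an endpoint in the middle of a multi-byte UTF-8 character, which is why the start is walked backwards and the end forwards until both sit on char boundaries. A self-contained illustration (the function name and string here are made up for the example):

use std::ops::Range;

fn clamp_to_char_boundaries(text: &str, mut range: Range<usize>) -> Range<usize> {
    range.start = range.start.min(text.len());
    range.end = range.end.min(text.len());
    while !text.is_char_boundary(range.start) {
        range.start -= 1; // back up to the start of the multi-byte character
    }
    while !text.is_char_boundary(range.end) {
        range.end += 1; // extend to the end of the multi-byte character
    }
    range
}

fn main() {
    let text = "a€b"; // '€' occupies bytes 1..4
    assert_eq!(clamp_to_char_boundaries(text, 2..3), 1..4);
    assert_eq!(clamp_to_char_boundaries(text, 0..10), 0..5);
}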

View File

@@ -32,5 +32,4 @@ settings.workspace = true
smol.workspace = true
tempfile.workspace = true
util.workspace = true
which.workspace = true
workspace.workspace = true

View File

@@ -11,7 +11,6 @@ use gpui::{
};
use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView};
use paths::remote_servers_dir;
use schemars::JsonSchema;
use serde::Deserialize;
use serde_derive::Serialize;
@@ -34,7 +33,6 @@ use std::{
};
use update_notification::UpdateNotification;
use util::ResultExt;
use which::which;
use workspace::notifications::NotificationId;
use workspace::Workspace;
@@ -432,11 +430,10 @@ impl AutoUpdater {
cx.notify();
}
pub async fn download_remote_server_release(
pub async fn get_latest_remote_server_release(
os: &str,
arch: &str,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
mut release_channel: ReleaseChannel,
cx: &mut AsyncAppContext,
) -> Result<PathBuf> {
let this = cx.update(|cx| {
@@ -446,12 +443,15 @@ impl AutoUpdater {
.ok_or_else(|| anyhow!("auto-update not initialized"))
})??;
let release = Self::get_release(
if release_channel == ReleaseChannel::Dev {
release_channel = ReleaseChannel::Nightly;
}
let release = Self::get_latest_release(
&this,
"zed-remote-server",
os,
arch,
version,
Some(release_channel),
cx,
)
@@ -466,97 +466,13 @@ impl AutoUpdater {
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
if smol::fs::metadata(&version_path).await.is_err() {
log::info!(
"downloading zed-remote-server {os} {arch} version {}",
release.version
);
log::info!("downloading zed-remote-server {os} {arch}");
download_remote_server_binary(&version_path, release, client, cx).await?;
}
Ok(version_path)
}
pub async fn get_remote_server_release_url(
os: &str,
arch: &str,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
cx: &mut AsyncAppContext,
) -> Result<(String, String)> {
let this = cx.update(|cx| {
cx.default_global::<GlobalAutoUpdate>()
.0
.clone()
.ok_or_else(|| anyhow!("auto-update not initialized"))
})??;
let release = Self::get_release(
&this,
"zed-remote-server",
os,
arch,
version,
Some(release_channel),
cx,
)
.await?;
let update_request_body = build_remote_server_update_request_body(cx)?;
let body = serde_json::to_string(&update_request_body)?;
Ok((release.url, body))
}
async fn get_release(
this: &Model<Self>,
asset: &str,
os: &str,
arch: &str,
version: Option<SemanticVersion>,
release_channel: Option<ReleaseChannel>,
cx: &mut AsyncAppContext,
) -> Result<JsonRelease> {
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
if let Some(version) = version {
let channel = release_channel.map(|c| c.dev_name()).unwrap_or("stable");
let url = format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1",);
Ok(JsonRelease {
version: version.to_string(),
url: client.build_url(&url),
})
} else {
let mut url_string = client.build_url(&format!(
"/api/releases/latest?asset={}&os={}&arch={}",
asset, os, arch
));
if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
url_string += "&";
url_string += param;
}
let mut response = client.get(&url_string, Default::default(), true).await?;
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
if !response.status().is_success() {
return Err(anyhow!(
"failed to fetch release: {:?}",
String::from_utf8_lossy(&body),
));
}
serde_json::from_slice(body.as_slice()).with_context(|| {
format!(
"error deserializing release {:?}",
String::from_utf8_lossy(&body),
)
})
}
}
async fn get_latest_release(
this: &Model<Self>,
asset: &str,
@@ -565,7 +481,38 @@ impl AutoUpdater {
release_channel: Option<ReleaseChannel>,
cx: &mut AsyncAppContext,
) -> Result<JsonRelease> {
Self::get_release(this, asset, os, arch, None, release_channel, cx).await
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
let mut url_string = client.build_url(&format!(
"/api/releases/latest?asset={}&os={}&arch={}",
asset, os, arch
));
if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
url_string += "&";
url_string += param;
}
let mut response = client.get(&url_string, Default::default(), true).await?;
let mut body = Vec::new();
response
.body_mut()
.read_to_end(&mut body)
.await
.context("error reading release")?;
if !response.status().is_success() {
Err(anyhow!(
"failed to fetch release: {:?}",
String::from_utf8_lossy(&body),
))?;
}
serde_json::from_slice(body.as_slice()).with_context(|| {
format!(
"error deserializing release {:?}",
String::from_utf8_lossy(&body),
)
})
}
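To make the two request shapes in the unified get_release easier to see: a pinned version downloads a channel-specific .gz asset directly, while the latest-release path queries the releases API, optionally narrowed by a channel query parameter. A rough sketch using only the URL formats visible in this diff (the parameter names of this helper are invented for illustration):

fn release_url(
    asset: &str,
    os: &str,
    arch: &str,
    version: Option<&str>,
    channel_dev_name: Option<&str>,
    release_query_param: Option<&str>,
) -> String {
    match version {
        // A pinned version fetches a specific asset for that channel and version.
        Some(version) => {
            let channel = channel_dev_name.unwrap_or("stable");
            format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1")
        }
        // Otherwise, ask the releases API which version is the latest.
        None => {
            let mut url = format!("/api/releases/latest?asset={asset}&os={os}&arch={arch}");
            if let Some(param) = release_query_param {
                url.push('&');
                url.push_str(param);
            }
            url
        }
    }
}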
async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
@@ -613,12 +560,6 @@ impl AutoUpdater {
"linux" => Ok("zed.tar.gz"),
_ => Err(anyhow!("not supported: {:?}", OS)),
}?;
anyhow::ensure!(
which("rsync").is_ok(),
"Aborting. Could not find rsync which is required for auto-updates."
);
let downloaded_asset = temp_dir.path().join(filename);
download_release(&downloaded_asset, release, client, &cx).await?;
@@ -680,19 +621,7 @@ async fn download_remote_server_binary(
client: Arc<HttpClientWithUrl>,
cx: &AsyncAppContext,
) -> Result<()> {
let temp = tempfile::Builder::new().tempfile_in(remote_servers_dir())?;
let mut temp_file = File::create(&temp).await?;
let update_request_body = build_remote_server_update_request_body(cx)?;
let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
let mut response = client.get(&release.url, request_body, true).await?;
smol::io::copy(response.body_mut(), &mut temp_file).await?;
smol::fs::rename(&temp, &target_path).await?;
Ok(())
}
fn build_remote_server_update_request_body(cx: &AsyncAppContext) -> Result<UpdateRequestBody> {
let mut target_file = File::create(&target_path).await?;
let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
let telemetry = Client::global(cx).telemetry().clone();
let is_staff = telemetry.is_staff();
@@ -708,14 +637,17 @@ fn build_remote_server_update_request_body(cx: &AsyncAppContext) -> Result<Updat
is_staff,
)
})?;
Ok(UpdateRequestBody {
let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
installation_id,
release_channel,
telemetry: telemetry_enabled,
is_staff,
destination: "remote",
})
})?);
let mut response = client.get(&release.url, request_body, true).await?;
smol::io::copy(response.body_mut(), &mut target_file).await?;
Ok(())
}
async fn download_release(

View File

@@ -1194,15 +1194,26 @@ impl Room {
project: Model<Project>,
cx: &mut ModelContext<Self>,
) -> Task<Result<u64>> {
if let Some(project_id) = project.read(cx).remote_id() {
return Task::ready(Ok(project_id));
}
let request = if let Some(dev_server_project_id) = project.read(cx).dev_server_project_id()
{
self.client.request(proto::ShareProject {
room_id: self.id(),
worktrees: vec![],
dev_server_project_id: Some(dev_server_project_id.0),
is_ssh_project: false,
})
} else {
if let Some(project_id) = project.read(cx).remote_id() {
return Task::ready(Ok(project_id));
}
let request = self.client.request(proto::ShareProject {
room_id: self.id(),
worktrees: project.read(cx).worktree_metadata_protos(cx),
is_ssh_project: project.read(cx).is_via_ssh(),
});
self.client.request(proto::ShareProject {
room_id: self.id(),
worktrees: project.read(cx).worktree_metadata_protos(cx),
dev_server_project_id: None,
is_ssh_project: project.read(cx).is_via_ssh(),
})
};
cx.spawn(|this, mut cx| async move {
let response = request.await?;

View File

@@ -15,6 +15,7 @@ pub enum CliRequest {
urls: Vec<String>,
wait: bool,
open_new_workspace: Option<bool>,
dev_server_token: Option<String>,
env: Option<HashMap<String, String>>,
},
}

View File

@@ -151,12 +151,6 @@ fn main() -> Result<()> {
}
}
if let Some(_) = args.dev_server_token {
return Err(anyhow::anyhow!(
"Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
))?;
}
let sender: JoinHandle<anyhow::Result<()>> = thread::spawn({
let exit_status = exit_status.clone();
move || {
@@ -168,6 +162,7 @@ fn main() -> Result<()> {
urls,
wait: args.wait,
open_new_workspace,
dev_server_token: args.dev_server_token,
env,
})?;

View File

@@ -30,6 +30,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use socks::connect_socks_proxy_stream;
use std::fmt;
use std::pin::Pin;
use std::{
any::TypeId,
@@ -53,6 +54,15 @@ pub use rpc::*;
pub use telemetry_events::Event;
pub use user::*;
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct DevServerToken(pub String);
impl fmt::Display for DevServerToken {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
static ZED_SERVER_URL: LazyLock<Option<String>> =
LazyLock::new(|| std::env::var("ZED_SERVER_URL").ok());
static ZED_RPC_URL: LazyLock<Option<String>> = LazyLock::new(|| std::env::var("ZED_RPC_URL").ok());
@@ -294,14 +304,20 @@ struct ClientState {
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Credentials {
pub user_id: u64,
pub access_token: String,
pub enum Credentials {
DevServer { token: DevServerToken },
User { user_id: u64, access_token: String },
}
impl Credentials {
pub fn authorization_header(&self) -> String {
format!("{} {}", self.user_id, self.access_token)
match self {
Credentials::DevServer { token } => format!("dev-server-token {}", token),
Credentials::User {
user_id,
access_token,
} => format!("{} {}", user_id, access_token),
}
}
}
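The change above replaces the flat user-id/access-token pair with a two-variant enum so a dev server can authenticate with a single token. The following is a minimal, self-contained sketch of the same idea; Credentials here is a local stand-in (the real token is wrapped in DevServerToken) and the values are invented.

// Stand-in sketch mirroring the `Credentials` change in the diff above.
#[derive(Debug, Clone, PartialEq, Eq)]
enum Credentials {
    DevServer { token: String },
    User { user_id: u64, access_token: String },
}

impl Credentials {
    fn authorization_header(&self) -> String {
        match self {
            Credentials::DevServer { token } => format!("dev-server-token {}", token),
            Credentials::User { user_id, access_token } => {
                format!("{} {}", user_id, access_token)
            }
        }
    }
}

fn main() {
    let user = Credentials::User { user_id: 42, access_token: "abc123".into() };
    let server = Credentials::DevServer { token: "7.dGVzdA".into() };

    // A user authenticates as "<user_id> <access_token>" ...
    assert_eq!(user.authorization_header(), "42 abc123");
    // ... while a dev server sends a "dev-server-token <token>" header.
    assert_eq!(server.authorization_header(), "dev-server-token 7.dGVzdA");
}

Making the principal explicit in the enum is what forces the match-based rewrites of user_id() and the credential-writing path further down in this file: every caller now has to say which kind of principal it expects.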
@@ -584,11 +600,11 @@ impl Client {
}
pub fn user_id(&self) -> Option<u64> {
self.state
.read()
.credentials
.as_ref()
.map(|credentials| credentials.user_id)
if let Some(Credentials::User { user_id, .. }) = self.state.read().credentials.as_ref() {
Some(*user_id)
} else {
None
}
}
pub fn peer_id(&self) -> Option<PeerId> {
@@ -777,6 +793,11 @@ impl Client {
.is_some()
}
pub fn set_dev_server_token(&self, token: DevServerToken) -> &Self {
self.state.write().credentials = Some(Credentials::DevServer { token });
self
}
#[async_recursion(?Send)]
pub async fn authenticate_and_connect(
self: &Arc<Self>,
@@ -827,7 +848,9 @@ impl Client {
}
}
let credentials = credentials.unwrap();
self.set_id(credentials.user_id);
if let Credentials::User { user_id, .. } = &credentials {
self.set_id(*user_id);
}
if was_disconnected {
self.set_status(Status::Connecting, cx);
@@ -843,8 +866,9 @@ impl Client {
Ok(conn) => {
self.state.write().credentials = Some(credentials.clone());
if !read_from_provider && IMPERSONATE_LOGIN.is_none() {
self.credentials_provider.write_credentials(credentials.user_id, credentials.access_token, cx).await.log_err();
if let Credentials::User{user_id, access_token} = credentials {
self.credentials_provider.write_credentials(user_id, access_token, cx).await.log_err();
}
}
futures::select_biased! {
@@ -1277,7 +1301,7 @@ impl Client {
.decrypt_string(&access_token)
.context("failed to decrypt access token")?;
Ok(Credentials {
Ok(Credentials::User {
user_id: user_id.parse()?,
access_token,
})
@@ -1398,7 +1422,7 @@ impl Client {
// Use the admin API token to authenticate as the impersonated user.
api_token.insert_str(0, "ADMIN_TOKEN:");
Ok(Credentials {
Ok(Credentials::User {
user_id: response.user.id,
access_token: api_token,
})
@@ -1643,7 +1667,7 @@ impl CredentialsProvider for DevelopmentCredentialsProvider {
let credentials: DevelopmentCredentials = serde_json::from_slice(&json).log_err()?;
Some(Credentials {
Some(Credentials::User {
user_id: credentials.user_id,
access_token: credentials.access_token,
})
@@ -1697,7 +1721,7 @@ impl CredentialsProvider for KeychainCredentialsProvider {
.await
.log_err()??;
Some(Credentials {
Some(Credentials::User {
user_id: user_id.parse().ok()?,
access_token: String::from_utf8(access_token).ok()?,
})
@@ -1831,7 +1855,7 @@ mod tests {
// Time out when client tries to connect.
client.override_authenticate(move |cx| {
cx.background_executor().spawn(async move {
Ok(Credentials {
Ok(Credentials::User {
user_id,
access_token: "token".into(),
})

View File

@@ -49,7 +49,7 @@ impl FakeServer {
let mut state = state.lock();
state.auth_count += 1;
let access_token = state.access_token.to_string();
Ok(Credentials {
Ok(Credentials::User {
user_id: client_user_id,
access_token,
})
@@ -73,7 +73,7 @@ impl FakeServer {
}
if credentials
!= (Credentials {
!= (Credentials::User {
user_id: client_user_id,
access_token: state.lock().access_token.to_string(),
})

View File

@@ -28,6 +28,9 @@ impl std::fmt::Display for ChannelId {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct ProjectId(pub u64);
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct DevServerId(pub u64);
#[derive(
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
)]

View File

@@ -86,6 +86,7 @@ client = { workspace = true, features = ["test-support"] }
collab_ui = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }
ctor.workspace = true
dev_server_projects.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
file_finder.workspace = true
@@ -93,6 +94,7 @@ fs = { workspace = true, features = ["test-support"] }
git = { workspace = true, features = ["test-support"] }
git_hosting_providers.workspace = true
gpui = { workspace = true, features = ["test-support"] }
headless.workspace = true
hyper.workspace = true
indoc.workspace = true
language = { workspace = true, features = ["test-support"] }

View File

@@ -11,8 +11,7 @@ CREATE TABLE "users" (
"metrics_id" TEXT,
"github_user_id" INTEGER NOT NULL,
"accepted_tos_at" TIMESTAMP WITHOUT TIME ZONE,
"github_user_created_at" TIMESTAMP WITHOUT TIME ZONE,
"custom_llm_monthly_allowance_in_cents" INTEGER
"github_user_created_at" TIMESTAMP WITHOUT TIME ZONE
);
CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
@@ -79,10 +78,10 @@ CREATE TABLE "worktree_entries" (
"id" INTEGER NOT NULL,
"is_dir" BOOL NOT NULL,
"path" VARCHAR NOT NULL,
"canonical_path" TEXT,
"inode" INTEGER NOT NULL,
"mtime_seconds" INTEGER NOT NULL,
"mtime_nanos" INTEGER NOT NULL,
"is_symlink" BOOL NOT NULL,
"is_external" BOOL NOT NULL,
"is_ignored" BOOL NOT NULL,
"is_deleted" BOOL NOT NULL,

View File

@@ -1,2 +0,0 @@
ALTER TABLE worktree_entries ADD COLUMN canonical_path text;
ALTER TABLE worktree_entries ALTER COLUMN is_symlink SET DEFAULT false;

View File

@@ -1 +0,0 @@
alter table users add column custom_llm_monthly_allowance_in_cents integer;

View File

@@ -34,7 +34,7 @@ use crate::{
db::{billing_subscription::StripeSubscriptionStatus, UserId},
llm::db::LlmDatabase,
};
use crate::{AppState, Cents, Error, Result};
use crate::{AppState, Error, Result};
pub fn router() -> Router {
Router::new()
@@ -226,13 +226,6 @@ async fn create_billing_subscription(
))?
};
if app.db.has_active_billing_subscription(user.id).await? {
return Err(Error::http(
StatusCode::CONFLICT,
"user already has an active subscription".into(),
));
}
let customer_id =
if let Some(existing_customer) = app.db.get_billing_customer_by_user_id(user.id).await? {
CustomerId::from_str(&existing_customer.stripe_customer_id)
@@ -252,10 +245,7 @@ async fn create_billing_subscription(
let default_model = llm_db.model(rpc::LanguageModelProvider::Anthropic, "claude-3-5-sonnet")?;
let stripe_model = stripe_billing.register_model(default_model).await?;
let success_url = format!(
"{}/account?checkout_complete=1",
app.config.zed_dot_dev_url()
);
let success_url = format!("{}/account", app.config.zed_dot_dev_url());
let checkout_session_url = stripe_billing
.checkout(customer_id, &user.github_login, &stripe_model, &success_url)
.await?;
@@ -665,33 +655,6 @@ async fn handle_customer_subscription_event(
)
.await?;
} else {
// If the user already has an active billing subscription, ignore the
// event and return an `Ok` to signal that it was processed
// successfully.
//
// There is the possibility that this could cause us to not create a
// subscription in the following scenario:
//
// 1. User has an active subscription A
// 2. User cancels subscription A
// 3. User creates a new subscription B
// 4. We process the new subscription B before the cancellation of subscription A
// 5. User ends up with no subscriptions
//
// In theory this situation shouldn't arise as we try to process the events in the order they occur.
if app
.db
.has_active_billing_subscription(billing_customer.user_id)
.await?
{
log::info!(
"user {user_id} already has an active subscription, skipping creation of subscription {subscription_id}",
user_id = billing_customer.user_id,
subscription_id = subscription.id
);
return Ok(());
}
app.db
.create_billing_subscription(&CreateBillingSubscriptionParams {
billing_customer_id: billing_customer.id,
@@ -717,9 +680,7 @@ struct GetMonthlySpendParams {
#[derive(Debug, Serialize)]
struct GetMonthlySpendResponse {
monthly_free_tier_spend_in_cents: u32,
monthly_free_tier_allowance_in_cents: u32,
monthly_spend_in_cents: u32,
monthly_spend_in_cents: i32,
}
async fn get_monthly_spend(
@@ -739,22 +700,13 @@ async fn get_monthly_spend(
));
};
let free_tier = user
.custom_llm_monthly_allowance_in_cents
.map(|allowance| Cents(allowance as u32))
.unwrap_or(FREE_TIER_MONTHLY_SPENDING_LIMIT);
let spending_for_month = llm_db
let monthly_spend = llm_db
.get_user_spending_for_month(user.id, Utc::now())
.await?;
let free_tier_spend = Cents::min(spending_for_month, free_tier);
let monthly_spend = spending_for_month.saturating_sub(free_tier);
.await?
.saturating_sub(FREE_TIER_MONTHLY_SPENDING_LIMIT);
Ok(Json(GetMonthlySpendResponse {
monthly_free_tier_spend_in_cents: free_tier_spend.0,
monthly_free_tier_allowance_in_cents: free_tier.0,
monthly_spend_in_cents: monthly_spend.0,
monthly_spend_in_cents: monthly_spend.0 as i32,
}))
}

View File

@@ -1,5 +1,5 @@
use crate::{
db::{self, AccessTokenId, Database, UserId},
db::{self, dev_server, AccessTokenId, Database, DevServerId, UserId},
rpc::Principal,
AppState, Error, Result,
};
@@ -44,10 +44,19 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
let first = auth_header.next().unwrap_or("");
if first == "dev-server-token" {
Err(Error::http(
StatusCode::UNAUTHORIZED,
"Dev servers were removed in Zed 0.157 please upgrade to SSH remoting".to_string(),
))?;
let dev_server_token = auth_header.next().ok_or_else(|| {
Error::http(
StatusCode::BAD_REQUEST,
"missing dev-server-token token in authorization header".to_string(),
)
})?;
let dev_server = verify_dev_server_token(dev_server_token, &state.db)
.await
.map_err(|e| Error::http(StatusCode::UNAUTHORIZED, format!("{}", e)))?;
req.extensions_mut()
.insert(Principal::DevServer(dev_server));
return Ok::<_, Error>(next.run(req).await);
}
let user_id = UserId(first.parse().map_err(|_| {
@@ -231,6 +240,41 @@ pub async fn verify_access_token(
})
}
pub fn generate_dev_server_token(id: usize, access_token: String) -> String {
format!("{}.{}", id, access_token)
}
pub async fn verify_dev_server_token(
dev_server_token: &str,
db: &Arc<Database>,
) -> anyhow::Result<dev_server::Model> {
let (id, token) = split_dev_server_token(dev_server_token)?;
let token_hash = hash_access_token(token);
let server = db.get_dev_server(id).await?;
if server
.hashed_token
.as_bytes()
.ct_eq(token_hash.as_ref())
.into()
{
Ok(server)
} else {
Err(anyhow!("wrong token for dev server"))
}
}
// A dev_server_token has the format <id>.<base64>. This keeps them
// relatively easy to copy/paste around.
pub fn split_dev_server_token(dev_server_token: &str) -> anyhow::Result<(DevServerId, &str)> {
let mut parts = dev_server_token.splitn(2, '.');
let id = DevServerId(parts.next().unwrap_or_default().parse()?);
let token = parts
.next()
.ok_or_else(|| anyhow!("invalid dev server token format"))?;
Ok((id, token))
}
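Taken together with the "dev-server-token" branch of validate_header above, the token helpers round-trip like the sketch below. The id and secret are placeholders and the error type is simplified to String; the real code stores only hash_access_token's output and compares it in constant time, which the sketch does not attempt.

// Round-trip sketch of the "<id>.<secret>" token layout used above.
fn generate_dev_server_token(id: usize, access_token: String) -> String {
    format!("{}.{}", id, access_token)
}

fn split_dev_server_token(token: &str) -> Result<(u64, &str), String> {
    let mut parts = token.splitn(2, '.');
    let id = parts
        .next()
        .unwrap_or_default()
        .parse::<u64>()
        .map_err(|e| e.to_string())?;
    let secret = parts
        .next()
        .ok_or_else(|| "invalid dev server token format".to_string())?;
    Ok((id, secret))
}

fn main() {
    let token = generate_dev_server_token(7, "c2VjcmV0".to_string());
    assert_eq!(token, "7.c2VjcmV0");

    // The client sends `Authorization: dev-server-token 7.c2VjcmV0`; the server
    // splits it back into the id used to look the dev server up and the secret
    // that gets hashed and compared against the stored hash.
    let (id, secret) = split_dev_server_token(&token).unwrap();
    assert_eq!((id, secret), (7, "c2VjcmV0"));
}

Using splitn(2, '.') means only the first dot is treated as the separator, so a secret containing further dots would still survive the split.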
#[cfg(test)]
mod test {
use rand::thread_rng;

View File

@@ -726,6 +726,7 @@ pub struct Project {
pub collaborators: Vec<ProjectCollaborator>,
pub worktrees: BTreeMap<u64, Worktree>,
pub language_servers: Vec<proto::LanguageServer>,
pub dev_server_project_id: Option<DevServerProjectId>,
}
pub struct ProjectCollaborator {

View File

@@ -79,6 +79,7 @@ id_type!(ChannelChatParticipantId);
id_type!(ChannelId);
id_type!(ChannelMemberId);
id_type!(ContactId);
id_type!(DevServerId);
id_type!(ExtensionId);
id_type!(FlagId);
id_type!(FollowerId);
@@ -88,6 +89,7 @@ id_type!(NotificationId);
id_type!(NotificationKindId);
id_type!(ProjectCollaboratorId);
id_type!(ProjectId);
id_type!(DevServerProjectId);
id_type!(ReplicaId);
id_type!(RoomId);
id_type!(RoomParticipantId);
@@ -275,6 +277,12 @@ impl From<ChannelVisibility> for i32 {
}
}
#[derive(Copy, Clone, Debug, Serialize, PartialEq)]
pub enum PrincipalId {
UserId(UserId),
DevServerId(DevServerId),
}
/// Indicate whether a [Buffer] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {

View File

@@ -8,6 +8,8 @@ pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod contributors;
pub mod dev_server_projects;
pub mod dev_servers;
pub mod embeddings;
pub mod extensions;
pub mod hosted_projects;

View File

@@ -1 +1,365 @@
use anyhow::anyhow;
use rpc::{
proto::{self},
ConnectionId,
};
use sea_orm::{
ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait,
IntoActiveModel, ModelTrait, QueryFilter,
};
use crate::db::ProjectId;
use super::{
dev_server, dev_server_project, project, project_collaborator, worktree, Database, DevServerId,
DevServerProjectId, RejoinedProject, ResharedProject, ServerId, UserId,
};
impl Database {
pub async fn get_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
) -> crate::Result<dev_server_project::Model> {
self.transaction(|tx| async move {
Ok(
dev_server_project::Entity::find_by_id(dev_server_project_id)
.one(&*tx)
.await?
.ok_or_else(|| {
anyhow!("no dev server project with id {}", dev_server_project_id)
})?,
)
})
.await
}
pub async fn get_projects_for_dev_server(
&self,
dev_server_id: DevServerId,
) -> crate::Result<Vec<proto::DevServerProject>> {
self.transaction(|tx| async move {
self.get_projects_for_dev_server_internal(dev_server_id, &tx)
.await
})
.await
}
pub async fn get_projects_for_dev_server_internal(
&self,
dev_server_id: DevServerId,
tx: &DatabaseTransaction,
) -> crate::Result<Vec<proto::DevServerProject>> {
let servers = dev_server_project::Entity::find()
.filter(dev_server_project::Column::DevServerId.eq(dev_server_id))
.find_also_related(project::Entity)
.all(tx)
.await?;
Ok(servers
.into_iter()
.map(|(dev_server_project, project)| dev_server_project.to_proto(project))
.collect())
}
pub async fn dev_server_project_ids_for_user(
&self,
user_id: UserId,
tx: &DatabaseTransaction,
) -> crate::Result<Vec<DevServerProjectId>> {
let dev_servers = dev_server::Entity::find()
.filter(dev_server::Column::UserId.eq(user_id))
.find_with_related(dev_server_project::Entity)
.all(tx)
.await?;
Ok(dev_servers
.into_iter()
.flat_map(|(_, projects)| projects.into_iter().map(|p| p.id))
.collect())
}
pub async fn owner_for_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
tx: &DatabaseTransaction,
) -> crate::Result<UserId> {
let dev_server = dev_server_project::Entity::find_by_id(dev_server_project_id)
.find_also_related(dev_server::Entity)
.one(tx)
.await?
.and_then(|(_, dev_server)| dev_server)
.ok_or_else(|| anyhow!("no dev server project"))?;
Ok(dev_server.user_id)
}
pub async fn get_stale_dev_server_projects(
&self,
connection: ConnectionId,
) -> crate::Result<Vec<ProjectId>> {
self.transaction(|tx| async move {
let projects = project::Entity::find()
.filter(
Condition::all()
.add(project::Column::HostConnectionId.eq(connection.id))
.add(project::Column::HostConnectionServerId.eq(connection.owner_id)),
)
.all(&*tx)
.await?;
Ok(projects.into_iter().map(|p| p.id).collect())
})
.await
}
pub async fn create_dev_server_project(
&self,
dev_server_id: DevServerId,
path: &str,
user_id: UserId,
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
self.transaction(|tx| async move {
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
if dev_server.user_id != user_id {
return Err(anyhow!("not your dev server"))?;
}
let project = dev_server_project::Entity::insert(dev_server_project::ActiveModel {
id: ActiveValue::NotSet,
dev_server_id: ActiveValue::Set(dev_server_id),
paths: ActiveValue::Set(dev_server_project::JSONPaths(vec![path.to_string()])),
})
.exec_with_returning(&*tx)
.await?;
let status = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok((project, status))
})
.await
}
pub async fn update_dev_server_project(
&self,
id: DevServerProjectId,
paths: &[String],
user_id: UserId,
) -> crate::Result<(dev_server_project::Model, proto::DevServerProjectsUpdate)> {
self.transaction(move |tx| async move {
let paths = paths.to_owned();
let Some((project, Some(dev_server))) = dev_server_project::Entity::find_by_id(id)
.find_also_related(dev_server::Entity)
.one(&*tx)
.await?
else {
return Err(anyhow!("no such dev server project"))?;
};
if dev_server.user_id != user_id {
return Err(anyhow!("not your dev server"))?;
}
let mut project = project.into_active_model();
project.paths = ActiveValue::Set(dev_server_project::JSONPaths(paths));
let project = project.update(&*tx).await?;
let status = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok((project, status))
})
.await
}
pub async fn delete_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
dev_server_id: DevServerId,
user_id: UserId,
) -> crate::Result<(Vec<proto::DevServerProject>, proto::DevServerProjectsUpdate)> {
self.transaction(|tx| async move {
project::Entity::delete_many()
.filter(project::Column::DevServerProjectId.eq(dev_server_project_id))
.exec(&*tx)
.await?;
let result = dev_server_project::Entity::delete_by_id(dev_server_project_id)
.exec(&*tx)
.await?;
if result.rows_affected != 1 {
return Err(anyhow!(
"no dev server project with id {}",
dev_server_project_id
))?;
}
let status = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
let projects = self
.get_projects_for_dev_server_internal(dev_server_id, &tx)
.await?;
Ok((projects, status))
})
.await
}
pub async fn share_dev_server_project(
&self,
dev_server_project_id: DevServerProjectId,
dev_server_id: DevServerId,
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
) -> crate::Result<(
proto::DevServerProject,
UserId,
proto::DevServerProjectsUpdate,
)> {
self.transaction(|tx| async move {
let dev_server = dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no dev server with id {}", dev_server_id))?;
let dev_server_project = dev_server_project::Entity::find_by_id(dev_server_project_id)
.one(&*tx)
.await?
.ok_or_else(|| {
anyhow!("no dev server project with id {}", dev_server_project_id)
})?;
if dev_server_project.dev_server_id != dev_server_id {
return Err(anyhow!("dev server project shared from wrong server"))?;
}
let project = project::ActiveModel {
room_id: ActiveValue::Set(None),
host_user_id: ActiveValue::Set(None),
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
host_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
id: ActiveValue::NotSet,
hosted_project_id: ActiveValue::Set(None),
dev_server_project_id: ActiveValue::Set(Some(dev_server_project_id)),
}
.insert(&*tx)
.await?;
if !worktrees.is_empty() {
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
worktree::ActiveModel {
id: ActiveValue::set(worktree.id as i64),
project_id: ActiveValue::set(project.id),
abs_path: ActiveValue::set(worktree.abs_path.clone()),
root_name: ActiveValue::set(worktree.root_name.clone()),
visible: ActiveValue::set(worktree.visible),
scan_id: ActiveValue::set(0),
completed_scan_id: ActiveValue::set(0),
}
}))
.exec(&*tx)
.await?;
}
let status = self
.dev_server_projects_update_internal(dev_server.user_id, &tx)
.await?;
Ok((
dev_server_project.to_proto(Some(project)),
dev_server.user_id,
status,
))
})
.await
}
pub async fn reshare_dev_server_projects(
&self,
reshared_projects: &Vec<proto::UpdateProject>,
dev_server_id: DevServerId,
connection: ConnectionId,
) -> crate::Result<Vec<ResharedProject>> {
self.transaction(|tx| async move {
let mut ret = Vec::new();
for reshared_project in reshared_projects {
let project_id = ProjectId::from_proto(reshared_project.project_id);
let (project, dev_server_project) = project::Entity::find_by_id(project_id)
.find_also_related(dev_server_project::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("project does not exist"))?;
if dev_server_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) {
return Err(anyhow!("dev server project reshared from wrong server"))?;
}
let Ok(old_connection_id) = project.host_connection() else {
return Err(anyhow!("dev server project was not shared"))?;
};
project::Entity::update(project::ActiveModel {
id: ActiveValue::set(project_id),
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
host_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
..Default::default()
})
.exec(&*tx)
.await?;
let collaborators = project
.find_related(project_collaborator::Entity)
.all(&*tx)
.await?;
self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
.await?;
ret.push(super::ResharedProject {
id: project_id,
old_connection_id,
collaborators: collaborators
.iter()
.map(|collaborator| super::ProjectCollaborator {
connection_id: collaborator.connection(),
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
})
.collect(),
worktrees: reshared_project.worktrees.clone(),
});
}
Ok(ret)
})
.await
}
pub async fn rejoin_dev_server_projects(
&self,
rejoined_projects: &Vec<proto::RejoinProject>,
user_id: UserId,
connection_id: ConnectionId,
) -> crate::Result<Vec<RejoinedProject>> {
self.transaction(|tx| async move {
let mut ret = Vec::new();
for rejoined_project in rejoined_projects {
if let Some(project) = self
.rejoin_project_internal(&tx, rejoined_project, user_id, connection_id)
.await?
{
ret.push(project);
}
}
Ok(ret)
})
.await
}
}

View File

@@ -1 +1,222 @@
use rpc::proto;
use sea_orm::{
ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, IntoActiveModel, QueryFilter,
};
use super::{dev_server, dev_server_project, Database, DevServerId, UserId};
impl Database {
pub async fn get_dev_server(
&self,
dev_server_id: DevServerId,
) -> crate::Result<dev_server::Model> {
self.transaction(|tx| async move {
Ok(dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?)
})
.await
}
pub async fn get_dev_server_for_user(
&self,
dev_server_id: DevServerId,
user_id: UserId,
) -> crate::Result<dev_server::Model> {
self.transaction(|tx| async move {
let server = dev_server::Entity::find_by_id(dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow::anyhow!("no dev server with id {}", dev_server_id))?;
if server.user_id != user_id {
return Err(anyhow::anyhow!(
"dev server {} is not owned by user {}",
dev_server_id,
user_id
))?;
}
Ok(server)
})
.await
}
pub async fn get_dev_servers(&self, user_id: UserId) -> crate::Result<Vec<dev_server::Model>> {
self.transaction(|tx| async move {
Ok(dev_server::Entity::find()
.filter(dev_server::Column::UserId.eq(user_id))
.all(&*tx)
.await?)
})
.await
}
pub async fn dev_server_projects_update(
&self,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
self.dev_server_projects_update_internal(user_id, &tx).await
})
.await
}
pub async fn dev_server_projects_update_internal(
&self,
user_id: UserId,
tx: &DatabaseTransaction,
) -> crate::Result<proto::DevServerProjectsUpdate> {
let dev_servers = dev_server::Entity::find()
.filter(dev_server::Column::UserId.eq(user_id))
.all(tx)
.await?;
let dev_server_projects = dev_server_project::Entity::find()
.filter(
dev_server_project::Column::DevServerId
.is_in(dev_servers.iter().map(|d| d.id).collect::<Vec<_>>()),
)
.find_also_related(super::project::Entity)
.all(tx)
.await?;
Ok(proto::DevServerProjectsUpdate {
dev_servers: dev_servers
.into_iter()
.map(|d| d.to_proto(proto::DevServerStatus::Offline))
.collect(),
dev_server_projects: dev_server_projects
.into_iter()
.map(|(dev_server_project, project)| dev_server_project.to_proto(project))
.collect(),
})
}
pub async fn create_dev_server(
&self,
name: &str,
ssh_connection_string: Option<&str>,
hashed_access_token: &str,
user_id: UserId,
) -> crate::Result<(dev_server::Model, proto::DevServerProjectsUpdate)> {
self.transaction(|tx| async move {
if name.trim().is_empty() {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
let dev_server = dev_server::Entity::insert(dev_server::ActiveModel {
id: ActiveValue::NotSet,
hashed_token: ActiveValue::Set(hashed_access_token.to_string()),
name: ActiveValue::Set(name.trim().to_string()),
user_id: ActiveValue::Set(user_id),
ssh_connection_string: ActiveValue::Set(
ssh_connection_string.map(ToOwned::to_owned),
),
})
.exec_with_returning(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok((dev_server, dev_server_projects))
})
.await
}
pub async fn update_dev_server_token(
&self,
id: DevServerId,
hashed_token: &str,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
};
if dev_server.user_id != user_id {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
dev_server::Entity::update(dev_server::ActiveModel {
hashed_token: ActiveValue::Set(hashed_token.to_string()),
..dev_server.clone().into_active_model()
})
.exec(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok(dev_server_projects)
})
.await
}
pub async fn rename_dev_server(
&self,
id: DevServerId,
name: &str,
ssh_connection_string: Option<&str>,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
};
if dev_server.user_id != user_id || name.trim().is_empty() {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
dev_server::Entity::update(dev_server::ActiveModel {
name: ActiveValue::Set(name.trim().to_string()),
ssh_connection_string: ActiveValue::Set(
ssh_connection_string.map(ToOwned::to_owned),
),
..dev_server.clone().into_active_model()
})
.exec(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok(dev_server_projects)
})
.await
}
pub async fn delete_dev_server(
&self,
id: DevServerId,
user_id: UserId,
) -> crate::Result<proto::DevServerProjectsUpdate> {
self.transaction(|tx| async move {
let Some(dev_server) = dev_server::Entity::find_by_id(id).one(&*tx).await? else {
return Err(anyhow::anyhow!("no dev server with id {}", id))?;
};
if dev_server.user_id != user_id {
return Err(anyhow::anyhow!(proto::ErrorCode::Forbidden))?;
}
dev_server_project::Entity::delete_many()
.filter(dev_server_project::Column::DevServerId.eq(id))
.exec(&*tx)
.await?;
dev_server::Entity::delete(dev_server.into_active_model())
.exec(&*tx)
.await?;
let dev_server_projects = self
.dev_server_projects_update_internal(user_id, &tx)
.await?;
Ok(dev_server_projects)
})
.await
}
}

View File

@@ -32,6 +32,7 @@ impl Database {
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
is_ssh_project: bool,
dev_server_project_id: Option<DevServerProjectId>,
) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
@@ -60,6 +61,38 @@ impl Database {
return Err(anyhow!("guests cannot share projects"))?;
}
if let Some(dev_server_project_id) = dev_server_project_id {
let project = project::Entity::find()
.filter(project::Column::DevServerProjectId.eq(Some(dev_server_project_id)))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no remote project"))?;
let (_, dev_server) = dev_server_project::Entity::find_by_id(dev_server_project_id)
.find_also_related(dev_server::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no dev_server_project"))?;
if !dev_server.is_some_and(|dev_server| dev_server.user_id == participant.user_id) {
return Err(anyhow!("not your dev server"))?;
}
if project.room_id.is_some() {
return Err(anyhow!("project already shared"))?;
};
let project = project::Entity::update(project::ActiveModel {
room_id: ActiveValue::Set(Some(room_id)),
..project.into_active_model()
})
.exec(&*tx)
.await?;
let room = self.get_room(room_id, &tx).await?;
return Ok((project.id, room));
}
let project = project::ActiveModel {
room_id: ActiveValue::set(Some(participant.room_id)),
host_user_id: ActiveValue::set(Some(participant.user_id)),
@@ -69,6 +102,7 @@ impl Database {
))),
id: ActiveValue::NotSet,
hosted_project_id: ActiveValue::Set(None),
dev_server_project_id: ActiveValue::Set(None),
}
.insert(&*tx)
.await?;
@@ -122,6 +156,7 @@ impl Database {
&self,
project_id: ProjectId,
connection: ConnectionId,
user_id: Option<UserId>,
) -> Result<TransactionGuard<(bool, Option<proto::Room>, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
@@ -137,6 +172,25 @@ impl Database {
if project.host_connection()? == connection {
return Ok((true, room, guest_connection_ids));
}
if let Some(dev_server_project_id) = project.dev_server_project_id {
if let Some(user_id) = user_id {
if user_id
!= self
.owner_for_dev_server_project(dev_server_project_id, &tx)
.await?
{
Err(anyhow!("cannot unshare a project hosted by another user"))?
}
project::Entity::update(project::ActiveModel {
room_id: ActiveValue::Set(None),
..project.into_active_model()
})
.exec(&*tx)
.await?;
return Ok((false, room, guest_connection_ids));
}
}
Err(anyhow!("cannot unshare a project hosted by another user"))?
})
.await
@@ -218,16 +272,6 @@ impl Database {
update: &proto::UpdateWorktree,
connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
if update.removed_entries.len() > proto::MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE
|| update.updated_entries.len() > proto::MAX_WORKTREE_UPDATE_MAX_CHUNK_SIZE
{
return Err(anyhow!(
"invalid worktree update. removed entries: {}, updated entries: {}",
update.removed_entries.len(),
update.updated_entries.len()
))?;
}
let project_id = ProjectId::from_proto(update.project_id);
let worktree_id = update.worktree_id as i64;
self.project_transaction(project_id, |tx| async move {
@@ -273,7 +317,7 @@ impl Database {
inode: ActiveValue::set(entry.inode as i64),
mtime_seconds: ActiveValue::set(mtime.seconds as i64),
mtime_nanos: ActiveValue::set(mtime.nanos as i32),
canonical_path: ActiveValue::set(entry.canonical_path.clone()),
is_symlink: ActiveValue::set(entry.is_symlink),
is_ignored: ActiveValue::set(entry.is_ignored),
is_external: ActiveValue::set(entry.is_external),
git_status: ActiveValue::set(entry.git_status.map(|status| status as i64)),
@@ -294,7 +338,7 @@ impl Database {
worktree_entry::Column::Inode,
worktree_entry::Column::MtimeSeconds,
worktree_entry::Column::MtimeNanos,
worktree_entry::Column::CanonicalPath,
worktree_entry::Column::IsSymlink,
worktree_entry::Column::IsIgnored,
worktree_entry::Column::GitStatus,
worktree_entry::Column::ScanId,
@@ -579,6 +623,17 @@ impl Database {
.await
}
pub async fn find_dev_server_project(&self, id: DevServerProjectId) -> Result<project::Model> {
self.transaction(|tx| async move {
Ok(project::Entity::find()
.filter(project::Column::DevServerProjectId.eq(id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?)
})
.await
}
/// Adds the given connection to the specified project
/// in the current room.
pub async fn join_project(
@@ -589,7 +644,13 @@ impl Database {
) -> Result<TransactionGuard<(Project, ReplicaId)>> {
self.project_transaction(project_id, |tx| async move {
let (project, role) = self
.access_project(project_id, connection, Capability::ReadOnly, &tx)
.access_project(
project_id,
connection,
PrincipalId::UserId(user_id),
Capability::ReadOnly,
&tx,
)
.await?;
self.join_project_internal(project, user_id, connection, role, &tx)
.await
@@ -674,7 +735,7 @@ impl Database {
seconds: db_entry.mtime_seconds as u64,
nanos: db_entry.mtime_nanos as u32,
}),
canonical_path: db_entry.canonical_path,
is_symlink: db_entry.is_symlink,
is_ignored: db_entry.is_ignored,
is_external: db_entry.is_external,
git_status: db_entry.git_status.map(|status| status as i32),
@@ -780,6 +841,7 @@ impl Database {
worktree_id: None,
})
.collect(),
dev_server_project_id: project.dev_server_project_id,
};
Ok((project, replica_id as ReplicaId))
}
@@ -935,14 +997,29 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
principal_id: PrincipalId,
capability: Capability,
tx: &DatabaseTransaction,
) -> Result<(project::Model, ChannelRole)> {
let project = project::Entity::find_by_id(project_id)
let (mut project, dev_server_project) = project::Entity::find_by_id(project_id)
.find_also_related(dev_server_project::Entity)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
let user_id = match principal_id {
PrincipalId::DevServerId(_) => {
if project
.host_connection()
.is_ok_and(|connection| connection == connection_id)
{
return Ok((project, ChannelRole::Admin));
}
return Err(anyhow!("not the project host"))?;
}
PrincipalId::UserId(user_id) => user_id,
};
let role_from_room = if let Some(room_id) = project.room_id {
room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
@@ -953,8 +1030,34 @@ impl Database {
} else {
None
};
let role_from_dev_server = if let Some(dev_server_project) = dev_server_project {
let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such channel"))?;
if user_id == dev_server.user_id {
// If the user left the room "uncleanly" they may rejoin the
// remote project before leave_room runs. In that case, kick
// the project out of the room pre-emptively.
if role_from_room.is_none() {
project = project::Entity::update(project::ActiveModel {
room_id: ActiveValue::Set(None),
..project.into_active_model()
})
.exec(tx)
.await?;
}
Some(ChannelRole::Admin)
} else {
None
}
} else {
None
};
let role = role_from_room.unwrap_or(ChannelRole::Banned);
let role = role_from_dev_server
.or(role_from_room)
.unwrap_or(ChannelRole::Banned);
match capability {
Capability::ReadWrite => {
@@ -977,10 +1080,17 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
.access_project(project_id, connection_id, Capability::ReadOnly, &tx)
.access_project(
project_id,
connection_id,
PrincipalId::UserId(user_id),
Capability::ReadOnly,
&tx,
)
.await?;
project.host_connection()
})
@@ -993,10 +1103,17 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
.access_project(project_id, connection_id, Capability::ReadWrite, &tx)
.access_project(
project_id,
connection_id,
PrincipalId::UserId(user_id),
Capability::ReadWrite,
&tx,
)
.await?;
project.host_connection()
})
@@ -1004,16 +1121,47 @@ impl Database {
.map(|guard| guard.into_inner())
}
/// Returns the host connection for a request to join a shared project.
pub async fn host_for_owner_project_request(
&self,
project_id: ProjectId,
_connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, dev_server_project) = project::Entity::find_by_id(project_id)
.find_also_related(dev_server_project::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
let Some(dev_server_project) = dev_server_project else {
return Err(anyhow!("not a dev server project"))?;
};
let dev_server = dev_server::Entity::find_by_id(dev_server_project.dev_server_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such dev server"))?;
if dev_server.user_id != user_id {
return Err(anyhow!("not your project"))?;
}
project.host_connection()
})
.await
.map(|guard| guard.into_inner())
}
pub async fn connections_for_buffer_update(
&self,
project_id: ProjectId,
principal_id: PrincipalId,
connection_id: ConnectionId,
capability: Capability,
) -> Result<TransactionGuard<(ConnectionId, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
// Authorize
let (project, _) = self
.access_project(project_id, connection_id, capability, &tx)
.access_project(project_id, connection_id, principal_id, capability, &tx)
.await?;
let host_connection_id = project.host_connection()?;

View File

@@ -659,7 +659,7 @@ impl Database {
seconds: db_entry.mtime_seconds as u64,
nanos: db_entry.mtime_nanos as u32,
}),
canonical_path: db_entry.canonical_path,
is_symlink: db_entry.is_symlink,
is_ignored: db_entry.is_ignored,
is_external: db_entry.is_external,
git_status: db_entry.git_status.map(|status| status as i32),
@@ -858,6 +858,25 @@ impl Database {
.all(&*tx)
.await?;
// Find any projects in the room whose remote-project-id belongs to a dev
// server that this user owns, so they can be unshared below.
let dev_server_projects_for_user = self
.dev_server_project_ids_for_user(leaving_participant.user_id, &tx)
.await?;
let dev_server_projects_to_unshare = project::Entity::find()
.filter(
Condition::all()
.add(project::Column::RoomId.eq(room_id))
.add(
project::Column::DevServerProjectId
.is_in(dev_server_projects_for_user.clone()),
),
)
.all(&*tx)
.await?
.into_iter()
.map(|project| project.id)
.collect::<HashSet<_>>();
let mut left_projects = HashMap::default();
let mut collaborators = project_collaborator::Entity::find()
.filter(project_collaborator::Column::ProjectId.is_in(project_ids))
@@ -880,7 +899,9 @@ impl Database {
left_project.connection_ids.push(collaborator_connection_id);
}
if collaborator.is_host && collaborator.connection() == connection {
if (collaborator.is_host && collaborator.connection() == connection)
|| dev_server_projects_to_unshare.contains(&collaborator.project_id)
{
left_project.should_unshare = true;
}
}
@@ -923,6 +944,17 @@ impl Database {
.exec(&*tx)
.await?;
if !dev_server_projects_to_unshare.is_empty() {
project::Entity::update_many()
.filter(project::Column::Id.is_in(dev_server_projects_to_unshare))
.set(project::ActiveModel {
room_id: ActiveValue::Set(None),
..Default::default()
})
.exec(&*tx)
.await?;
}
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
let deleted = if room.participants.is_empty() {
let result = room::Entity::delete_by_id(room_id).exec(&*tx).await?;
@@ -1291,6 +1323,26 @@ impl Database {
project.worktree_root_names.push(db_worktree.root_name);
}
}
} else if let Some(dev_server_project_id) = db_project.dev_server_project_id {
let host = self
.owner_for_dev_server_project(dev_server_project_id, tx)
.await?;
if let Some((_, participant)) = participants
.iter_mut()
.find(|(_, v)| v.user_id == host.to_proto())
{
participant.projects.push(proto::ParticipantProject {
id: db_project.id.to_proto(),
worktree_root_names: Default::default(),
});
let project = participant.projects.last_mut().unwrap();
for db_worktree in db_worktrees {
if db_worktree.visible {
project.worktree_root_names.push(db_worktree.root_name);
}
}
}
}
}

View File

@@ -13,6 +13,8 @@ pub mod channel_message;
pub mod channel_message_mention;
pub mod contact;
pub mod contributor;
pub mod dev_server;
pub mod dev_server_project;
pub mod embedding;
pub mod extension;
pub mod extension_version;

View File

@@ -0,0 +1,39 @@
use crate::db::{DevServerId, UserId};
use rpc::proto;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "dev_servers")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: DevServerId,
pub name: String,
pub user_id: UserId,
pub hashed_token: String,
pub ssh_connection_string: Option<String>,
}
impl ActiveModelBehavior for ActiveModel {}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::dev_server_project::Entity")]
RemoteProject,
}
impl Related<super::dev_server_project::Entity> for Entity {
fn to() -> RelationDef {
Relation::RemoteProject.def()
}
}
impl Model {
pub fn to_proto(&self, status: proto::DevServerStatus) -> proto::DevServer {
proto::DevServer {
dev_server_id: self.id.to_proto(),
name: self.name.clone(),
status: status as i32,
ssh_connection_string: self.ssh_connection_string.clone(),
}
}
}

View File

@@ -0,0 +1,59 @@
use super::project;
use crate::db::{DevServerId, DevServerProjectId};
use rpc::proto;
use sea_orm::{entity::prelude::*, FromJsonQueryResult};
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "dev_server_projects")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: DevServerProjectId,
pub dev_server_id: DevServerId,
pub paths: JSONPaths,
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, FromJsonQueryResult)]
pub struct JSONPaths(pub Vec<String>);
impl ActiveModelBehavior for ActiveModel {}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_one = "super::project::Entity")]
Project,
#[sea_orm(
belongs_to = "super::dev_server::Entity",
from = "Column::DevServerId",
to = "super::dev_server::Column::Id"
)]
DevServer,
}
impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::Project.def()
}
}
impl Related<super::dev_server::Entity> for Entity {
fn to() -> RelationDef {
Relation::DevServer.def()
}
}
impl Model {
pub fn to_proto(&self, project: Option<project::Model>) -> proto::DevServerProject {
proto::DevServerProject {
id: self.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
dev_server_id: self.dev_server_id.to_proto(),
path: self.paths().first().cloned().unwrap_or_default(),
paths: self.paths().clone(),
}
}
pub fn paths(&self) -> &Vec<String> {
&self.paths.0
}
}
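to_proto above exposes both a single path (the first entry) and the full paths list, presumably so consumers that expect one path per project keep working. A tiny stand-in sketch of that mapping, with invented paths and simplified types:

// Simplified stand-in for the proto mapping above; paths are invented.
struct DevServerProjectProto {
    path: String,       // first path, kept alongside the full list
    paths: Vec<String>, // full list
}

fn to_proto(paths: &[String]) -> DevServerProjectProto {
    DevServerProjectProto {
        path: paths.first().cloned().unwrap_or_default(),
        paths: paths.to_vec(),
    }
}

fn main() {
    let proto = to_proto(&[
        "/home/alice/zed".to_string(),
        "/home/alice/notes".to_string(),
    ]);
    assert_eq!(proto.path, "/home/alice/zed");
    assert_eq!(proto.paths.len(), 2);
}

Because of unwrap_or_default, a project with an empty paths list serializes with an empty path string rather than failing.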

View File

@@ -1,4 +1,4 @@
use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
use crate::db::{DevServerProjectId, HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
use anyhow::anyhow;
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
@@ -13,6 +13,7 @@ pub struct Model {
pub host_connection_id: Option<i32>,
pub host_connection_server_id: Option<ServerId>,
pub hosted_project_id: Option<HostedProjectId>,
pub dev_server_project_id: Option<DevServerProjectId>,
}
impl Model {
@@ -56,6 +57,12 @@ pub enum Relation {
to = "super::hosted_project::Column::Id"
)]
HostedProject,
#[sea_orm(
belongs_to = "super::dev_server_project::Entity",
from = "Column::DevServerProjectId",
to = "super::dev_server_project::Column::Id"
)]
RemoteProject,
}
impl Related<super::user::Entity> for Entity {
@@ -94,4 +101,10 @@ impl Related<super::hosted_project::Entity> for Entity {
}
}
impl Related<super::dev_server_project::Entity> for Entity {
fn to() -> RelationDef {
Relation::RemoteProject.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -21,7 +21,6 @@ pub struct Model {
pub metrics_id: Uuid,
pub created_at: NaiveDateTime,
pub accepted_tos_at: Option<NaiveDateTime>,
pub custom_llm_monthly_allowance_in_cents: Option<i32>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -16,12 +16,12 @@ pub struct Model {
pub mtime_seconds: i64,
pub mtime_nanos: i32,
pub git_status: Option<i64>,
pub is_symlink: bool,
pub is_ignored: bool,
pub is_external: bool,
pub is_deleted: bool,
pub scan_id: i64,
pub is_fifo: bool,
pub canonical_path: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -540,18 +540,18 @@ async fn test_project_count(db: &Arc<Database>) {
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false)
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false)
db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, None)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
// Projects shared by admins aren't counted.
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false)
db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, None)
.await
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);

View File

@@ -198,6 +198,10 @@ impl Config {
}
}
pub fn is_llm_billing_enabled(&self) -> bool {
self.stripe_api_key.is_some()
}
#[cfg(test)]
pub fn test() -> Self {
Self {

View File

@@ -459,27 +459,30 @@ async fn check_usage_limit(
Utc::now(),
)
.await?;
let free_tier = claims.free_tier_monthly_spending_limit();
if usage.spending_this_month >= free_tier {
if !claims.has_llm_subscription {
return Err(Error::http(
StatusCode::PAYMENT_REQUIRED,
"Maximum spending limit reached for this month.".to_string(),
));
}
if state.config.is_llm_billing_enabled() {
if usage.spending_this_month >= FREE_TIER_MONTHLY_SPENDING_LIMIT {
if !claims.has_llm_subscription {
return Err(Error::http(
StatusCode::PAYMENT_REQUIRED,
"Maximum spending limit reached for this month.".to_string(),
));
}
if (usage.spending_this_month - free_tier) >= Cents(claims.max_monthly_spend_in_cents) {
return Err(Error::Http(
StatusCode::FORBIDDEN,
"Maximum spending limit reached for this month.".to_string(),
[(
HeaderName::from_static(MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME),
HeaderValue::from_static("true"),
)]
.into_iter()
.collect(),
));
if (usage.spending_this_month - FREE_TIER_MONTHLY_SPENDING_LIMIT)
>= Cents(claims.max_monthly_spend_in_cents)
{
return Err(Error::Http(
StatusCode::FORBIDDEN,
"Maximum spending limit reached for this month.".to_string(),
[(
HeaderName::from_static(MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME),
HeaderValue::from_static("true"),
)]
.into_iter()
.collect(),
));
}
}
}
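Condensed, the checks above say: requests pass while billing is disabled or spending is still under the free tier; once the free tier is exhausted, a missing subscription yields 402 and reaching the configured monthly cap yields 403. The sketch below restates that decision with invented amounts; Decision and the plain integers stand in for the HTTP status codes and the Cents type.

// Condensed sketch of the spending gate above; all amounts are invented.
#[derive(Debug, PartialEq)]
enum Decision {
    Allowed,
    PaymentRequired, // no subscription and the free tier is exhausted
    Forbidden,       // subscribed, but over the configured monthly cap
}

fn check(
    billing_enabled: bool,
    spent_cents: u32,
    free_tier_cents: u32,
    has_subscription: bool,
    max_monthly_spend_cents: u32,
) -> Decision {
    if !billing_enabled || spent_cents < free_tier_cents {
        return Decision::Allowed;
    }
    if !has_subscription {
        return Decision::PaymentRequired;
    }
    if spent_cents - free_tier_cents >= max_monthly_spend_cents {
        return Decision::Forbidden;
    }
    Decision::Allowed
}

fn main() {
    // Under the free tier: always allowed.
    assert_eq!(check(true, 500, 1000, false, 2000), Decision::Allowed);
    // Free tier exhausted without a subscription: payment required.
    assert_eq!(check(true, 1000, 1000, false, 2000), Decision::PaymentRequired);
    // Subscribed and within the configured cap: allowed.
    assert_eq!(check(true, 2500, 1000, true, 2000), Decision::Allowed);
    // Subscribed, but the excess reaches the cap: forbidden.
    assert_eq!(check(true, 3000, 1000, true, 2000), Decision::Forbidden);
}

Note the >= on the cap check: a user whose spending above the free tier exactly equals their configured maximum is already blocked.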
@@ -624,6 +627,7 @@ where
impl<S> Drop for TokenCountingStream<S> {
fn drop(&mut self) {
let state = self.state.clone();
let is_llm_billing_enabled = state.config.is_llm_billing_enabled();
let claims = self.claims.clone();
let provider = self.provider;
let model = std::mem::take(&mut self.model);
@@ -637,9 +641,15 @@ impl<S> Drop for TokenCountingStream<S> {
provider,
&model,
tokens,
claims.has_llm_subscription,
// We're passing `false` here if LLM billing is not enabled
// so that we don't write any records to the
// `billing_events` table until we're ready to bill users.
if is_llm_billing_enabled {
claims.has_llm_subscription
} else {
false
},
Cents(claims.max_monthly_spend_in_cents),
claims.free_tier_monthly_spending_limit(),
Utc::now(),
)
.await

View File

@@ -1,5 +1,5 @@
use crate::db::UserId;
use crate::llm::Cents;
use crate::{db::UserId, llm::FREE_TIER_MONTHLY_SPENDING_LIMIT};
use chrono::{Datelike, Duration};
use futures::StreamExt as _;
use rpc::LanguageModelProvider;
@@ -299,7 +299,6 @@ impl LlmDatabase {
tokens: TokenUsage,
has_llm_subscription: bool,
max_monthly_spend: Cents,
free_tier_monthly_spending_limit: Cents,
now: DateTimeUtc,
) -> Result<Usage> {
self.transaction(|tx| async move {
@@ -411,9 +410,9 @@ impl LlmDatabase {
);
if !is_staff
&& spending_this_month > free_tier_monthly_spending_limit
&& spending_this_month > FREE_TIER_MONTHLY_SPENDING_LIMIT
&& has_llm_subscription
&& (spending_this_month - free_tier_monthly_spending_limit) <= max_monthly_spend
&& (spending_this_month - FREE_TIER_MONTHLY_SPENDING_LIMIT) <= max_monthly_spend
{
billing_event::ActiveModel {
id: ActiveValue::not_set(),

View File

@@ -66,7 +66,6 @@ async fn test_billing_limit_exceeded(db: &mut LlmDatabase) {
usage,
true,
max_monthly_spend,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -104,7 +103,6 @@ async fn test_billing_limit_exceeded(db: &mut LlmDatabase) {
usage_2,
true,
max_monthly_spend,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -134,7 +132,6 @@ async fn test_billing_limit_exceeded(db: &mut LlmDatabase) {
model,
usage_exceeding,
true,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
max_monthly_spend,
now,
)

View File

@@ -1,4 +1,3 @@
use crate::llm::FREE_TIER_MONTHLY_SPENDING_LIMIT;
use crate::{
db::UserId,
llm::db::{
@@ -50,7 +49,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -70,7 +68,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -127,7 +124,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -184,7 +180,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -227,7 +222,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await
@@ -265,7 +259,6 @@ async fn test_tracking_usage(db: &mut LlmDatabase) {
},
false,
Cents::ZERO,
FREE_TIER_MONTHLY_SPENDING_LIMIT,
now,
)
.await

View File

@@ -1,7 +1,8 @@
use crate::db::user;
use crate::llm::{DEFAULT_MAX_MONTHLY_SPEND, FREE_TIER_MONTHLY_SPENDING_LIMIT};
use crate::Cents;
use crate::{db::billing_preference, Config};
use crate::llm::DEFAULT_MAX_MONTHLY_SPEND;
use crate::{
db::{billing_preference, UserId},
Config,
};
use anyhow::{anyhow, Result};
use chrono::Utc;
use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation};
@@ -21,7 +22,6 @@ pub struct LlmTokenClaims {
pub has_llm_closed_beta_feature_flag: bool,
pub has_llm_subscription: bool,
pub max_monthly_spend_in_cents: u32,
pub custom_llm_monthly_allowance_in_cents: Option<u32>,
pub plan: rpc::proto::Plan,
}
@@ -30,7 +30,8 @@ const LLM_TOKEN_LIFETIME: Duration = Duration::from_secs(60 * 60);
impl LlmTokenClaims {
#[allow(clippy::too_many_arguments)]
pub fn create(
user: &user::Model,
user_id: UserId,
github_user_login: String,
is_staff: bool,
billing_preferences: Option<billing_preference::Model>,
has_llm_closed_beta_feature_flag: bool,
@@ -48,8 +49,8 @@ impl LlmTokenClaims {
iat: now.timestamp() as u64,
exp: (now + LLM_TOKEN_LIFETIME).timestamp() as u64,
jti: uuid::Uuid::new_v4().to_string(),
user_id: user.id.to_proto(),
github_user_login: user.github_login.clone(),
user_id: user_id.to_proto(),
github_user_login,
is_staff,
has_llm_closed_beta_feature_flag,
has_llm_subscription,
@@ -57,9 +58,6 @@ impl LlmTokenClaims {
.map_or(DEFAULT_MAX_MONTHLY_SPEND.0, |preferences| {
preferences.max_monthly_llm_usage_spending_in_cents as u32
}),
custom_llm_monthly_allowance_in_cents: user
.custom_llm_monthly_allowance_in_cents
.map(|allowance| allowance as u32),
plan,
};
@@ -91,12 +89,6 @@ impl LlmTokenClaims {
}
}
}
pub fn free_tier_monthly_spending_limit(&self) -> Cents {
self.custom_llm_monthly_allowance_in_cents
.map(Cents)
.unwrap_or(FREE_TIER_MONTHLY_SPENDING_LIMIT)
}
}
#[derive(Error, Debug)]

View File

@@ -84,8 +84,6 @@ async fn main() -> Result<()> {
let config = envy::from_env::<Config>().expect("error loading config");
init_tracing(&config);
init_panic_hook();
let mut app = Router::new()
.route("/", get(handle_root))
.route("/healthz", get(handle_liveness_probe))
@@ -380,20 +378,3 @@ pub fn init_tracing(config: &Config) -> Option<()> {
None
}
fn init_panic_hook() {
std::panic::set_hook(Box::new(move |panic_info| {
let panic_message = match panic_info.payload().downcast_ref::<&'static str>() {
Some(message) => *message,
None => match panic_info.payload().downcast_ref::<String>() {
Some(message) => message.as_str(),
None => "Box<Any>",
},
};
let backtrace = std::backtrace::Backtrace::force_capture();
let location = panic_info
.location()
.map(|loc| format!("{}:{}", loc.file(), loc.line()));
tracing::error!(panic = true, ?location, %panic_message, %backtrace, "Server Panic");
}));
}

File diff suppressed because it is too large

View File

@@ -1,7 +1,7 @@
use crate::db::{ChannelId, ChannelRole, UserId};
use crate::db::{ChannelId, ChannelRole, DevServerId, PrincipalId, UserId};
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashMap, HashSet};
use rpc::ConnectionId;
use rpc::{proto, ConnectionId};
use semantic_version::SemanticVersion;
use serde::Serialize;
use std::fmt;
@@ -11,7 +11,9 @@ use tracing::instrument;
pub struct ConnectionPool {
connections: BTreeMap<ConnectionId, Connection>,
connected_users: BTreeMap<UserId, ConnectedPrincipal>,
connected_dev_servers: BTreeMap<DevServerId, ConnectionId>,
channels: ChannelPool,
offline_dev_servers: HashSet<DevServerId>,
}
#[derive(Default, Serialize)]
@@ -30,13 +32,13 @@ impl fmt::Display for ZedVersion {
impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
self.0 >= SemanticVersion::new(0, 157, 0)
self.0 >= SemanticVersion::new(0, 151, 0)
}
}
#[derive(Serialize)]
pub struct Connection {
pub user_id: UserId,
pub principal_id: PrincipalId,
pub admin: bool,
pub zed_version: ZedVersion,
}
@@ -45,6 +47,7 @@ impl ConnectionPool {
pub fn reset(&mut self) {
self.connections.clear();
self.connected_users.clear();
self.connected_dev_servers.clear();
self.channels.clear();
}
@@ -63,7 +66,7 @@ impl ConnectionPool {
self.connections.insert(
connection_id,
Connection {
user_id,
principal_id: PrincipalId::UserId(user_id),
admin,
zed_version,
},
@@ -72,6 +75,25 @@ impl ConnectionPool {
connected_user.connection_ids.insert(connection_id);
}
pub fn add_dev_server(
&mut self,
connection_id: ConnectionId,
dev_server_id: DevServerId,
zed_version: ZedVersion,
) {
self.connections.insert(
connection_id,
Connection {
principal_id: PrincipalId::DevServerId(dev_server_id),
admin: false,
zed_version,
},
);
self.connected_dev_servers
.insert(dev_server_id, connection_id);
}
#[instrument(skip(self))]
pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
let connection = self
@@ -79,18 +101,28 @@ impl ConnectionPool {
.get_mut(&connection_id)
.ok_or_else(|| anyhow!("no such connection"))?;
let user_id = connection.user_id;
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
self.channels.remove_user(&user_id);
};
match connection.principal_id {
PrincipalId::UserId(user_id) => {
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
self.channels.remove_user(&user_id);
}
}
PrincipalId::DevServerId(dev_server_id) => {
self.connected_dev_servers.remove(&dev_server_id);
self.offline_dev_servers.remove(&dev_server_id);
}
}
self.connections.remove(&connection_id).unwrap();
Ok(())
}
pub fn set_dev_server_offline(&mut self, dev_server_id: DevServerId) {
self.offline_dev_servers.insert(dev_server_id);
}
pub fn connections(&self) -> impl Iterator<Item = &Connection> {
self.connections.values()
}
@@ -115,6 +147,42 @@ impl ConnectionPool {
.copied()
}
pub fn dev_server_status(&self, dev_server_id: DevServerId) -> proto::DevServerStatus {
if self.dev_server_connection_id(dev_server_id).is_some()
&& !self.offline_dev_servers.contains(&dev_server_id)
{
proto::DevServerStatus::Online
} else {
proto::DevServerStatus::Offline
}
}
pub fn dev_server_connection_id(&self, dev_server_id: DevServerId) -> Option<ConnectionId> {
self.connected_dev_servers.get(&dev_server_id).copied()
}
pub fn online_dev_server_connection_id(
&self,
dev_server_id: DevServerId,
) -> Result<ConnectionId> {
match self.connected_dev_servers.get(&dev_server_id) {
Some(cid) => Ok(*cid),
None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
}
}
pub fn dev_server_connection_id_supporting(
&self,
dev_server_id: DevServerId,
required: ZedVersion,
) -> Result<ConnectionId> {
match self.connected_dev_servers.get(&dev_server_id) {
Some(cid) if self.connections[cid].zed_version >= required => Ok(*cid),
Some(_) => Err(anyhow!(proto::ErrorCode::RemoteUpgradeRequired)),
None => Err(anyhow!(proto::ErrorCode::DevServerOffline)),
}
}
pub fn channel_user_ids(
&self,
channel_id: ChannelId,
@@ -159,22 +227,39 @@ impl ConnectionPool {
#[cfg(test)]
pub fn check_invariants(&self) {
for (connection_id, connection) in &self.connections {
assert!(self
.connected_users
.get(&connection.user_id)
.unwrap()
.connection_ids
.contains(connection_id));
match &connection.principal_id {
PrincipalId::UserId(user_id) => {
assert!(self
.connected_users
.get(user_id)
.unwrap()
.connection_ids
.contains(connection_id));
}
PrincipalId::DevServerId(dev_server_id) => {
assert_eq!(
self.connected_dev_servers.get(dev_server_id).unwrap(),
connection_id
);
}
}
}
for (user_id, state) in &self.connected_users {
for connection_id in &state.connection_ids {
assert_eq!(
self.connections.get(connection_id).unwrap().user_id,
*user_id
self.connections.get(connection_id).unwrap().principal_id,
PrincipalId::UserId(*user_id)
);
}
}
for (dev_server_id, connection_id) in &self.connected_dev_servers {
assert_eq!(
self.connections.get(connection_id).unwrap().principal_id,
PrincipalId::DevServerId(*dev_server_id)
);
}
}
}
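The hunk above replaces the per-connection user_id with a PrincipalId that is either a user or a dev server, and remove_connection now branches on that variant. As a rough standalone sketch of the same bookkeeping pattern — illustrative types only, not Zed's real ConnectionPool; names such as Pool, add_user, and remove are invented here:

use std::collections::{BTreeMap, BTreeSet};

// Illustrative stand-ins for the id newtypes used in the diff.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct UserId(u64);
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct DevServerId(u64);
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct ConnectionId(u64);

// A connection belongs to a "principal": either a user or a dev server.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum PrincipalId {
    UserId(UserId),
    DevServerId(DevServerId),
}

#[derive(Default)]
struct Pool {
    connections: BTreeMap<ConnectionId, PrincipalId>,
    user_connections: BTreeMap<UserId, BTreeSet<ConnectionId>>,
    dev_server_connections: BTreeMap<DevServerId, ConnectionId>,
}

impl Pool {
    fn add_user(&mut self, connection: ConnectionId, user: UserId) {
        self.connections.insert(connection, PrincipalId::UserId(user));
        self.user_connections.entry(user).or_default().insert(connection);
    }

    fn add_dev_server(&mut self, connection: ConnectionId, dev_server: DevServerId) {
        self.connections.insert(connection, PrincipalId::DevServerId(dev_server));
        self.dev_server_connections.insert(dev_server, connection);
    }

    // Removal branches on the principal kind, mirroring the match added above.
    fn remove(&mut self, connection: ConnectionId) {
        match self.connections.remove(&connection) {
            Some(PrincipalId::UserId(user)) => {
                if let Some(ids) = self.user_connections.get_mut(&user) {
                    ids.remove(&connection);
                    if ids.is_empty() {
                        self.user_connections.remove(&user);
                    }
                }
            }
            Some(PrincipalId::DevServerId(dev_server)) => {
                self.dev_server_connections.remove(&dev_server);
            }
            None => {}
        }
    }
}

fn main() {
    let mut pool = Pool::default();
    pool.add_user(ConnectionId(1), UserId(10));
    pool.add_dev_server(ConnectionId(2), DevServerId(20));
    pool.remove(ConnectionId(2));
    assert!(pool.dev_server_connections.is_empty());
}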

View File

@@ -8,6 +8,7 @@ mod channel_buffer_tests;
mod channel_guest_tests;
mod channel_message_tests;
mod channel_tests;
mod dev_server_tests;
mod editor_tests;
mod following_tests;
mod integration_tests;

View File

@@ -95,9 +95,7 @@ async fn test_channel_guest_promotion(cx_a: &mut TestAppContext, cx_b: &mut Test
let room_b = cx_b
.read(ActiveCall::global)
.update(cx_b, |call, _| call.room().unwrap().clone());
cx_b.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_b.simulate_keystrokes("1 enter");
cx_b.simulate_keystrokes("cmd-p 1 enter");
let (project_b, editor_b) = workspace_b.update(cx_b, |workspace, cx| {
(

View File

@@ -0,0 +1,643 @@
use std::{path::Path, sync::Arc};
use call::ActiveCall;
use editor::Editor;
use fs::Fs;
use gpui::{TestAppContext, VisualTestContext, WindowHandle};
use rpc::{proto::DevServerStatus, ErrorCode, ErrorExt};
use serde_json::json;
use workspace::{AppState, Workspace};
use crate::tests::{following_tests::join_channel, TestServer};
use super::TestClient;
#[gpui::test]
async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client) = TestServer::start1(cx).await;
let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone());
let resp = store
.update(cx, |store, cx| {
store.create_dev_server("server-1".to_string(), None, cx)
})
.await
.unwrap();
store.update(cx, |store, _| {
assert_eq!(store.dev_servers().len(), 1);
assert_eq!(store.dev_servers()[0].name, "server-1");
assert_eq!(store.dev_servers()[0].status, DevServerStatus::Offline);
});
let dev_server = server.create_dev_server(resp.access_token, cx2).await;
cx.executor().run_until_parked();
store.update(cx, |store, _| {
assert_eq!(store.dev_servers()[0].status, DevServerStatus::Online);
});
dev_server
.fs()
.insert_tree(
"/remote",
json!({
"1.txt": "remote\nremote\nremote",
"2.js": "function two() { return 2; }",
"3.rs": "mod test",
}),
)
.await;
store
.update(cx, |store, cx| {
store.create_dev_server_project(
client::DevServerId(resp.dev_server_id),
"/remote".to_string(),
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let remote_workspace = store
.update(cx, |store, cx| {
let projects = store.dev_server_projects();
assert_eq!(projects.len(), 1);
assert_eq!(projects[0].paths, vec!["/remote"]);
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),
client.app_state.clone(),
None,
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let cx = VisualTestContext::from_window(remote_workspace.into(), cx).as_mut();
cx.simulate_keystrokes("cmd-p 1 enter");
let editor = remote_workspace
.update(cx, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
})
.unwrap();
editor.update(cx, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
});
cx.simulate_input("wow!");
cx.simulate_keystrokes("cmd-s");
let content = dev_server
.fs()
.load(Path::new("/remote/1.txt"))
.await
.unwrap();
assert_eq!(content, "wow!remote\nremote\nremote\n");
}
#[gpui::test]
async fn test_dev_server_env_files(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.executor().run_until_parked();
let cx1 = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();
cx1.simulate_keystrokes("cmd-p . e enter");
let editor = remote_workspace
.update(cx1, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
})
.unwrap();
editor.update(cx1, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "SECRET");
});
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
let (workspace2, cx2) = client2.active_workspace(cx2);
let editor = workspace2.update(cx2, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
});
// TODO: it'd be nice to hide .env files from other people
editor.update(cx2, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "SECRET");
});
}
async fn create_dev_server_project(
server: &TestServer,
client_app_state: Arc<AppState>,
cx: &mut TestAppContext,
cx_devserver: &mut TestAppContext,
) -> (TestClient, WindowHandle<Workspace>) {
let store = cx.update(|cx| dev_server_projects::Store::global(cx).clone());
let resp = store
.update(cx, |store, cx| {
store.create_dev_server("server-1".to_string(), None, cx)
})
.await
.unwrap();
let dev_server = server
.create_dev_server(resp.access_token, cx_devserver)
.await;
cx.executor().run_until_parked();
dev_server
.fs()
.insert_tree(
"/remote",
json!({
"1.txt": "remote\nremote\nremote",
".env": "SECRET",
}),
)
.await;
store
.update(cx, |store, cx| {
store.create_dev_server_project(
client::DevServerId(resp.dev_server_id),
"/remote".to_string(),
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let workspace = store
.update(cx, |store, cx| {
let projects = store.dev_server_projects();
assert_eq!(projects.len(), 1);
assert_eq!(projects[0].paths, vec!["/remote"]);
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),
client_app_state,
None,
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
(dev_server, workspace)
}
#[gpui::test]
async fn test_dev_server_leave_room(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
cx1.update(|cx| ActiveCall::global(cx).update(cx, |active_call, cx| active_call.hang_up(cx)))
.await
.unwrap();
cx1.executor().run_until_parked();
let (workspace, cx2) = client2.active_workspace(cx2);
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
}
#[gpui::test]
async fn test_dev_server_delete(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
store.delete_dev_server_project(store.dev_server_projects().first().unwrap().id, cx)
})
})
.await
.unwrap();
cx1.executor().run_until_parked();
let (workspace, cx2) = client2.active_workspace(cx2);
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(store.dev_server_projects().len(), 0);
})
})
}
#[gpui::test]
async fn test_dev_server_rename(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
store.rename_dev_server(
store.dev_servers().first().unwrap().id,
"name-edited".to_string(),
None,
cx,
)
})
})
.await
.unwrap();
cx1.executor().run_until_parked();
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(store.dev_servers().first().unwrap().name, "name-edited");
})
})
}
#[gpui::test]
async fn test_dev_server_refresh_access_token(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
cx4: &mut gpui::TestAppContext,
) {
let (server, client1, client2, channel_id) = TestServer::start2(cx1, cx2).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
join_channel(channel_id, &client2, cx2).await.unwrap();
cx2.executor().run_until_parked();
// Regenerate the access token
let new_token_response = cx1
.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, cx| {
store.regenerate_dev_server_token(store.dev_servers().first().unwrap().id, cx)
})
})
.await
.unwrap();
cx1.executor().run_until_parked();
// Assert that the other client was disconnected
let (workspace, cx2) = client2.active_workspace(cx2);
cx2.update(|cx| assert!(workspace.read(cx).project().read(cx).is_disconnected(cx)));
// Assert that the owner of the dev server does not see the dev server as online anymore
let (workspace, cx1) = client1.active_workspace(cx1);
cx1.update(|cx| {
assert!(workspace.read(cx).project().read(cx).is_disconnected(cx));
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(
store.dev_servers().first().unwrap().status,
DevServerStatus::Offline
);
})
});
// Reconnect the dev server with the new token
let _dev_server = server
.create_dev_server(new_token_response.access_token, cx4)
.await;
cx1.executor().run_until_parked();
// Assert that the dev server is online again
cx1.update(|cx| {
dev_server_projects::Store::global(cx).update(cx, |store, _| {
assert_eq!(store.dev_servers().len(), 1);
assert_eq!(
store.dev_servers().first().unwrap().status,
DevServerStatus::Online
);
})
});
}
#[gpui::test]
async fn test_dev_server_reconnect(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (mut server, client1) = TestServer::start1(cx1).await;
let channel_id = server
.make_channel("test", None, (&client1, cx1), &mut [])
.await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx3).await;
cx1.update(|cx| {
workspace::join_channel(
channel_id,
client1.app_state.clone(),
Some(remote_workspace),
cx,
)
})
.await
.unwrap();
cx1.executor().run_until_parked();
remote_workspace
.update(cx1, |ws, cx| {
assert!(ws.project().read(cx).is_shared());
})
.unwrap();
drop(client1);
let client2 = server.create_client(cx2, "user_a").await;
let store = cx2.update(|cx| dev_server_projects::Store::global(cx).clone());
store
.update(cx2, |store, cx| {
let projects = store.dev_server_projects();
workspace::join_dev_server_project(
projects[0].id,
projects[0].project_id.unwrap(),
client2.app_state.clone(),
None,
cx,
)
})
.await
.unwrap();
}
#[gpui::test]
async fn test_dev_server_restart(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client1) = TestServer::start1(cx1).await;
let (_dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
let cx = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();
server.reset().await;
cx.run_until_parked();
cx.simulate_keystrokes("cmd-p 1 enter");
remote_workspace
.update(cx, |ws, cx| {
ws.active_item_as::<Editor>(cx)
.unwrap()
.update(cx, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
})
})
.unwrap();
}
#[gpui::test]
async fn test_create_dev_server_project_path_validation(
cx1: &mut gpui::TestAppContext,
cx2: &mut gpui::TestAppContext,
cx3: &mut gpui::TestAppContext,
) {
let (server, client1) = TestServer::start1(cx1).await;
let _channel_id = server
.make_channel("test", None, (&client1, cx1), &mut [])
.await;
// Creating a project with a path that does exist should not fail
let (_dev_server, _) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
cx1.executor().run_until_parked();
let store = cx1.update(|cx| dev_server_projects::Store::global(cx).clone());
let resp = store
.update(cx1, |store, cx| {
store.create_dev_server("server-2".to_string(), None, cx)
})
.await
.unwrap();
cx1.executor().run_until_parked();
let _dev_server = server.create_dev_server(resp.access_token, cx3).await;
cx1.executor().run_until_parked();
// Creating a remote project with a path that does not exist should fail
let result = store
.update(cx1, |store, cx| {
store.create_dev_server_project(
client::DevServerId(resp.dev_server_id),
"/notfound".to_string(),
cx,
)
})
.await;
cx1.executor().run_until_parked();
let error = result.unwrap_err();
assert!(matches!(
error.error_code(),
ErrorCode::DevServerProjectPathDoesNotExist
));
}
#[gpui::test]
async fn test_save_as_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client1) = TestServer::start1(cx1).await;
// Creating a project with a path that does exist should not fail
let (dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1);
cx.simulate_keystrokes("cmd-p 1 enter");
cx.simulate_keystrokes("cmd-shift-s");
cx.simulate_input("2.txt");
cx.simulate_keystrokes("enter");
cx.executor().run_until_parked();
let title = remote_workspace
.update(&mut cx, |ws, cx| {
let active_item = ws.active_item(cx).unwrap();
active_item.tab_description(0, cx).unwrap()
})
.unwrap();
assert_eq!(title, "2.txt");
let path = Path::new("/remote/2.txt");
assert_eq!(
dev_server.fs().load(path).await.unwrap(),
"remote\nremote\nremote"
);
}
#[gpui::test]
async fn test_new_file_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client1) = TestServer::start1(cx1).await;
// Creating a project with a path that does exist should not fail
let (dev_server, remote_workspace) =
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
let mut cx = VisualTestContext::from_window(remote_workspace.into(), cx1);
cx.simulate_keystrokes("cmd-n");
cx.simulate_input("new!");
cx.simulate_keystrokes("cmd-shift-s");
cx.simulate_input("2.txt");
cx.simulate_keystrokes("enter");
cx.executor().run_until_parked();
let title = remote_workspace
.update(&mut cx, |ws, cx| {
ws.active_item(cx).unwrap().tab_description(0, cx).unwrap()
})
.unwrap();
assert_eq!(title, "2.txt");
let path = Path::new("/remote/2.txt");
assert_eq!(dev_server.fs().load(path).await.unwrap(), "new!");
}

View File

@@ -12,7 +12,6 @@ use editor::{
test::editor_test_context::{AssertionContextManager, EditorTestContext},
Editor,
};
use fs::Fs;
use futures::StreamExt;
use gpui::{TestAppContext, UpdateGlobal, VisualContext, VisualTestContext};
use indoc::indoc;
@@ -31,7 +30,7 @@ use serde_json::json;
use settings::SettingsStore;
use std::{
ops::Range,
path::{Path, PathBuf},
path::Path,
sync::{
atomic::{self, AtomicBool, AtomicUsize},
Arc,
@@ -61,7 +60,7 @@ async fn test_host_disconnect(
.fs()
.insert_tree(
"/a",
json!({
serde_json::json!({
"a.txt": "a-contents",
"b.txt": "b-contents",
}),
@@ -2153,295 +2152,6 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
});
}
#[gpui::test(iterations = 30)]
async fn test_collaborating_with_editorconfig(
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
let mut server = TestServer::start(cx_a.executor()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
cx_b.update(editor::init);
// Set up a fake language server.
client_a.language_registry().add(rust_lang());
client_a
.fs()
.insert_tree(
"/a",
json!({
"src": {
"main.rs": "mod other;\nfn main() { let foo = other::foo(); }",
"other_mod": {
"other.rs": "pub fn foo() -> usize {\n 4\n}",
".editorconfig": "",
},
},
".editorconfig": "[*]\ntab_width = 2\n",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let main_buffer_a = project_a
.update(cx_a, |p, cx| {
p.open_buffer((worktree_id, "src/main.rs"), cx)
})
.await
.unwrap();
let other_buffer_a = project_a
.update(cx_a, |p, cx| {
p.open_buffer((worktree_id, "src/other_mod/other.rs"), cx)
})
.await
.unwrap();
let cx_a = cx_a.add_empty_window();
let main_editor_a =
cx_a.new_view(|cx| Editor::for_buffer(main_buffer_a, Some(project_a.clone()), cx));
let other_editor_a =
cx_a.new_view(|cx| Editor::for_buffer(other_buffer_a, Some(project_a), cx));
let mut main_editor_cx_a = EditorTestContext {
cx: cx_a.clone(),
window: cx_a.handle(),
editor: main_editor_a,
assertion_cx: AssertionContextManager::new(),
};
let mut other_editor_cx_a = EditorTestContext {
cx: cx_a.clone(),
window: cx_a.handle(),
editor: other_editor_a,
assertion_cx: AssertionContextManager::new(),
};
// Join the project as client B.
let project_b = client_b.join_remote_project(project_id, cx_b).await;
let main_buffer_b = project_b
.update(cx_b, |p, cx| {
p.open_buffer((worktree_id, "src/main.rs"), cx)
})
.await
.unwrap();
let other_buffer_b = project_b
.update(cx_b, |p, cx| {
p.open_buffer((worktree_id, "src/other_mod/other.rs"), cx)
})
.await
.unwrap();
let cx_b = cx_b.add_empty_window();
let main_editor_b =
cx_b.new_view(|cx| Editor::for_buffer(main_buffer_b, Some(project_b.clone()), cx));
let other_editor_b =
cx_b.new_view(|cx| Editor::for_buffer(other_buffer_b, Some(project_b.clone()), cx));
let mut main_editor_cx_b = EditorTestContext {
cx: cx_b.clone(),
window: cx_b.handle(),
editor: main_editor_b,
assertion_cx: AssertionContextManager::new(),
};
let mut other_editor_cx_b = EditorTestContext {
cx: cx_b.clone(),
window: cx_b.handle(),
editor: other_editor_b,
assertion_cx: AssertionContextManager::new(),
};
let initial_main = indoc! {"
ˇmod other;
fn main() { let foo = other::foo(); }"};
let initial_other = indoc! {"
ˇpub fn foo() -> usize {
4
}"};
let first_tabbed_main = indoc! {"
ˇmod other;
fn main() { let foo = other::foo(); }"};
tab_undo_assert(
&mut main_editor_cx_a,
&mut main_editor_cx_b,
initial_main,
first_tabbed_main,
true,
);
tab_undo_assert(
&mut main_editor_cx_a,
&mut main_editor_cx_b,
initial_main,
first_tabbed_main,
false,
);
let first_tabbed_other = indoc! {"
ˇpub fn foo() -> usize {
4
}"};
tab_undo_assert(
&mut other_editor_cx_a,
&mut other_editor_cx_b,
initial_other,
first_tabbed_other,
true,
);
tab_undo_assert(
&mut other_editor_cx_a,
&mut other_editor_cx_b,
initial_other,
first_tabbed_other,
false,
);
client_a
.fs()
.atomic_write(
PathBuf::from("/a/src/.editorconfig"),
"[*]\ntab_width = 3\n".to_owned(),
)
.await
.unwrap();
cx_a.run_until_parked();
cx_b.run_until_parked();
let second_tabbed_main = indoc! {"
ˇmod other;
fn main() { let foo = other::foo(); }"};
tab_undo_assert(
&mut main_editor_cx_a,
&mut main_editor_cx_b,
initial_main,
second_tabbed_main,
true,
);
tab_undo_assert(
&mut main_editor_cx_a,
&mut main_editor_cx_b,
initial_main,
second_tabbed_main,
false,
);
let second_tabbed_other = indoc! {"
ˇpub fn foo() -> usize {
4
}"};
tab_undo_assert(
&mut other_editor_cx_a,
&mut other_editor_cx_b,
initial_other,
second_tabbed_other,
true,
);
tab_undo_assert(
&mut other_editor_cx_a,
&mut other_editor_cx_b,
initial_other,
second_tabbed_other,
false,
);
let editorconfig_buffer_b = project_b
.update(cx_b, |p, cx| {
p.open_buffer((worktree_id, "src/other_mod/.editorconfig"), cx)
})
.await
.unwrap();
editorconfig_buffer_b.update(cx_b, |buffer, cx| {
buffer.set_text("[*.rs]\ntab_width = 6\n", cx);
});
project_b
.update(cx_b, |project, cx| {
project.save_buffer(editorconfig_buffer_b.clone(), cx)
})
.await
.unwrap();
cx_a.run_until_parked();
cx_b.run_until_parked();
tab_undo_assert(
&mut main_editor_cx_a,
&mut main_editor_cx_b,
initial_main,
second_tabbed_main,
true,
);
tab_undo_assert(
&mut main_editor_cx_a,
&mut main_editor_cx_b,
initial_main,
second_tabbed_main,
false,
);
let third_tabbed_other = indoc! {"
ˇpub fn foo() -> usize {
4
}"};
tab_undo_assert(
&mut other_editor_cx_a,
&mut other_editor_cx_b,
initial_other,
third_tabbed_other,
true,
);
tab_undo_assert(
&mut other_editor_cx_a,
&mut other_editor_cx_b,
initial_other,
third_tabbed_other,
false,
);
}
#[track_caller]
fn tab_undo_assert(
cx_a: &mut EditorTestContext,
cx_b: &mut EditorTestContext,
expected_initial: &str,
expected_tabbed: &str,
a_tabs: bool,
) {
cx_a.assert_editor_state(expected_initial);
cx_b.assert_editor_state(expected_initial);
if a_tabs {
cx_a.update_editor(|editor, cx| {
editor.tab(&editor::actions::Tab, cx);
});
} else {
cx_b.update_editor(|editor, cx| {
editor.tab(&editor::actions::Tab, cx);
});
}
cx_a.run_until_parked();
cx_b.run_until_parked();
cx_a.assert_editor_state(expected_tabbed);
cx_b.assert_editor_state(expected_tabbed);
if a_tabs {
cx_a.update_editor(|editor, cx| {
editor.undo(&editor::actions::Undo, cx);
});
} else {
cx_b.update_editor(|editor, cx| {
editor.undo(&editor::actions::Undo, cx);
});
}
cx_a.run_until_parked();
cx_b.run_until_parked();
cx_a.assert_editor_state(expected_initial);
cx_b.assert_editor_state(expected_initial);
}
fn extract_hint_labels(editor: &Editor) -> Vec<String> {
let mut labels = Vec::new();
for hint in editor.inlay_hint_cache().hints() {

View File

@@ -1589,9 +1589,8 @@ async fn test_following_stops_on_unshare(cx_a: &mut TestAppContext, cx_b: &mut T
.await;
let (workspace_b, cx_b) = client_b.join_workspace(channel_id, cx_b).await;
cx_a.simulate_keystrokes("cmd-p");
cx_a.simulate_keystrokes("cmd-p 2 enter");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("2 enter");
let editor_a = workspace_a.update(cx_a, |workspace, cx| {
workspace.active_item_as::<Editor>(cx).unwrap()
@@ -2042,9 +2041,7 @@ async fn test_following_to_channel_notes_other_workspace(
share_workspace(&workspace_a, cx_a).await.unwrap();
// a opens 1.txt
cx_a.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("1 enter");
cx_a.simulate_keystrokes("cmd-p 1 enter");
cx_a.run_until_parked();
workspace_a.update(cx_a, |workspace, cx| {
let editor = workspace.active_item(cx).unwrap();
@@ -2101,9 +2098,7 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
share_workspace(&workspace_a, cx_a).await.unwrap();
// a opens 1.txt
cx_a.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("1 enter");
cx_a.simulate_keystrokes("cmd-p 1 enter");
cx_a.run_until_parked();
workspace_a.update(cx_a, |workspace, cx| {
let editor = workspace.active_item(cx).unwrap();
@@ -2123,9 +2118,7 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
cx_b.simulate_keystrokes("down");
// a opens a different file while not followed
cx_a.simulate_keystrokes("cmd-p");
cx_a.run_until_parked();
cx_a.simulate_keystrokes("2 enter");
cx_a.simulate_keystrokes("cmd-p 2 enter");
workspace_b.update(cx_b, |workspace, cx| {
let editor = workspace.active_item_as::<Editor>(cx).unwrap();
@@ -2135,9 +2128,7 @@ async fn test_following_while_deactivated(cx_a: &mut TestAppContext, cx_b: &mut
// a opens a file in a new window
let (_, cx_a2) = client_a.build_test_workspace(&mut cx_a2).await;
cx_a2.update(|cx| cx.activate_window());
cx_a2.simulate_keystrokes("cmd-p");
cx_a2.run_until_parked();
cx_a2.simulate_keystrokes("3 enter");
cx_a2.simulate_keystrokes("cmd-p 3 enter");
cx_a2.run_until_parked();
// b starts following a again

View File

@@ -34,7 +34,7 @@ use project::{
};
use rand::prelude::*;
use serde_json::json;
use settings::SettingsStore;
use settings::{LocalSettingsKind, SettingsStore};
use std::{
cell::{Cell, RefCell},
env, future, mem,
@@ -3328,8 +3328,16 @@ async fn test_local_settings(
.local_settings(worktree_b.read(cx).id())
.collect::<Vec<_>>(),
&[
(Path::new("").into(), r#"{"tab_size":2}"#.to_string()),
(Path::new("a").into(), r#"{"tab_size":8}"#.to_string()),
(
Path::new("").into(),
LocalSettingsKind::Settings,
r#"{"tab_size":2}"#.to_string()
),
(
Path::new("a").into(),
LocalSettingsKind::Settings,
r#"{"tab_size":8}"#.to_string()
),
]
)
});
@@ -3347,8 +3355,16 @@ async fn test_local_settings(
.local_settings(worktree_b.read(cx).id())
.collect::<Vec<_>>(),
&[
(Path::new("").into(), r#"{}"#.to_string()),
(Path::new("a").into(), r#"{"tab_size":8}"#.to_string()),
(
Path::new("").into(),
LocalSettingsKind::Settings,
r#"{}"#.to_string()
),
(
Path::new("a").into(),
LocalSettingsKind::Settings,
r#"{"tab_size":8}"#.to_string()
),
]
)
});
@@ -3376,8 +3392,16 @@ async fn test_local_settings(
.local_settings(worktree_b.read(cx).id())
.collect::<Vec<_>>(),
&[
(Path::new("a").into(), r#"{"tab_size":8}"#.to_string()),
(Path::new("b").into(), r#"{"tab_size":4}"#.to_string()),
(
Path::new("a").into(),
LocalSettingsKind::Settings,
r#"{"tab_size":8}"#.to_string()
),
(
Path::new("b").into(),
LocalSettingsKind::Settings,
r#"{"tab_size":4}"#.to_string()
),
]
)
});
@@ -3407,7 +3431,11 @@ async fn test_local_settings(
store
.local_settings(worktree_b.read(cx).id())
.collect::<Vec<_>>(),
&[(Path::new("a").into(), r#"{"hard_tabs":true}"#.to_string()),]
&[(
Path::new("a").into(),
LocalSettingsKind::Settings,
r#"{"hard_tabs":true}"#.to_string()
),]
)
});
}

Some files were not shown because too many files have changed in this diff