Compare commits


4 Commits

Author      SHA1        Message                                            Date
Julia Ryan  fa1aba4a7a  temp: disable other actions to test nix bundling   2025-03-19 16:27:18 -07:00
Julia Ryan  e39ae07196  nix: don't block nightly release on failure        2025-03-19 15:32:26 -07:00
Julia Ryan  dea699a1bc  nix: Add cachix as substituter                     2025-03-18 12:40:36 -07:00
Julia Ryan  3642a27cae  nix: add nightly nix build with cachix             2025-03-18 12:37:10 -07:00
330 changed files with 5002 additions and 7620 deletions

View File

@@ -10,7 +10,7 @@ runs:
cargo install cargo-nextest --locked
- name: Install Node
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
with:
node-version: "18"

View File

@@ -16,7 +16,7 @@ runs:
run: cargo install cargo-nextest --locked
- name: Install Node
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
with:
node-version: "18"

View File

@@ -482,7 +482,7 @@ jobs:
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Install Node
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
with:
node-version: "18"
@@ -526,14 +526,14 @@ jobs:
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
@@ -586,7 +586,7 @@ jobs:
run: script/bundle-linux
- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -595,7 +595,7 @@ jobs:
path: target/release/zed-*.tar.gz
- name: Upload Linux remote server to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -647,7 +647,7 @@ jobs:
run: script/bundle-linux
- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -656,7 +656,7 @@ jobs:
path: target/release/zed-*.tar.gz
- name: Upload Linux remote server to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')

View File

@@ -1,7 +1,7 @@
name: "Close Stale Issues"
on:
schedule:
- cron: "0 7,9,11 * * 2"
- cron: "0 11 * * 2"
workflow_dispatch:
jobs:

View File

@@ -13,11 +13,10 @@ jobs:
id: get-release-url
run: |
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
URL="https://zed.dev/releases/preview/latest"
URL="https://zed.dev/releases/preview/latest"
else
URL="https://zed.dev/releases/stable/latest"
URL="https://zed.dev/releases/stable/latest"
fi
echo "URL=$URL" >> $GITHUB_OUTPUT
- name: Get content
uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757 # v1.4.1
@@ -39,30 +38,28 @@ jobs:
if: github.repository_owner == 'zed-industries' && !github.event.release.prerelease
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check if release was promoted from preview
id: check-promotion-from-preview
run: |
VERSION="${{ github.event.release.tag_name }}"
PREVIEW_TAG="${VERSION}-pre"
if git rev-parse "$PREVIEW_TAG" > /dev/null 2>&1; then
echo "was_promoted_from_preview=true" >> $GITHUB_OUTPUT
if git rev-parse "$PREVIEW_TAG" >/dev/null 2>&1; then
echo "was_preview=true" >> $GITHUB_OUTPUT
else
echo "was_promoted_from_preview=false" >> $GITHUB_OUTPUT
echo "was_preview=false" >> $GITHUB_OUTPUT
fi
- name: Send release notes email
if: steps.check-promotion-from-preview.outputs.was_promoted_from_preview == 'true'
if: steps.check-promotion-from-preview.outputs.was_preview == 'true'
run: |
TAG="${{ github.event.release.tag_name }}"
echo \"${{ toJSON(github.event.release.body) }}\" > release_body.txt
jq -n --arg tag "$TAG" --rawfile body release_body.txt '{version: $tag, markdown_body: $body}' \
> release_data.json
curl -X POST "https://zed.dev/api/send_release_notes_email" \
-H "Authorization: Bearer ${{ secrets.RELEASE_NOTES_API_TOKEN }}" \
-H "Content-Type: application/json" \
-d @release_data.json
-d '{
"version": "${{ github.event.release.tag_name }}",
"markdown_body": ${{ toJSON(github.event.release.body) }}
}'

View File

@@ -22,7 +22,7 @@ jobs:
version: 9
- name: Setup Node
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
with:
node-version: "20"
cache: "pnpm"

View File

@@ -37,35 +37,35 @@ jobs:
mdbook build ./docs --dest-dir=../target/deploy/docs/
- name: Deploy Docs
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: pages deploy target/deploy --project-name=docs
- name: Deploy Install
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh
- name: Deploy Docs Workers
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js
- name: Deploy Install Workers
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js
- name: Preserve Wrangler logs
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
if: always()
with:
name: wrangler_logs

View File

@@ -18,7 +18,7 @@ jobs:
version: 9
- name: Setup Node
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
with:
node-version: "20"
cache: "pnpm"

View File

@@ -23,7 +23,7 @@ jobs:
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Install Node
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
with:
node-version: "18"

View File

@@ -5,8 +5,10 @@ on:
# Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
- cron: "0 7 * * *"
push:
tags:
- "nightly"
# tags:
# - "nightly"
branches:
- nix
env:
CARGO_TERM_COLOR: always
@@ -28,147 +30,147 @@ jobs:
clean: false
fetch-depth: 0
- name: Run style checks
uses: ./.github/actions/check_style
# - name: Run style checks
# uses: ./.github/actions/check_style
- name: Run clippy
run: ./script/clippy
# - name: Run clippy
# run: ./script/clippy
tests:
timeout-minutes: 60
name: Run tests
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- test
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# tests:
# timeout-minutes: 60
# name: Run tests
# if: github.repository_owner == 'zed-industries'
# runs-on:
# - self-hosted
# - test
# needs: style
# steps:
# - name: Checkout repo
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# with:
# clean: false
- name: Run tests
uses: ./.github/actions/run_tests
# - name: Run tests
# uses: ./.github/actions/run_tests
bundle-mac:
timeout-minutes: 60
name: Create a macOS bundle
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- bundle
needs: tests
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Install Node
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "18"
# bundle-mac:
# timeout-minutes: 60
# name: Create a macOS bundle
# if: github.repository_owner == 'zed-industries'
# runs-on:
# - self-hosted
# - bundle
# needs: tests
# env:
# MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
# MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
# APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
# APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
# APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
# DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
# DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
# ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
# ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
# steps:
# - name: Install Node
# uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
# with:
# node-version: "18"
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# - name: Checkout repo
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# with:
# clean: false
- name: Set release channel to nightly
run: |
set -eu
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
# - name: Set release channel to nightly
# run: |
# set -eu
# version=$(git rev-parse --short HEAD)
# echo "Publishing version: ${version} on release channel nightly"
# echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: Create macOS app bundle
run: script/bundle-mac
# - name: Create macOS app bundle
# run: script/bundle-mac
- name: Upload Zed Nightly
run: script/upload-nightly macos
# - name: Upload Zed Nightly
# run: script/upload-nightly macos
bundle-linux-x86:
timeout-minutes: 60
name: Create a Linux *.tar.gz bundle for x86
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2004
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# bundle-linux-x86:
# timeout-minutes: 60
# name: Create a Linux *.tar.gz bundle for x86
# if: github.repository_owner == 'zed-industries'
# runs-on:
# - buildjet-16vcpu-ubuntu-2004
# needs: tests
# env:
# DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
# DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
# ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
# ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
# steps:
# - name: Checkout repo
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# with:
# clean: false
- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
# - name: Add Rust to the PATH
# run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Install Linux dependencies
run: ./script/linux && ./script/install-mold 2.34.0
# - name: Install Linux dependencies
# run: ./script/linux && ./script/install-mold 2.34.0
- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100
# - name: Limit target directory size
# run: script/clear-target-dir-if-larger-than 100
- name: Set release channel to nightly
run: |
set -euo pipefail
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
# - name: Set release channel to nightly
# run: |
# set -euo pipefail
# version=$(git rev-parse --short HEAD)
# echo "Publishing version: ${version} on release channel nightly"
# echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: Create Linux .tar.gz bundle
run: script/bundle-linux
# - name: Create Linux .tar.gz bundle
# run: script/bundle-linux
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz
# - name: Upload Zed Nightly
# run: script/upload-nightly linux-targz
bundle-linux-arm:
timeout-minutes: 60
name: Create a Linux *.tar.gz bundle for ARM
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2204-arm
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# bundle-linux-arm:
# timeout-minutes: 60
# name: Create a Linux *.tar.gz bundle for ARM
# if: github.repository_owner == 'zed-industries'
# runs-on:
# - buildjet-16vcpu-ubuntu-2204-arm
# needs: tests
# env:
# DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
# DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
# ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
# ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
# steps:
# - name: Checkout repo
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# with:
# clean: false
- name: Install Linux dependencies
run: ./script/linux
# - name: Install Linux dependencies
# run: ./script/linux
- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100
# - name: Limit target directory size
# run: script/clear-target-dir-if-larger-than 100
- name: Set release channel to nightly
run: |
set -euo pipefail
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
# - name: Set release channel to nightly
# run: |
# set -euo pipefail
# version=$(git rev-parse --short HEAD)
# echo "Publishing version: ${version} on release channel nightly"
# echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: Create Linux .tar.gz bundle
run: script/bundle-linux
# - name: Create Linux .tar.gz bundle
# run: script/bundle-linux
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz
# - name: Upload Zed Nightly
# run: script/upload-nightly linux-targz
bundle-nix:
timeout-minutes: 60
@@ -180,17 +182,16 @@ jobs:
system:
- os: x86 Linux
runner: buildjet-16vcpu-ubuntu-2204
install_nix: true
install: true
- os: arm Mac
# TODO: once other macs are provisioned for nix, remove that constraint from the runner
runner: [macOS, ARM64, nix]
install_nix: false
install: false
- os: arm Linux
runner: buildjet-16vcpu-ubuntu-2204-arm
install_nix: true
install: true
if: github.repository_owner == 'zed-industries'
runs-on: ${{ matrix.system.runner }}
needs: tests
# needs: tests
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
@@ -200,49 +201,43 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# on our macs we manually install nix. for some reason the cachix action is running
# under a non-login /bin/bash shell which doesn't source the proper script to add the
# nix profile to PATH, so we manually add them here
- name: Set path
if: ${{ ! matrix.system.install_nix }}
if: ${{ ! matrix.system.install }}
run: |
echo "/nix/var/nix/profiles/default/bin" >> $GITHUB_PATH
echo "/Users/administrator/.nix-profile/bin" >> $GITHUB_PATH
- uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
if: ${{ matrix.system.install_nix }}
if: ${{ matrix.system.install }}
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
with:
name: zed-industries
authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
- run: nix build
- run: nix-collect-garbage -d
update-nightly-tag:
name: Update nightly tag
if: github.repository_owner == 'zed-industries'
runs-on: ubuntu-latest
needs:
- bundle-mac
- bundle-linux-x86
- bundle-linux-arm
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0
# update-nightly-tag:
# name: Update nightly tag
# if: github.repository_owner == 'zed-industries'
# runs-on: ubuntu-latest
# needs:
# - bundle-mac
# - bundle-linux-x86
# - bundle-linux-arm
# steps:
# - name: Checkout repo
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# with:
# fetch-depth: 0
- name: Update nightly tag
run: |
if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
echo "Nightly tag already points to current commit. Skipping tagging."
exit 0
fi
git config user.name github-actions
git config user.email github-actions@github.com
git tag -f nightly
git push origin nightly --force
# - name: Update nightly tag
# run: |
# if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
# echo "Nightly tag already points to current commit. Skipping tagging."
# exit 0
# fi
# git config user.name github-actions
# git config user.email github-actions@github.com
# git tag -f nightly
# git push origin nightly --force

Cargo.lock (generated)
View File

@@ -244,7 +244,7 @@ dependencies = [
"serde",
"serde_json",
"strum",
"thiserror 2.0.12",
"thiserror 1.0.69",
"util",
]
@@ -467,7 +467,6 @@ dependencies = [
"futures 0.3.31",
"fuzzy",
"git",
"git_ui",
"gpui",
"heed",
"html_to_markdown",
@@ -729,7 +728,6 @@ dependencies = [
"settings",
"theme",
"ui",
"unindent",
"util",
"workspace",
"worktree",
@@ -795,9 +793,9 @@ dependencies = [
[[package]]
name = "async-compression"
version = "0.4.21"
version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0cf008e5e1a9e9e22a7d3c9a4992e21a350290069e36d8fb72304ed17e8f2d2"
checksum = "310c9bcae737a48ef5cdee3174184e6d548b292739ede61a1f955ef76a738861"
dependencies = [
"deflate64",
"flate2",
@@ -979,9 +977,9 @@ dependencies = [
[[package]]
name = "async-std"
version = "1.13.1"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "730294c1c08c2e0f85759590518f6333f0d5a0a766a27d519c1b244c3dfd8a24"
checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615"
dependencies = [
"async-attributes",
"async-channel 1.9.0",
@@ -1081,9 +1079,9 @@ dependencies = [
[[package]]
name = "async-trait"
version = "0.1.88"
version = "0.1.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
checksum = "d556ec1359574147ec0c4fc5eb525f3f23263a592b1a9c07e0a75b427de55c97"
dependencies = [
"proc-macro2",
"quote",
@@ -1843,7 +1841,7 @@ dependencies = [
"serde",
"serde_json",
"strum",
"thiserror 2.0.12",
"thiserror 1.0.69",
"tokio",
]
@@ -2356,7 +2354,7 @@ dependencies = [
"cap-primitives",
"cap-std",
"io-lifetimes",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -2384,7 +2382,7 @@ dependencies = [
"ipnet",
"maybe-owned",
"rustix",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
"winx",
]
@@ -2444,7 +2442,7 @@ dependencies = [
"semver",
"serde",
"serde_json",
"thiserror 2.0.12",
"thiserror 2.0.6",
]
[[package]]
@@ -2768,7 +2766,7 @@ dependencies = [
"telemetry",
"telemetry_events",
"text",
"thiserror 2.0.12",
"thiserror 1.0.69",
"time",
"tiny_http",
"tokio-socks",
@@ -2969,7 +2967,7 @@ dependencies = [
"telemetry_events",
"text",
"theme",
"thiserror 2.0.12",
"thiserror 1.0.69",
"time",
"tokio",
"toml 0.8.20",
@@ -4592,7 +4590,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
dependencies = [
"libc",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -5239,8 +5237,8 @@ dependencies = [
"fsevent",
"futures 0.3.31",
"git",
"git2",
"gpui",
"ignore",
"libc",
"log",
"notify 6.1.1",
@@ -5267,7 +5265,7 @@ checksum = "5e2e6123af26f0f2c51cc66869137080199406754903cc926a7690401ce09cb4"
dependencies = [
"io-lifetimes",
"rustix",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -5602,13 +5600,11 @@ dependencies = [
"serde_json",
"smol",
"sum_tree",
"tempfile",
"text",
"time",
"unindent",
"url",
"util",
"uuid",
]
[[package]]
@@ -5684,6 +5680,7 @@ dependencies = [
"serde_derive",
"serde_json",
"settings",
"smallvec",
"strum",
"telemetry",
"theme",
@@ -5881,7 +5878,7 @@ dependencies = [
"strum",
"sum_tree",
"taffy",
"thiserror 2.0.12",
"thiserror 1.0.69",
"unicode-segmentation",
"usvg",
"util",
@@ -6926,7 +6923,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2285ddfe3054097ef4b2fe909ef8c3bcd1ea52a8f0d274416caebeef39f04a65"
dependencies = [
"io-lifetimes",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -7329,7 +7326,7 @@ dependencies = [
"smol",
"strum",
"telemetry_events",
"thiserror 2.0.12",
"thiserror 1.0.69",
"ui",
"util",
]
@@ -7572,7 +7569,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [
"cfg-if",
"windows-targets 0.48.5",
"windows-targets 0.52.6",
]
[[package]]
@@ -9691,7 +9688,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc"
dependencies = [
"memchr",
"thiserror 2.0.12",
"thiserror 2.0.6",
"ucd-trie",
]
@@ -10963,7 +10960,7 @@ dependencies = [
"rustc-hash 2.1.1",
"rustls 0.23.23",
"socket2",
"thiserror 2.0.12",
"thiserror 2.0.6",
"tokio",
"tracing",
]
@@ -10982,7 +10979,7 @@ dependencies = [
"rustls 0.23.23",
"rustls-pki-types",
"slab",
"thiserror 2.0.12",
"thiserror 2.0.6",
"tinyvec",
"tracing",
"web-time",
@@ -10999,7 +10996,7 @@ dependencies = [
"once_cell",
"socket2",
"tracing",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -11391,7 +11388,7 @@ dependencies = [
"shlex",
"smol",
"tempfile",
"thiserror 2.0.12",
"thiserror 1.0.69",
"urlencoding",
"util",
]
@@ -11924,7 +11921,7 @@ dependencies = [
"libc",
"linux-raw-sys",
"once_cell",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -12471,9 +12468,9 @@ dependencies = [
[[package]]
name = "semver"
version = "1.0.26"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03"
dependencies = [
"serde",
]
@@ -12706,13 +12703,6 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
[[package]]
name = "shell_parser"
version = "0.1.0"
dependencies = [
"shlex",
]
[[package]]
name = "shellexpand"
version = "2.1.2"
@@ -13106,7 +13096,7 @@ dependencies = [
"serde_json",
"sha2",
"smallvec",
"thiserror 2.0.12",
"thiserror 2.0.6",
"time",
"tokio",
"tokio-stream",
@@ -13195,7 +13185,7 @@ dependencies = [
"smallvec",
"sqlx-core",
"stringprep",
"thiserror 2.0.12",
"thiserror 2.0.6",
"time",
"tracing",
"uuid",
@@ -13238,7 +13228,7 @@ dependencies = [
"smallvec",
"sqlx-core",
"stringprep",
"thiserror 2.0.12",
"thiserror 2.0.6",
"time",
"tracing",
"uuid",
@@ -13723,7 +13713,7 @@ dependencies = [
"fd-lock",
"io-lifetimes",
"rustix",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
"winx",
]
@@ -13867,7 +13857,7 @@ dependencies = [
"getrandom 0.3.1",
"once_cell",
"rustix",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -13912,7 +13902,7 @@ dependencies = [
"sysinfo",
"task",
"theme",
"thiserror 2.0.12",
"thiserror 1.0.69",
"util",
"windows 0.60.0",
]
@@ -14007,7 +13997,7 @@ dependencies = [
"serde_repr",
"settings",
"strum",
"thiserror 2.0.12",
"thiserror 1.0.69",
"util",
"uuid",
]
@@ -14073,11 +14063,11 @@ dependencies = [
[[package]]
name = "thiserror"
version = "2.0.12"
version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47"
dependencies = [
"thiserror-impl 2.0.12",
"thiserror-impl 2.0.6",
]
[[package]]
@@ -14093,9 +14083,9 @@ dependencies = [
[[package]]
name = "thiserror-impl"
version = "2.0.12"
version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312"
dependencies = [
"proc-macro2",
"quote",
@@ -15033,19 +15023,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "ui_prompt"
version = "0.1.0"
dependencies = [
"gpui",
"markdown",
"menu",
"settings",
"theme",
"ui",
"workspace",
]
[[package]]
name = "unicase"
version = "2.8.1"
@@ -16261,7 +16238,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.48.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -16821,7 +16798,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d"
dependencies = [
"bitflags 2.8.0",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -17342,7 +17319,7 @@ dependencies = [
[[package]]
name = "zed"
version = "0.180.0"
version = "0.179.0"
dependencies = [
"activity_indicator",
"anyhow",
@@ -17405,6 +17382,7 @@ dependencies = [
"languages",
"libc",
"log",
"markdown",
"markdown_preview",
"menu",
"migrator",
@@ -17456,7 +17434,6 @@ dependencies = [
"tree-sitter-md",
"tree-sitter-rust",
"ui",
"ui_prompt",
"url",
"urlencoding",
"util",
@@ -17743,7 +17720,7 @@ dependencies = [
"telemetry",
"telemetry_events",
"theme",
"thiserror 2.0.12",
"thiserror 1.0.69",
"tree-sitter-go",
"tree-sitter-rust",
"ui",

View File

@@ -131,7 +131,6 @@ members = [
"crates/session",
"crates/settings",
"crates/settings_ui",
"crates/shell_parser",
"crates/snippet",
"crates/snippet_provider",
"crates/snippets_ui",
@@ -161,7 +160,6 @@ members = [
"crates/ui",
"crates/ui_input",
"crates/ui_macros",
"crates/ui_prompt",
"crates/util",
"crates/util_macros",
"crates/vim",
@@ -364,7 +362,6 @@ toolchain_selector = { path = "crates/toolchain_selector" }
ui = { path = "crates/ui" }
ui_input = { path = "crates/ui_input" }
ui_macros = { path = "crates/ui_macros" }
ui_prompt = { path = "crates/ui_prompt" }
util = { path = "crates/util" }
util_macros = { path = "crates/util_macros" }
vim = { path = "crates/vim" }
@@ -536,7 +533,7 @@ sys-locale = "0.3.1"
sysinfo = "0.31.0"
take-until = "0.2.0"
tempfile = "3.9.0"
thiserror = "2.0.12"
thiserror = "1.0.29"
tiktoken-rs = "0.6.0"
time = { version = "0.3", features = [
"macros",

View File

@@ -53,9 +53,7 @@
"context": "Prompt",
"bindings": {
"left": "menu::SelectPrevious",
"right": "menu::SelectNext",
"h": "menu::SelectPrevious",
"l": "menu::SelectNext"
"right": "menu::SelectNext"
}
},
{
@@ -754,8 +752,6 @@
"escape": "git_panel::ToggleFocus",
"ctrl-enter": "git::Commit",
"alt-enter": "menu::SecondaryConfirm",
"delete": "git::RestoreFile",
"shift-delete": "git::RestoreFile",
"backspace": "git::RestoreFile"
}
},

View File

@@ -705,16 +705,6 @@
"ctrl-]": "assistant::CycleNextInlineAssist"
}
},
{
"context": "Prompt",
"use_key_equivalents": true,
"bindings": {
"left": "menu::SelectPrevious",
"right": "menu::SelectNext",
"h": "menu::SelectPrevious",
"l": "menu::SelectNext"
}
},
{
"context": "ProjectSearchBar && !in_replace",
"use_key_equivalents": true,
@@ -801,8 +791,6 @@
"shift-tab": "git_panel::FocusEditor",
"escape": "git_panel::ToggleFocus",
"cmd-enter": "git::Commit",
"delete": "git::RestoreFile",
"cmd-backspace": "git::RestoreFile",
"backspace": "git::RestoreFile"
}
},

View File

@@ -136,11 +136,6 @@
// Whether to use the system provided dialogs for Open and Save As.
// When set to false, Zed will use the built-in keyboard-first pickers.
"use_system_path_prompts": true,
// Whether to use the system provided dialogs for prompts, such as confirmation
// prompts.
// When set to false, Zed will use its built-in prompts. Note that on Linux,
// this option is ignored and Zed will always use the built-in prompts.
"use_system_prompts": true,
// Whether the cursor blinks in the editor.
"cursor_blink": true,
// Cursor shape for the default editor.

View File

@@ -63,9 +63,9 @@ impl ActivityIndicator {
let auto_updater = AutoUpdater::get(cx);
let this = cx.new(|cx| {
let mut status_events = languages.language_server_binary_statuses();
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
while let Some((name, status)) = status_events.next().await {
this.update(cx, |this: &mut ActivityIndicator, cx| {
this.update(&mut cx, |this: &mut ActivityIndicator, cx| {
this.statuses.retain(|s| s.name != name);
this.statuses.push(ServerStatus { name, status });
cx.notify();
@@ -76,9 +76,9 @@ impl ActivityIndicator {
.detach();
let mut status_events = languages.dap_server_binary_statuses();
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
while let Some((name, status)) = status_events.next().await {
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.statuses.retain(|s| s.name != name);
this.statuses.push(ServerStatus { name, status });
cx.notify();
@@ -123,9 +123,9 @@ impl ActivityIndicator {
let project = project.clone();
let error = error.clone();
let server_name = server_name.clone();
cx.spawn_in(window, async move |workspace, cx| {
cx.spawn_in(window, |workspace, mut cx| async move {
let buffer = create_buffer.await?;
buffer.update(cx, |buffer, cx| {
buffer.update(&mut cx, |buffer, cx| {
buffer.edit(
[(
0..0,
@@ -136,7 +136,7 @@ impl ActivityIndicator {
);
buffer.set_capability(language::Capability::ReadOnly, cx);
})?;
workspace.update_in(cx, |workspace, window, cx| {
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.add_item_to_active_pane(
Box::new(cx.new(|cx| {
Editor::for_buffer(buffer, Some(project.clone()), window, cx)

View File

@@ -34,9 +34,9 @@ impl AskPassDelegate {
password_prompt: impl Fn(String, oneshot::Sender<String>, &mut AsyncApp) + Send + Sync + 'static,
) -> Self {
let (tx, mut rx) = mpsc::unbounded::<(String, oneshot::Sender<String>)>();
let task = cx.spawn(async move |cx: &mut AsyncApp| {
let task = cx.spawn(|mut cx| async move {
while let Some((prompt, channel)) = rx.next().await {
password_prompt(prompt, channel, cx);
password_prompt(prompt, channel, &mut cx);
}
});
Self { tx, _task: task }

View File

@@ -98,9 +98,9 @@ pub fn init(
AssistantSettings::register(cx);
SlashCommandSettings::register(cx);
cx.spawn({
cx.spawn(|mut cx| {
let client = client.clone();
async move |cx| {
async move {
let is_search_slash_command_enabled = cx
.update(|cx| cx.wait_for_flag::<SearchSlashCommandFeatureFlag>())?
.await;
@@ -116,7 +116,7 @@ pub fn init(
let semantic_index = SemanticDb::new(
paths::embeddings_dir().join("semantic-index-db.0.mdb"),
Arc::new(embedding_provider),
cx,
&mut cx,
)
.await?;

View File

@@ -98,16 +98,16 @@ impl AssistantPanel {
prompt_builder: Arc<PromptBuilder>,
cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
let slash_commands = Arc::new(SlashCommandWorkingSet::default());
let context_store = workspace
.update(cx, |workspace, cx| {
.update(&mut cx, |workspace, cx| {
let project = workspace.project().clone();
ContextStore::new(project, prompt_builder.clone(), slash_commands, cx)
})?
.await?;
workspace.update_in(cx, |workspace, window, cx| {
workspace.update_in(&mut cx, |workspace, window, cx| {
// TODO: deserialize state.
cx.new(|cx| Self::new(workspace, context_store, window, cx))
})
@@ -357,9 +357,9 @@ impl AssistantPanel {
) -> Task<()> {
let mut status_rx = client.status();
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
while let Some(status) = status_rx.next().await {
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if this.client_status.is_none()
|| this
.client_status
@@ -371,7 +371,7 @@ impl AssistantPanel {
})
.log_err();
}
this.update(cx, |this, _cx| this.watch_client_status = None)
this.update(&mut cx, |this, _cx| this.watch_client_status = None)
.log_err();
})
}
@@ -576,11 +576,11 @@ impl AssistantPanel {
if self.authenticate_provider_task.is_none() {
self.authenticate_provider_task = Some((
provider.id(),
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
if let Some(future) = load_credentials {
let _ = future.await;
}
this.update(cx, |this, _cx| {
this.update(&mut cx, |this, _cx| {
this.authenticate_provider_task = None;
})
.log_err();
@@ -641,9 +641,9 @@ impl AssistantPanel {
}
} else {
let assistant_panel = assistant_panel.downgrade();
cx.spawn_in(window, async move |workspace, cx| {
cx.spawn_in(window, |workspace, mut cx| async move {
let Some(task) =
assistant_panel.update(cx, |assistant, cx| assistant.authenticate(cx))?
assistant_panel.update(&mut cx, |assistant, cx| assistant.authenticate(cx))?
else {
let answer = cx
.prompt(
@@ -665,7 +665,7 @@ impl AssistantPanel {
return Ok(());
};
task.await?;
if assistant_panel.update(cx, |panel, cx| panel.is_authenticated(cx))? {
if assistant_panel.update(&mut cx, |panel, cx| panel.is_authenticated(cx))? {
cx.update(|window, cx| match inline_assist_target {
InlineAssistTarget::Editor(active_editor, include_context) => {
let assistant_panel = if include_context {
@@ -698,7 +698,7 @@ impl AssistantPanel {
}
})?
} else {
workspace.update_in(cx, |workspace, window, cx| {
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.focus_panel::<AssistantPanel>(window, cx)
})?;
}
@@ -791,10 +791,10 @@ impl AssistantPanel {
.context_store
.update(cx, |store, cx| store.create_remote_context(cx));
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
let context = task.await?;
this.update_in(cx, |this, window, cx| {
this.update_in(&mut cx, |this, window, cx| {
let workspace = this.workspace.clone();
let project = this.project.clone();
let lsp_adapter_delegate =
@@ -847,9 +847,9 @@ impl AssistantPanel {
self.show_context(editor.clone(), window, cx);
let workspace = self.workspace.clone();
cx.spawn_in(window, async move |_, cx| {
cx.spawn_in(window, move |_, mut cx| async move {
workspace
.update_in(cx, |workspace, window, cx| {
.update_in(&mut cx, |workspace, window, cx| {
workspace.focus_panel::<AssistantPanel>(window, cx);
})
.ok();
@@ -1069,8 +1069,8 @@ impl AssistantPanel {
.filter(|editor| editor.read(cx).context().read(cx).path() == Some(&path))
});
if let Some(existing_context) = existing_context {
return cx.spawn_in(window, async move |this, cx| {
this.update_in(cx, |this, window, cx| {
return cx.spawn_in(window, |this, mut cx| async move {
this.update_in(&mut cx, |this, window, cx| {
this.show_context(existing_context, window, cx)
})
});
@@ -1085,9 +1085,9 @@ impl AssistantPanel {
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
let context = context.await?;
this.update_in(cx, |this, window, cx| {
this.update_in(&mut cx, |this, window, cx| {
let editor = cx.new(|cx| {
ContextEditor::for_context(
context,
@@ -1117,8 +1117,8 @@ impl AssistantPanel {
.filter(|editor| *editor.read(cx).context().read(cx).id() == id)
});
if let Some(existing_context) = existing_context {
return cx.spawn_in(window, async move |this, cx| {
this.update_in(cx, |this, window, cx| {
return cx.spawn_in(window, |this, mut cx| async move {
this.update_in(&mut cx, |this, window, cx| {
this.show_context(existing_context.clone(), window, cx)
})?;
Ok(existing_context)
@@ -1134,9 +1134,9 @@ impl AssistantPanel {
.log_err()
.flatten();
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
let context = context.await?;
this.update_in(cx, |this, window, cx| {
this.update_in(&mut cx, |this, window, cx| {
let editor = cx.new(|cx| {
ContextEditor::for_context(
context,

View File

@@ -1311,9 +1311,9 @@ impl EditorInlineAssists {
assist_ids: Vec::new(),
scroll_lock: None,
highlight_updates: highlight_updates_tx,
_update_highlights: cx.spawn({
_update_highlights: cx.spawn(|cx| {
let editor = editor.downgrade();
async move |cx| {
async move {
while let Ok(()) = highlight_updates_rx.changed().await {
let editor = editor.upgrade().context("editor was dropped")?;
cx.update_global(|assistant: &mut InlineAssistant, cx| {
@@ -1850,7 +1850,7 @@ impl PromptEditor {
fn count_tokens(&mut self, cx: &mut Context<Self>) {
let assist_id = self.id;
self.pending_token_count = cx.spawn(async move |this, cx| {
self.pending_token_count = cx.spawn(|this, mut cx| async move {
cx.background_executor().timer(Duration::from_secs(1)).await;
let token_count = cx
.update_global(|inline_assistant: &mut InlineAssistant, cx| {
@@ -1862,7 +1862,7 @@ impl PromptEditor {
})??
.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.token_counts = Some(token_count);
cx.notify();
})
@@ -2882,7 +2882,7 @@ impl CodegenAlternative {
let request = self.build_request(user_prompt, assistant_panel_context, cx)?;
self.request = Some(request.clone());
cx.spawn(async move |_, cx| model.stream_completion_text(request, &cx).await)
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await })
.boxed_local()
};
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
@@ -2999,207 +2999,213 @@ impl CodegenAlternative {
let completion = Arc::new(Mutex::new(String::new()));
let completion_clone = completion.clone();
self.generation = cx.spawn(async move |codegen, cx| {
let stream = stream.await;
let message_id = stream
.as_ref()
.ok()
.and_then(|stream| stream.message_id.clone());
let generate = async {
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let executor = cx.background_executor().clone();
let message_id = message_id.clone();
let line_based_stream_diff: Task<anyhow::Result<()>> =
cx.background_spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
let diff = async {
let chunks = StripInvalidSpans::new(stream?.stream);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut line_diff = LineDiff::default();
self.generation = cx.spawn(|codegen, mut cx| {
async move {
let stream = stream.await;
let message_id = stream
.as_ref()
.ok()
.and_then(|stream| stream.message_id.clone());
let generate = async {
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let executor = cx.background_executor().clone();
let message_id = message_id.clone();
let line_based_stream_diff: Task<anyhow::Result<()>> =
cx.background_spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
let diff = async {
let chunks = StripInvalidSpans::new(stream?.stream);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut line_diff = LineDiff::default();
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
completion_clone.lock().push_str(&chunk);
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta =
line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(
selection_start.column as usize,
);
}
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
}
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
completion_clone.lock().push_str(&chunk);
if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
new_text.clear();
}
if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta =
line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(
selection_start.column as usize,
);
}
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
}
}
if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
new_text.clear();
}
line_indent = None;
first_line = false;
if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear();
}
line_indent = None;
first_line = false;
}
}
}
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
anyhow::Ok(())
};
let result = diff.await;
let error_message =
result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
result?;
Ok(())
});
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(&mut cx, |codegen, cx| {
codegen.last_equal_ranges.clear();
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
}
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
anyhow::Ok(())
};
let result = diff.await;
let error_message = result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
result?;
Ok(())
});
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(cx, |codegen, cx| {
codegen.last_equal_ranges.clear();
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
}
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
cx.notify();
})?;
}
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
codegen
.update(cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
cx.notify();
})?;
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) =
join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
codegen
.update(&mut cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
}
});
cx.notify();
}
@@ -3317,7 +3323,7 @@ impl CodegenAlternative {
let new_snapshot = self.buffer.read(cx).snapshot(cx);
let new_range = self.range.to_point(&new_snapshot);
cx.spawn(async move |codegen, cx| {
cx.spawn(|codegen, mut cx| async move {
let (deleted_row_ranges, inserted_row_ranges) = cx
.background_spawn(async move {
let old_text = old_snapshot
@@ -3367,7 +3373,7 @@ impl CodegenAlternative {
.await;
codegen
.update(cx, |codegen, cx| {
.update(&mut cx, |codegen, cx| {
codegen.diff.deleted_row_ranges = deleted_row_ranges;
codegen.diff.inserted_row_ranges = inserted_row_ranges;
cx.notify();
@@ -3581,10 +3587,10 @@ impl CodeActionProvider for AssistantCodeActionProvider {
) -> Task<Result<ProjectTransaction>> {
let editor = self.editor.clone();
let workspace = self.workspace.clone();
window.spawn(cx, async move |cx| {
window.spawn(cx, |mut cx| async move {
let editor = editor.upgrade().context("editor was released")?;
let range = editor
.update(cx, |editor, cx| {
.update(&mut cx, |editor, cx| {
editor.buffer().update(cx, |multibuffer, cx| {
let buffer = buffer.read(cx);
let multibuffer_snapshot = multibuffer.read(cx);
@@ -3619,7 +3625,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
})
})?
.context("invalid range")?;
let assistant_panel = workspace.update(cx, |workspace, cx| {
let assistant_panel = workspace.update(&mut cx, |workspace, cx| {
workspace
.panel::<AssistantPanel>(cx)
.context("assistant panel was released")

View File

@@ -825,7 +825,7 @@ impl PromptEditor {
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
return;
};
self.pending_token_count = cx.spawn(async move |this, cx| {
self.pending_token_count = cx.spawn(|this, mut cx| async move {
cx.background_executor().timer(Duration::from_secs(1)).await;
let request =
cx.update_global(|inline_assistant: &mut TerminalInlineAssistant, cx| {
@@ -833,7 +833,7 @@ impl PromptEditor {
})??;
let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.token_count = Some(token_count);
cx.notify();
})
@@ -1140,7 +1140,7 @@ impl Codegen {
let telemetry = self.telemetry.clone();
self.status = CodegenStatus::Pending;
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
self.generation = cx.spawn(async move |this, cx| {
self.generation = cx.spawn(|this, mut cx| async move {
let model_telemetry_id = model.telemetry_id();
let model_provider_id = model.provider_id();
let response = model.stream_completion_text(prompt, &cx).await;
@@ -1197,12 +1197,12 @@ impl Codegen {
}
});
this.update(cx, |this, _| {
this.update(&mut cx, |this, _| {
this.message_id = message_id;
})?;
while let Some(hunk) = hunks_rx.next().await {
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if let Some(transaction) = &mut this.transaction {
transaction.push(hunk, cx);
cx.notify();
@@ -1216,7 +1216,7 @@ impl Codegen {
let result = generate.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {

View File

@@ -39,7 +39,6 @@ fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
git.workspace = true
git_ui.workspace = true
gpui.workspace = true
heed.workspace = true
html_to_markdown.workspace = true

View File

@@ -372,10 +372,10 @@ impl ActiveThread {
cx,
);
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let updated_context_ids = refresh_task.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.context_store.read_with(cx, |context_store, cx| {
context_store
.context()
@@ -394,10 +394,10 @@ impl ActiveThread {
let model_registry = LanguageModelRegistry::read_global(cx);
if let Some(model) = model_registry.active_model() {
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let updated_context = context_update_task.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.thread.update(cx, |thread, cx| {
thread.attach_tool_results(updated_context, cx);
if !canceled {
@@ -418,9 +418,9 @@ impl ActiveThread {
/// Only one task to save the thread will be in flight at a time.
fn save_thread(&mut self, cx: &mut Context<Self>) {
let thread = self.thread.clone();
self.save_thread_task = Some(cx.spawn(async move |this, cx| {
self.save_thread_task = Some(cx.spawn(|this, mut cx| async move {
let task = this
.update(cx, |this, cx| {
.update(&mut cx, |this, cx| {
this.thread_store
.update(cx, |thread_store, cx| thread_store.save_thread(&thread, cx))
})
@@ -550,7 +550,6 @@ impl ActiveThread {
let thread = self.thread.read(cx);
// Get all the data we need from thread before we start using it in closures
let checkpoint = thread.checkpoint_for_message(message_id);
let context = thread.context_for_message(message_id);
let tool_uses = thread.tool_uses_for_message(message_id);
let scripting_tool_uses = thread.scripting_tool_uses_for_message(message_id);
@@ -584,7 +583,7 @@ impl ActiveThread {
.p_2p5()
.child(edit_message_editor)
} else {
div().text_ui(cx).child(markdown.clone())
div().p_2p5().text_ui(cx).child(markdown.clone())
},
)
.when_some(context, |parent, context| {
@@ -604,16 +603,15 @@ impl ActiveThread {
let styled_message = match message.role {
Role::User => v_flex()
.id(("message-container", ix))
.pt_2()
.pl_2()
.pr_2p5()
.pt_2p5()
.px_2p5()
.child(
v_flex()
.bg(colors.editor_background)
.rounded_lg()
.border_1()
.border_color(colors.border)
.shadow_md()
.shadow_sm()
.child(
h_flex()
.py_1()
@@ -704,12 +702,12 @@ impl ActiveThread {
},
),
)
.child(div().p_2().child(message_content)),
.child(message_content),
),
Role::Assistant => {
v_flex()
.id(("message-container", ix))
.child(div().py_3().px_4().child(message_content))
.child(message_content)
.when(
!tool_uses.is_empty() || !scripting_tool_uses.is_empty(),
|parent| {
@@ -731,29 +729,11 @@ impl ActiveThread {
v_flex()
.bg(colors.editor_background)
.rounded_sm()
.child(div().p_4().child(message_content)),
.child(message_content),
),
};
v_flex()
.when_some(checkpoint, |parent, checkpoint| {
parent.child(
h_flex().pl_2().child(
Button::new("restore-checkpoint", "Restore Checkpoint")
.icon(IconName::Undo)
.size(ButtonSize::Compact)
.on_click(cx.listener(move |this, _, _window, cx| {
this.thread.update(cx, |thread, cx| {
thread
.restore_checkpoint(checkpoint.clone(), cx)
.detach_and_log_err(cx);
});
})),
),
)
})
.child(styled_message)
.into_any()
styled_message.into_any()
}
fn render_tool_use(&self, tool_use: ToolUse, cx: &mut Context<Self>) -> impl IntoElement {
@@ -765,7 +745,7 @@ impl ActiveThread {
let lighter_border = cx.theme().colors().border.opacity(0.5);
div().px_4().child(
div().px_2p5().child(
v_flex()
.rounded_lg()
.border_1()

View File

@@ -1,59 +0,0 @@
use std::sync::Arc;
use collections::HashMap;
use gpui::SharedString;
/// A profile for the Zed Agent that controls its behavior.
#[derive(Debug, Clone)]
pub struct AgentProfile {
/// The name of the profile.
pub name: SharedString,
pub tools: HashMap<Arc<str>, bool>,
#[allow(dead_code)]
pub context_servers: HashMap<Arc<str>, ContextServerPreset>,
}
#[derive(Debug, Clone)]
pub struct ContextServerPreset {
#[allow(dead_code)]
pub tools: HashMap<Arc<str>, bool>,
}
impl AgentProfile {
pub fn read_only() -> Self {
Self {
name: "Read-only".into(),
tools: HashMap::from_iter([
("diagnostics".into(), true),
("fetch".into(), true),
("list-directory".into(), true),
("now".into(), true),
("path-search".into(), true),
("read-file".into(), true),
("regex-search".into(), true),
("thinking".into(), true),
]),
context_servers: HashMap::default(),
}
}
pub fn code_writer() -> Self {
Self {
name: "Code Writer".into(),
tools: HashMap::from_iter([
("bash".into(), true),
("delete-path".into(), true),
("diagnostics".into(), true),
("edit-files".into(), true),
("fetch".into(), true),
("list-directory".into(), true),
("now".into(), true),
("path-search".into(), true),
("read-file".into(), true),
("regex-search".into(), true),
("thinking".into(), true),
]),
context_servers: HashMap::default(),
}
}
}
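
A profile, as shown above, is just a map from tool name to enabled flag; applying one (as the `ToolSelector` hunk further down does) amounts to clearing the current tool set and enabling the entries mapped to `true`. Below is a small standalone sketch under that assumption; `ToolSet` and `apply_profile` are hypothetical stand-ins, not the real `ToolWorkingSet` API.

```rust
use std::collections::{HashMap, HashSet};

// Hypothetical stand-in for `ToolWorkingSet`: just a set of enabled tool names.
#[derive(Default)]
struct ToolSet {
    enabled: HashSet<String>,
}

impl ToolSet {
    fn disable_all(&mut self) {
        self.enabled.clear();
    }
    fn enable(&mut self, tools: impl IntoIterator<Item = String>) {
        self.enabled.extend(tools);
    }
}

// Mirrors the `ToolSelector` hunk further down: clear everything, then enable
// only the subset the profile maps to `true`.
fn apply_profile(set: &mut ToolSet, profile_tools: &HashMap<String, bool>) {
    set.disable_all();
    set.enable(
        profile_tools
            .iter()
            .filter_map(|(tool, enabled)| enabled.then(|| tool.clone())),
    );
}

fn main() {
    let profile: HashMap<String, bool> =
        HashMap::from_iter([("read-file".to_string(), true), ("bash".to_string(), false)]);
    let mut set = ToolSet::default();
    apply_profile(&mut set, &profile);
    assert!(set.enabled.contains("read-file"));
    assert!(!set.enabled.contains("bash"));
    println!("{} tool(s) enabled", set.enabled.len());
}
```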

View File

@@ -1,5 +1,4 @@
mod active_thread;
mod agent_profile;
mod assistant_configuration;
mod assistant_model_selector;
mod assistant_panel;

View File

@@ -1,33 +1,19 @@
use std::sync::Arc;
use assistant_tool::{ToolSource, ToolWorkingSet};
use collections::HashMap;
use context_server::manager::ContextServerManager;
use gpui::{Action, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
use gpui::{Action, AnyView, App, EventEmitter, FocusHandle, Focusable, Subscription};
use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
use ui::{
prelude::*, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Switch, Tooltip,
};
use util::ResultExt as _;
use ui::{prelude::*, Divider, DividerColor, ElevationIndex};
use zed_actions::assistant::DeployPromptLibrary;
use zed_actions::ExtensionCategoryFilter;
pub struct AssistantConfiguration {
focus_handle: FocusHandle,
configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>,
context_server_manager: Entity<ContextServerManager>,
expanded_context_server_tools: HashMap<Arc<str>, bool>,
tools: Arc<ToolWorkingSet>,
_registry_subscription: Subscription,
}
impl AssistantConfiguration {
pub fn new(
context_server_manager: Entity<ContextServerManager>,
tools: Arc<ToolWorkingSet>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
pub fn new(window: &mut Window, cx: &mut Context<Self>) -> Self {
let focus_handle = cx.focus_handle();
let registry_subscription = cx.subscribe_in(
@@ -50,9 +36,6 @@ impl AssistantConfiguration {
let mut this = Self {
focus_handle,
configuration_views_by_provider: HashMap::default(),
context_server_manager,
expanded_context_server_tools: HashMap::default(),
tools,
_registry_subscription: registry_subscription,
};
this.build_provider_configuration_views(window, cx);
@@ -160,185 +143,6 @@ impl AssistantConfiguration {
}),
)
}
fn render_context_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let context_servers = self.context_server_manager.read(cx).all_servers().clone();
let tools_by_source = self.tools.tools_by_source(cx);
let empty = Vec::new();
const SUBHEADING: &str = "Connect to context servers via the Model Context Protocol either via Zed extensions or directly.";
v_flex()
.p(DynamicSpacing::Base16.rems(cx))
.mt_1()
.gap_2()
.flex_1()
.child(
v_flex()
.gap_0p5()
.child(Headline::new("Context Servers (MCP)").size(HeadlineSize::Small))
.child(Label::new(SUBHEADING).color(Color::Muted)),
)
.children(context_servers.into_iter().map(|context_server| {
let is_running = context_server.client().is_some();
let are_tools_expanded = self
.expanded_context_server_tools
.get(&context_server.id())
.copied()
.unwrap_or_default();
let tools = tools_by_source
.get(&ToolSource::ContextServer {
id: context_server.id().into(),
})
.unwrap_or_else(|| &empty);
let tool_count = tools.len();
v_flex()
.border_1()
.rounded_sm()
.border_color(cx.theme().colors().border)
.bg(cx.theme().colors().editor_background)
.child(
h_flex()
.justify_between()
.px_2()
.py_1()
.when(are_tools_expanded, |element| {
element
.border_b_1()
.border_color(cx.theme().colors().border)
})
.child(
h_flex()
.gap_2()
.child(
Disclosure::new("tool-list-disclosure", are_tools_expanded)
.on_click(cx.listener({
let context_server_id = context_server.id();
move |this, _event, _window, _cx| {
let is_open = this
.expanded_context_server_tools
.entry(context_server_id.clone())
.or_insert(false);
*is_open = !*is_open;
}
})),
)
.child(Indicator::dot().color(if is_running {
Color::Success
} else {
Color::Error
}))
.child(Label::new(context_server.id()))
.child(
Label::new(format!("{tool_count} tools"))
.color(Color::Muted),
),
)
.child(h_flex().child(
Switch::new("context-server-switch", is_running.into()).on_click({
let context_server_manager =
self.context_server_manager.clone();
let context_server = context_server.clone();
move |state, _window, cx| match state {
ToggleState::Unselected | ToggleState::Indeterminate => {
context_server_manager.update(cx, |this, cx| {
this.stop_server(context_server.clone(), cx)
.log_err();
});
}
ToggleState::Selected => {
cx.spawn({
let context_server_manager =
context_server_manager.clone();
let context_server = context_server.clone();
async move |cx| {
if let Some(start_server_task) =
context_server_manager
.update(cx, |this, cx| {
this.start_server(
context_server,
cx,
)
})
.log_err()
{
start_server_task.await.log_err();
}
}
})
.detach();
}
}
}),
)),
)
.map(|parent| {
if !are_tools_expanded {
return parent;
}
parent.child(v_flex().children(tools.into_iter().enumerate().map(
|(ix, tool)| {
h_flex()
.px_2()
.py_1()
.when(ix < tool_count - 1, |element| {
element
.border_b_1()
.border_color(cx.theme().colors().border)
})
.child(Label::new(tool.name()))
},
)))
})
}))
.child(
h_flex()
.justify_between()
.gap_2()
.child(
h_flex().w_full().child(
Button::new("add-context-server", "Add Context Server")
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.full_width()
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.disabled(true)
.tooltip(Tooltip::text("Not yet implemented")),
),
)
.child(
h_flex().w_full().child(
Button::new(
"install-context-server-extensions",
"Install Context Server Extensions",
)
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.full_width()
.icon(IconName::DatabaseZap)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.on_click(|_event, window, cx| {
window.dispatch_action(
zed_actions::Extensions {
category_filter: Some(
ExtensionCategoryFilter::ContextServers,
),
}
.boxed_clone(),
cx,
)
}),
),
),
)
}
}
impl Render for AssistantConfiguration {
@@ -378,8 +182,6 @@ impl Render for AssistantConfiguration {
),
)
.child(Divider::horizontal().color(DividerColor::Border))
.child(self.render_context_servers_section(cx))
.child(Divider::horizontal().color(DividerColor::Border))
.child(
v_flex()
.p(DynamicSpacing::Base16.rems(cx))

View File

@@ -110,16 +110,19 @@ impl AssistantPanel {
prompt_builder: Arc<PromptBuilder>,
cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
let tools = Arc::new(ToolWorkingSet::default());
let thread_store = workspace.update(cx, |workspace, cx| {
log::info!("[assistant2-debug] initializing ThreadStore");
let thread_store = workspace.update(&mut cx, |workspace, cx| {
let project = workspace.project().clone();
ThreadStore::new(project, tools.clone(), prompt_builder.clone(), cx)
})??;
log::info!("[assistant2-debug] finished initializing ThreadStore");
let slash_commands = Arc::new(SlashCommandWorkingSet::default());
log::info!("[assistant2-debug] initializing ContextStore");
let context_store = workspace
.update(cx, |workspace, cx| {
.update(&mut cx, |workspace, cx| {
let project = workspace.project().clone();
assistant_context_editor::ContextStore::new(
project,
@@ -129,8 +132,9 @@ impl AssistantPanel {
)
})?
.await?;
log::info!("[assistant2-debug] finished initializing ContextStore");
workspace.update_in(cx, |workspace, window, cx| {
workspace.update_in(&mut cx, |workspace, window, cx| {
cx.new(|cx| Self::new(workspace, thread_store, context_store, window, cx))
})
})
@@ -143,6 +147,7 @@ impl AssistantPanel {
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
log::info!("[assistant2-debug] AssistantPanel::new");
let thread = thread_store.update(cx, |this, cx| this.create_thread(cx));
let fs = workspace.app_state().fs.clone();
let project = workspace.project().clone();
@@ -344,9 +349,9 @@ impl AssistantPanel {
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
let context = context.await?;
this.update_in(cx, |this, window, cx| {
this.update_in(&mut cx, |this, window, cx| {
let editor = cx.new(|cx| {
ContextEditor::for_context(
context,
@@ -377,9 +382,9 @@ impl AssistantPanel {
.thread_store
.update(cx, |this, cx| this.open_thread(thread_id, cx));
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
let thread = open_thread_task.await?;
this.update_in(cx, |this, window, cx| {
this.update_in(&mut cx, |this, window, cx| {
this.active_view = ActiveView::Thread;
let message_editor_context_store =
cx.new(|_cx| crate::context_store::ContextStore::new(this.workspace.clone()));
@@ -410,13 +415,8 @@ impl AssistantPanel {
}
pub(crate) fn open_configuration(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let context_server_manager = self.thread_store.read(cx).context_server_manager();
let tools = self.thread_store.read(cx).tools();
self.active_view = ActiveView::Configuration;
self.configuration = Some(
cx.new(|cx| AssistantConfiguration::new(context_server_manager, tools, window, cx)),
);
self.configuration = Some(cx.new(|cx| AssistantConfiguration::new(window, cx)));
if let Some(configuration) = self.configuration.as_ref() {
self.configuration_subscription = Some(cx.subscribe_in(
@@ -450,10 +450,10 @@ impl AssistantPanel {
.languages
.language_for_name("Markdown");
let thread = self.active_thread(cx);
cx.spawn_in(window, async move |_this, cx| {
cx.spawn_in(window, |_this, mut cx| async move {
let markdown_language = markdown_language_task.await?;
workspace.update_in(cx, |workspace, window, cx| {
workspace.update_in(&mut cx, |workspace, window, cx| {
let thread = thread.read(cx);
let markdown = thread.to_markdown()?;
let thread_summary = thread

View File

@@ -367,7 +367,7 @@ impl CodegenAlternative {
let request = self.build_request(user_prompt, cx)?;
self.request = Some(request.clone());
cx.spawn(async move |_, cx| model.stream_completion_text(request, &cx).await)
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await })
.boxed_local()
};
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
@@ -480,207 +480,213 @@ impl CodegenAlternative {
let completion = Arc::new(Mutex::new(String::new()));
let completion_clone = completion.clone();
self.generation = cx.spawn(async move |codegen, cx| {
let stream = stream.await;
let message_id = stream
.as_ref()
.ok()
.and_then(|stream| stream.message_id.clone());
let generate = async {
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let executor = cx.background_executor().clone();
let message_id = message_id.clone();
let line_based_stream_diff: Task<anyhow::Result<()>> =
cx.background_spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
let diff = async {
let chunks = StripInvalidSpans::new(stream?.stream);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut line_diff = LineDiff::default();
self.generation = cx.spawn(|codegen, mut cx| {
async move {
let stream = stream.await;
let message_id = stream
.as_ref()
.ok()
.and_then(|stream| stream.message_id.clone());
let generate = async {
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let executor = cx.background_executor().clone();
let message_id = message_id.clone();
let line_based_stream_diff: Task<anyhow::Result<()>> =
cx.background_spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
let diff = async {
let chunks = StripInvalidSpans::new(stream?.stream);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut line_diff = LineDiff::default();
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
completion_clone.lock().push_str(&chunk);
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta =
line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(
selection_start.column as usize,
);
}
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
}
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
completion_clone.lock().push_str(&chunk);
if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
new_text.clear();
}
if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta =
line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(
selection_start.column as usize,
);
}
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
}
}
if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
new_text.clear();
}
line_indent = None;
first_line = false;
if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear();
}
line_indent = None;
first_line = false;
}
}
}
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
anyhow::Ok(())
};
let result = diff.await;
let error_message =
result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
result?;
Ok(())
});
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(&mut cx, |codegen, cx| {
codegen.last_equal_ranges.clear();
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
}
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
anyhow::Ok(())
};
let result = diff.await;
let error_message = result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
result?;
Ok(())
});
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(cx, |codegen, cx| {
codegen.last_equal_ranges.clear();
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
}
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
cx.notify();
})?;
}
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
codegen
.update(cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
cx.notify();
})?;
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) =
join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
codegen
.update(&mut cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
}
});
cx.notify();
}
@@ -798,7 +804,7 @@ impl CodegenAlternative {
let new_snapshot = self.buffer.read(cx).snapshot(cx);
let new_range = self.range.to_point(&new_snapshot);
cx.spawn(async move |codegen, cx| {
cx.spawn(|codegen, mut cx| async move {
let (deleted_row_ranges, inserted_row_ranges) = cx
.background_spawn(async move {
let old_text = old_snapshot
@@ -848,7 +854,7 @@ impl CodegenAlternative {
.await;
codegen
.update(cx, |codegen, cx| {
.update(&mut cx, |codegen, cx| {
codegen.diff.deleted_row_ranges = deleted_row_ranges;
codegen.diff.inserted_row_ranges = inserted_row_ranges;
cx.notify();
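
The streaming loop above re-indents each generated line: it records the indent of the first non-empty generated line as a base, shifts every later line by its delta from that base on top of the suggested indent at the cursor, and clamps the result at zero. Below is a self-contained sketch of just that arithmetic (spaces only, no `suggested_line_indent` struct); `corrected_indent` is a made-up helper, and it omits the first-line adjustment that subtracts the selection's start column.

```rust
// Standalone sketch of the indent-correction arithmetic in the streaming
// codegen above (simplified: spaces only). `base_indent` is the indent of the
// first generated line; each later line is shifted by its delta from that
// base, clamped at zero.
fn corrected_indent(suggested_len: usize, base_indent: usize, line_indent: usize) -> usize {
    let indent_delta = line_indent as i32 - base_indent as i32;
    std::cmp::max(0, suggested_len as i32 + indent_delta) as usize
}

fn main() {
    // Suggested indent at the cursor is 8 spaces; the model's first line is
    // indented 4, a nested line 8, so the nested line ends up at 8 + 4 = 12.
    assert_eq!(corrected_indent(8, 4, 4), 8);
    assert_eq!(corrected_indent(8, 4, 8), 12);
    // A line less indented than the base clamps at zero rather than going negative.
    assert_eq!(corrected_indent(2, 6, 0), 0);
    println!("indent arithmetic checks out");
}
```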

View File

@@ -281,8 +281,10 @@ impl ContextPicker {
context_store.add_file_from_path(project_path.clone(), cx)
});
cx.spawn_in(window, async move |_, cx| task.await.notify_async_err(cx))
.detach();
cx.spawn_in(window, |_, mut cx| async move {
task.await.notify_async_err(&mut cx)
})
.detach();
cx.notify();
}
@@ -305,13 +307,13 @@ impl ContextPicker {
};
let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&thread.id, cx));
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let thread = open_thread_task.await?;
context_store.update(cx, |context_store, cx| {
context_store.update(&mut cx, |context_store, cx| {
context_store.add_thread(thread, cx);
})?;
this.update(cx, |_this, cx| cx.notify())
this.update(&mut cx, |_this, cx| cx.notify())
})
}

View File

@@ -206,12 +206,12 @@ impl PickerDelegate for FetchContextPickerDelegate {
let http_client = workspace.read(cx).client().http_client().clone();
let url = self.url.clone();
let confirm_behavior = self.confirm_behavior;
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
let text = cx
.background_spawn(Self::build_message(http_client, url.clone()))
.await?;
this.update_in(cx, |this, window, cx| {
this.update_in(&mut cx, |this, window, cx| {
this.delegate
.context_store
.update(cx, |context_store, _cx| {

View File

@@ -206,11 +206,11 @@ impl PickerDelegate for FileContextPickerDelegate {
let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx);
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
            // TODO: This should probably be run in the background.
let paths = search_task.await;
this.update(cx, |this, _cx| {
this.update(&mut cx, |this, _cx| {
this.delegate.matches = paths;
})
.log_err();
@@ -345,10 +345,10 @@ impl PickerDelegate for FileContextPickerDelegate {
};
let confirm_behavior = self.confirm_behavior;
cx.spawn_in(window, async move |this, cx| {
match task.await.notify_async_err(cx) {
cx.spawn_in(window, |this, mut cx| async move {
match task.await.notify_async_err(&mut cx) {
None => anyhow::Ok(()),
Some(()) => this.update_in(cx, |this, window, cx| match confirm_behavior {
Some(()) => this.update_in(&mut cx, |this, window, cx| match confirm_behavior {
ConfirmBehavior::KeepOpen => {}
ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
}),

View File

@@ -149,9 +149,9 @@ impl PickerDelegate for ThreadContextPickerDelegate {
}
});
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
let matches = search_task.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.delegate.matches = matches;
this.delegate.selected_index = 0;
cx.notify();
@@ -171,9 +171,9 @@ impl PickerDelegate for ThreadContextPickerDelegate {
let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&entry.id, cx));
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
let thread = open_thread_task.await?;
this.update_in(cx, |this, window, cx| {
this.update_in(&mut cx, |this, window, cx| {
this.delegate
.context_store
.update(cx, |context_store, cx| context_store.add_thread(thread, cx))

View File

@@ -75,15 +75,15 @@ impl ContextStore {
return Task::ready(Err(anyhow!("failed to read project")));
};
cx.spawn(async move |this, cx| {
let open_buffer_task = project.update(cx, |project, cx| {
cx.spawn(|this, mut cx| async move {
let open_buffer_task = project.update(&mut cx, |project, cx| {
project.open_buffer(project_path.clone(), cx)
})?;
let buffer_entity = open_buffer_task.await?;
let buffer_id = this.update(cx, |_, cx| buffer_entity.read(cx).remote_id())?;
let buffer_id = this.update(&mut cx, |_, cx| buffer_entity.read(cx).remote_id())?;
let already_included = this.update(cx, |this, _cx| {
let already_included = this.update(&mut cx, |this, _cx| {
match this.will_include_buffer(buffer_id, &project_path.path) {
Some(FileInclusion::Direct(context_id)) => {
this.remove_context(context_id);
@@ -98,7 +98,7 @@ impl ContextStore {
return anyhow::Ok(());
}
let (buffer_info, text_task) = this.update(cx, |_, cx| {
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| {
let buffer = buffer_entity.read(cx);
collect_buffer_info_and_text(
project_path.path.clone(),
@@ -110,7 +110,7 @@ impl ContextStore {
let text = text_task.await;
this.update(cx, |this, _cx| {
this.update(&mut cx, |this, _cx| {
this.insert_file(make_context_buffer(buffer_info, text));
})?;
@@ -123,8 +123,8 @@ impl ContextStore {
buffer_entity: Entity<Buffer>,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
cx.spawn(async move |this, cx| {
let (buffer_info, text_task) = this.update(cx, |_, cx| {
cx.spawn(|this, mut cx| async move {
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| {
let buffer = buffer_entity.read(cx);
let Some(file) = buffer.file() else {
return Err(anyhow!("Buffer has no path."));
@@ -139,7 +139,7 @@ impl ContextStore {
let text = text_task.await;
this.update(cx, |this, _cx| {
this.update(&mut cx, |this, _cx| {
this.insert_file(make_context_buffer(buffer_info, text))
})?;
@@ -179,18 +179,18 @@ impl ContextStore {
}
let worktree_id = project_path.worktree_id;
cx.spawn(async move |this, cx| {
let worktree = project.update(cx, |project, cx| {
cx.spawn(|this, mut cx| async move {
let worktree = project.update(&mut cx, |project, cx| {
project
.worktree_for_id(worktree_id, cx)
.ok_or_else(|| anyhow!("no worktree found for {worktree_id:?}"))
})??;
let files = worktree.update(cx, |worktree, _cx| {
let files = worktree.update(&mut cx, |worktree, _cx| {
collect_files_in_path(worktree, &project_path.path)
})?;
let open_buffers_task = project.update(cx, |project, cx| {
let open_buffers_task = project.update(&mut cx, |project, cx| {
let tasks = files.iter().map(|file_path| {
project.open_buffer(
ProjectPath {
@@ -207,7 +207,7 @@ impl ContextStore {
let mut buffer_infos = Vec::new();
let mut text_tasks = Vec::new();
this.update(cx, |_, cx| {
this.update(&mut cx, |_, cx| {
for (path, buffer_entity) in files.into_iter().zip(buffers) {
// Skip all binary files and other non-UTF8 files
if let Ok(buffer_entity) = buffer_entity {
@@ -236,7 +236,7 @@ impl ContextStore {
bail!("No text files found in {}", &project_path.path.display());
}
this.update(cx, |this, _| {
this.update(&mut cx, |this, _| {
this.insert_directory(&project_path.path, context_buffers);
})?;
@@ -595,10 +595,10 @@ fn refresh_file_text(
let id = file_context.id;
let task = refresh_context_buffer(&file_context.context_buffer, cx);
if let Some(task) = task {
Some(cx.spawn(async move |cx| {
Some(cx.spawn(|mut cx| async move {
let context_buffer = task.await;
context_store
.update(cx, |context_store, _| {
.update(&mut cx, |context_store, _| {
let new_file_context = FileContext { id, context_buffer };
context_store.replace_context(AssistantContext::File(new_file_context));
})
@@ -636,10 +636,10 @@ fn refresh_directory_text(
let id = directory_context.snapshot.id;
let path = directory_context.path.clone();
Some(cx.spawn(async move |cx| {
Some(cx.spawn(|mut cx| async move {
let context_buffers = context_buffers.await;
context_store
.update(cx, |context_store, _| {
.update(&mut cx, |context_store, _| {
let new_directory_context = DirectoryContext::new(id, &path, context_buffers);
context_store.replace_context(AssistantContext::Directory(new_directory_context));
})
@@ -654,9 +654,9 @@ fn refresh_thread_text(
) -> Task<()> {
let id = thread_context.id;
let thread = thread_context.thread.clone();
cx.spawn(async move |cx| {
cx.spawn(move |mut cx| async move {
context_store
.update(cx, |context_store, cx| {
.update(&mut cx, |context_store, cx| {
let text = thread.read(cx).text().into();
context_store.replace_context(AssistantContext::Thread(ThreadContext {
id,

View File

@@ -335,12 +335,12 @@ impl ContextStrip {
context_store.accept_suggested_context(&suggested, cx)
});
cx.spawn_in(window, async move |this, cx| {
match task.await.notify_async_err(cx) {
cx.spawn_in(window, |this, mut cx| async move {
match task.await.notify_async_err(&mut cx) {
None => {}
Some(()) => {
if let Some(this) = this.upgrade() {
this.update(cx, |_, cx| cx.notify())?;
this.update(&mut cx, |_, cx| cx.notify())?;
}
}
}

View File

@@ -276,7 +276,7 @@ impl InlineAssistant {
if is_authenticated() {
handle_assist(window, cx);
} else {
cx.spawn_in(window, async move |_workspace, cx| {
cx.spawn_in(window, |_workspace, mut cx| async move {
let Some(task) = cx.update(|_, cx| {
LanguageModelRegistry::read_global(cx)
.active_provider()
@@ -1456,9 +1456,9 @@ impl EditorInlineAssists {
assist_ids: Vec::new(),
scroll_lock: None,
highlight_updates: highlight_updates_tx,
_update_highlights: cx.spawn({
_update_highlights: cx.spawn(|cx| {
let editor = editor.downgrade();
async move |cx| {
async move {
while let Ok(()) = highlight_updates_rx.changed().await {
let editor = editor.upgrade().context("editor was dropped")?;
cx.update_global(|assistant: &mut InlineAssistant, cx| {
@@ -1748,10 +1748,10 @@ impl CodeActionProvider for AssistantCodeActionProvider {
let editor = self.editor.clone();
let workspace = self.workspace.clone();
let thread_store = self.thread_store.clone();
window.spawn(cx, async move |cx| {
window.spawn(cx, |mut cx| async move {
let editor = editor.upgrade().context("editor was released")?;
let range = editor
.update(cx, |editor, cx| {
.update(&mut cx, |editor, cx| {
editor.buffer().update(cx, |multibuffer, cx| {
let buffer = buffer.read(cx);
let multibuffer_snapshot = multibuffer.read(cx);

View File

@@ -3,25 +3,23 @@ use std::sync::Arc;
use collections::HashSet;
use editor::actions::MoveUp;
use editor::{Editor, EditorElement, EditorEvent, EditorStyle};
use file_icons::FileIcons;
use fs::Fs;
use git::ExpandCommitEditor;
use git_ui::git_panel;
use gpui::{
Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle,
WeakEntity,
};
use language_model::LanguageModelRegistry;
use language_model_selector::ToggleModelSelector;
use project::Project;
use rope::Point;
use settings::Settings;
use std::time::Duration;
use text::Bias;
use theme::ThemeSettings;
use ui::{
prelude::*, ButtonLike, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, Tooltip,
prelude::*, ButtonLike, Disclosure, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle,
Tooltip,
};
use util::ResultExt;
use vim_mode_setting::VimModeSetting;
use workspace::notifications::{NotificationId, NotifyTaskExt};
use workspace::{Toast, Workspace};
@@ -39,7 +37,6 @@ pub struct MessageEditor {
thread: Entity<Thread>,
editor: Entity<Editor>,
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
context_store: Entity<ContextStore>,
context_strip: Entity<ContextStrip>,
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
@@ -47,6 +44,7 @@ pub struct MessageEditor {
inline_context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
model_selector: Entity<AssistantModelSelector>,
tool_selector: Entity<ToolSelector>,
edits_expanded: bool,
_subscriptions: Vec<Subscription>,
}
@@ -109,9 +107,8 @@ impl MessageEditor {
];
Self {
editor: editor.clone(),
project: thread.read(cx).project().clone(),
thread,
editor: editor.clone(),
workspace,
context_store,
context_strip,
@@ -128,6 +125,7 @@ impl MessageEditor {
)
}),
tool_selector: cx.new(|cx| ToolSelector::new(tools, cx)),
edits_expanded: false,
_subscriptions: subscriptions,
}
}
@@ -208,15 +206,12 @@ impl MessageEditor {
let thread = self.thread.clone();
let context_store = self.context_store.clone();
let git_store = self.project.read(cx).git_store();
let checkpoint = git_store.read(cx).checkpoint(cx);
cx.spawn(async move |_, cx| {
cx.spawn(move |_, mut cx| async move {
refresh_task.await;
let checkpoint = checkpoint.await.log_err();
thread
.update(cx, |thread, cx| {
.update(&mut cx, |thread, cx| {
let context = context_store.read(cx).snapshot(cx).collect::<Vec<_>>();
thread.insert_user_message(user_message, context, checkpoint, cx);
thread.insert_user_message(user_message, context, cx);
thread.send_to_model(model, request_kind, cx);
})
.ok();
@@ -302,9 +297,9 @@ impl MessageEditor {
.thread
.update(cx, |thread, cx| thread.report_feedback(is_positive, cx));
cx.spawn(async move |_, cx| {
cx.spawn(|_, mut cx| async move {
report.await?;
workspace.update(cx, |workspace, cx| {
workspace.update(&mut cx, |workspace, cx| {
let message = if is_positive {
"Positive feedback recorded. Thank you!"
} else {
@@ -352,12 +347,8 @@ impl Render for MessageEditor {
px(64.)
};
let project = self.thread.read(cx).project();
let changed_files = if let Some(repository) = project.read(cx).active_repository(cx) {
repository.read(cx).status().count()
} else {
0
};
let changed_buffers = self.thread.read(cx).scripting_changed_buffers(cx);
let changed_buffers_count = changed_buffers.len();
v_flex()
.size_full()
@@ -419,7 +410,7 @@ impl Render for MessageEditor {
),
)
})
.when(changed_files > 0, |parent| {
.when(changed_buffers_count > 0, |parent| {
parent.child(
v_flex()
.mx_2()
@@ -430,60 +421,96 @@ impl Render for MessageEditor {
.rounded_t_md()
.child(
h_flex()
.justify_between()
.gap_2()
.p_2()
.child(
h_flex()
.gap_2()
.child(
IconButton::new(
"edits-disclosure",
IconName::GitBranchSmall,
)
.icon_size(IconSize::Small)
.on_click(
|_ev, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(&git_panel::ToggleFocus)
});
},
),
)
.child(
Label::new(format!(
"{} {} changed",
changed_files,
if changed_files == 1 { "file" } else { "files" }
))
.size(LabelSize::XSmall)
.color(Color::Muted),
),
Disclosure::new("edits-disclosure", self.edits_expanded)
.on_click(cx.listener(|this, _ev, _window, cx| {
this.edits_expanded = !this.edits_expanded;
cx.notify();
})),
)
.child(
h_flex()
.gap_2()
.child(
Button::new("review", "Review")
.label_size(LabelSize::XSmall)
.on_click(|_event, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(
&git_ui::project_diff::Diff,
);
});
}),
)
.child(
Button::new("commit", "Commit")
.label_size(LabelSize::XSmall)
.on_click(|_event, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(&ExpandCommitEditor)
});
}),
),
Label::new("Edits")
.size(LabelSize::XSmall)
.color(Color::Muted),
)
.child(Label::new("").size(LabelSize::XSmall).color(Color::Muted))
.child(
Label::new(format!(
"{} {}",
changed_buffers_count,
if changed_buffers_count == 1 {
"file"
} else {
"files"
}
))
.size(LabelSize::XSmall)
.color(Color::Muted),
),
),
)
.when(self.edits_expanded, |parent| {
parent.child(
v_flex().bg(cx.theme().colors().editor_background).children(
changed_buffers.enumerate().flat_map(|(index, buffer)| {
let file = buffer.read(cx).file()?;
let path = file.path();
let parent_label = path.parent().and_then(|parent| {
let parent_str = parent.to_string_lossy();
if parent_str.is_empty() {
None
} else {
Some(
Label::new(format!(
"{}{}",
parent_str,
std::path::MAIN_SEPARATOR_STR
))
.color(Color::Muted)
.size(LabelSize::Small),
)
}
});
let name_label = path.file_name().map(|name| {
Label::new(name.to_string_lossy().to_string())
.size(LabelSize::Small)
});
let file_icon = FileIcons::get_icon(&path, cx)
.map(Icon::from_path)
.unwrap_or_else(|| Icon::new(IconName::File));
let element = div()
.p_2()
.when(index + 1 < changed_buffers_count, |parent| {
parent
.border_color(cx.theme().colors().border)
.border_b_1()
})
.child(
h_flex()
.gap_2()
.child(file_icon)
.child(
// TODO: handle overflow
h_flex()
.children(parent_label)
.children(name_label),
)
// TODO: show lines changed
.child(Label::new("+").color(Color::Created))
.child(Label::new("-").color(Color::Deleted)),
);
Some(element)
}),
),
)
}),
)
})
.child(
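
The changed-files list in the hunk above renders each buffer's path as a muted parent-directory label followed by a file-name label, skipping the parent label when it would be empty. Here is a std-only sketch of that split; `split_for_display` is a hypothetical helper, not the actual UI code, which turns these strings into `Label` elements.

```rust
// Std-only sketch of the path split used for the changed-files list above:
// parent directory (shown muted, with a trailing separator) plus file name.
use std::path::Path;

fn split_for_display(path: &Path) -> (Option<String>, Option<String>) {
    let parent_label = path.parent().and_then(|parent| {
        let parent_str = parent.to_string_lossy();
        if parent_str.is_empty() {
            None
        } else {
            Some(format!("{}{}", parent_str, std::path::MAIN_SEPARATOR_STR))
        }
    });
    let name_label = path
        .file_name()
        .map(|name| name.to_string_lossy().to_string());
    (parent_label, name_label)
}

fn main() {
    let (parent, name) = split_for_display(Path::new("crates/assistant2/src/thread.rs"));
    // On Unix this prints: Some("crates/assistant2/src/") Some("thread.rs")
    println!("{parent:?} {name:?}");
    // A bare file name has no parent label, matching the `is_empty` check above.
    assert_eq!(split_for_display(Path::new("Cargo.toml")).0, None);
}
```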

View File

@@ -40,7 +40,7 @@ impl TerminalCodegen {
let telemetry = self.telemetry.clone();
self.status = CodegenStatus::Pending;
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
self.generation = cx.spawn(async move |this, cx| {
self.generation = cx.spawn(|this, mut cx| async move {
let model_telemetry_id = model.telemetry_id();
let model_provider_id = model.provider_id();
let response = model.stream_completion_text(prompt, &cx).await;
@@ -97,12 +97,12 @@ impl TerminalCodegen {
}
});
this.update(cx, |this, _| {
this.update(&mut cx, |this, _| {
this.message_id = message_id;
})?;
while let Some(hunk) = hunks_rx.next().await {
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if let Some(transaction) = &mut this.transaction {
transaction.push(hunk, cx);
cx.notify();
@@ -116,7 +116,7 @@ impl TerminalCodegen {
let result = generate.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {

View File

@@ -16,7 +16,6 @@ use language_model::{
LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError,
Role, StopReason, TokenUsage,
};
use project::git::GitStoreCheckpoint;
use project::Project;
use prompt_store::{AssistantSystemPromptWorktree, PromptBuilder};
use scripting_tool::{ScriptingSession, ScriptingTool};
@@ -90,12 +89,6 @@ pub struct GitState {
pub diff: Option<String>,
}
#[derive(Clone)]
pub struct ThreadCheckpoint {
message_id: MessageId,
git_checkpoint: GitStoreCheckpoint,
}
/// A thread of conversation with the LLM.
pub struct Thread {
id: ThreadId,
@@ -106,7 +99,6 @@ pub struct Thread {
next_message_id: MessageId,
context: BTreeMap<ContextId, ContextSnapshot>,
context_by_message: HashMap<MessageId, Vec<ContextId>>,
checkpoints_by_message: HashMap<MessageId, GitStoreCheckpoint>,
completion_count: usize,
pending_completions: Vec<PendingCompletion>,
project: Entity<Project>,
@@ -136,7 +128,6 @@ impl Thread {
next_message_id: MessageId(0),
context: BTreeMap::default(),
context_by_message: HashMap::default(),
checkpoints_by_message: HashMap::default(),
completion_count: 0,
pending_completions: Vec::new(),
project: project.clone(),
@@ -197,7 +188,6 @@ impl Thread {
next_message_id,
context: BTreeMap::default(),
context_by_message: HashMap::default(),
checkpoints_by_message: HashMap::default(),
completion_count: 0,
pending_completions: Vec::new(),
project,
@@ -259,45 +249,6 @@ impl Thread {
&self.tools
}
pub fn checkpoint_for_message(&self, id: MessageId) -> Option<ThreadCheckpoint> {
let checkpoint = self.checkpoints_by_message.get(&id).cloned()?;
Some(ThreadCheckpoint {
message_id: id,
git_checkpoint: checkpoint,
})
}
pub fn restore_checkpoint(
&mut self,
checkpoint: ThreadCheckpoint,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let project = self.project.read(cx);
let restore = project
.git_store()
.read(cx)
.restore_checkpoint(checkpoint.git_checkpoint, cx);
cx.spawn(async move |this, cx| {
restore.await?;
this.update(cx, |this, cx| this.truncate(checkpoint.message_id, cx))
})
}
pub fn truncate(&mut self, message_id: MessageId, cx: &mut Context<Self>) {
let Some(message_ix) = self
.messages
.iter()
.rposition(|message| message.id == message_id)
else {
return;
};
for deleted_message in self.messages.drain(message_ix..) {
self.context_by_message.remove(&deleted_message.id);
self.checkpoints_by_message.remove(&deleted_message.id);
}
cx.notify();
}
pub fn context_for_message(&self, id: MessageId) -> Option<Vec<ContextSnapshot>> {
let context = self.context_by_message.get(&id)?;
Some(
@@ -345,6 +296,13 @@ impl Thread {
self.scripting_tool_use.tool_results_for_message(id)
}
pub fn scripting_changed_buffers<'a>(
&self,
cx: &'a App,
) -> impl ExactSizeIterator<Item = &'a Entity<language::Buffer>> {
self.scripting_session.read(cx).changed_buffers()
}
pub fn message_has_tool_results(&self, message_id: MessageId) -> bool {
self.tool_use.message_has_tool_results(message_id)
}
@@ -357,7 +315,6 @@ impl Thread {
&mut self,
text: impl Into<String>,
context: Vec<ContextSnapshot>,
checkpoint: Option<GitStoreCheckpoint>,
cx: &mut Context<Self>,
) -> MessageId {
let message_id = self.insert_message(Role::User, text, cx);
@@ -365,9 +322,6 @@ impl Thread {
self.context
.extend(context.into_iter().map(|context| (context.id, context)));
self.context_by_message.insert(message_id, context_ids);
if let Some(checkpoint) = checkpoint {
self.checkpoints_by_message.insert(message_id, checkpoint);
}
message_id
}
@@ -440,9 +394,9 @@ impl Thread {
/// Serializes this thread into a format for storage or telemetry.
pub fn serialize(&self, cx: &mut Context<Self>) -> Task<Result<SerializedThread>> {
let initial_project_snapshot = self.initial_project_snapshot.clone();
cx.spawn(async move |this, cx| {
cx.spawn(|this, cx| async move {
let initial_project_snapshot = initial_project_snapshot.await;
this.read_with(cx, |this, _| SerializedThread {
this.read_with(&cx, |this, _| SerializedThread {
summary: this.summary_or_default(),
updated_at: this.updated_at(),
messages: this
@@ -648,10 +602,8 @@ impl Thread {
) {
let pending_completion_id = post_inc(&mut self.completion_count);
let task = cx.spawn(async move |thread, cx| {
let task = cx.spawn(|thread, mut cx| async move {
let stream = model.stream_completion(request, &cx);
let initial_token_usage =
thread.read_with(cx, |thread, _cx| thread.cumulative_token_usage.clone());
let stream_completion = async {
let mut events = stream.await?;
let mut stop_reason = StopReason::EndTurn;
@@ -660,7 +612,7 @@ impl Thread {
while let Some(event) = events.next().await {
let event = event?;
thread.update(cx, |thread, cx| {
thread.update(&mut cx, |thread, cx| {
match event {
LanguageModelCompletionEvent::StartMessage { .. } => {
thread.insert_message(Role::Assistant, String::new(), cx);
@@ -719,7 +671,7 @@ impl Thread {
smol::future::yield_now().await;
}
thread.update(cx, |thread, cx| {
thread.update(&mut cx, |thread, cx| {
thread
.pending_completions
.retain(|completion| completion.id != pending_completion_id);
@@ -735,7 +687,7 @@ impl Thread {
let result = stream_completion.await;
thread
.update(cx, |thread, cx| {
.update(&mut cx, |thread, cx| {
match result.as_ref() {
Ok(stop_reason) => match stop_reason {
StopReason::ToolUse => {
@@ -766,21 +718,6 @@ impl Thread {
}
}
cx.emit(ThreadEvent::DoneStreaming);
if let Ok(initial_usage) = initial_token_usage {
let usage = thread.cumulative_token_usage.clone() - initial_usage;
telemetry::event!(
"Assistant Thread Completion",
thread_id = thread.id().to_string(),
model = model.telemetry_id(),
model_provider = model.provider_id().to_string(),
input_tokens = usage.input_tokens,
output_tokens = usage.output_tokens,
cache_creation_input_tokens = usage.cache_creation_input_tokens,
cache_read_input_tokens = usage.cache_read_input_tokens,
);
}
})
.ok();
});
@@ -813,7 +750,7 @@ impl Thread {
cache: false,
});
self.pending_summary = cx.spawn(async move |this, cx| {
self.pending_summary = cx.spawn(|this, mut cx| {
async move {
let stream = model.stream_completion_text(request, &cx);
let mut messages = stream.await?;
@@ -830,7 +767,7 @@ impl Thread {
}
}
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if !new_summary.is_empty() {
this.summary = Some(new_summary.into());
}
@@ -841,7 +778,6 @@ impl Thread {
anyhow::Ok(())
}
.log_err()
.await
});
}
@@ -887,10 +823,10 @@ impl Thread {
});
let session = self.scripting_session.clone();
cx.spawn(async move |_, cx| {
cx.spawn(|_, cx| async move {
script_task.await;
let message = session.read_with(cx, |session, _cx| {
let message = session.read_with(&cx, |session, _cx| {
                // Using an id to get the script output seems impractical.
// Why not just include it in the Task result?
// This is because we'll later report the script state as it runs,
@@ -915,12 +851,12 @@ impl Thread {
output: Task<Result<String>>,
cx: &mut Context<Self>,
) {
let insert_output_task = cx.spawn({
let insert_output_task = cx.spawn(|thread, mut cx| {
let tool_use_id = tool_use_id.clone();
async move |thread, cx| {
async move {
let output = output.await;
thread
.update(cx, |thread, cx| {
.update(&mut cx, |thread, cx| {
let pending_tool_use = thread
.tool_use
.insert_tool_output(tool_use_id.clone(), output);
@@ -945,12 +881,12 @@ impl Thread {
output: Task<Result<String>>,
cx: &mut Context<Self>,
) {
let insert_output_task = cx.spawn({
let insert_output_task = cx.spawn(|thread, mut cx| {
let tool_use_id = tool_use_id.clone();
async move |thread, cx| {
async move {
let output = output.await;
thread
.update(cx, |thread, cx| {
.update(&mut cx, |thread, cx| {
let pending_tool_use = thread
.scripting_tool_use
.insert_tool_output(tool_use_id.clone(), output);
@@ -987,7 +923,6 @@ impl Thread {
// so for now we provide some text to keep the model on track.
"Here are the tool results.",
Vec::new(),
None,
cx,
);
}
@@ -1050,7 +985,7 @@ impl Thread {
.map(|worktree| Self::worktree_snapshot(worktree, cx))
.collect();
cx.spawn(async move |_, cx| {
cx.spawn(move |_, cx| async move {
let worktree_snapshots = futures::future::join_all(worktree_snapshots).await;
let mut unsaved_buffers = Vec::new();
@@ -1077,7 +1012,7 @@ impl Thread {
}
fn worktree_snapshot(worktree: Entity<project::Worktree>, cx: &App) -> Task<WorktreeSnapshot> {
cx.spawn(async move |cx| {
cx.spawn(move |cx| async move {
// Get worktree path and snapshot
let worktree_info = cx.update(|app_cx| {
let worktree = worktree.read(app_cx);
@@ -1101,7 +1036,7 @@ impl Thread {
let current_branch = repo_entry.branch().map(|branch| branch.name.to_string());
// Get repository info
let repo_result = worktree.read_with(cx, |worktree, _cx| {
let repo_result = worktree.read_with(&cx, |worktree, _cx| {
if let project::Worktree::Local(local_worktree) = &worktree {
local_worktree.get_local_repo(repo_entry).map(|local_repo| {
let repo = local_repo.repo();
@@ -1116,7 +1051,7 @@ impl Thread {
Ok(Some((remote_url, head_sha, repository))) => {
// Get diff asynchronously
let diff = repository
.diff(git::repository::DiffType::HeadToWorktree, cx.clone())
.diff(git::repository::DiffType::HeadToWorktree, cx)
.await
.ok();
@@ -1191,10 +1126,6 @@ impl Thread {
&self.action_log
}
pub fn project(&self) -> &Entity<Project> {
&self.project
}
pub fn cumulative_token_usage(&self) -> TokenUsage {
self.cumulative_token_usage.clone()
}
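
The telemetry block in the hunk above reports per-completion usage by subtracting the usage captured at request start from the cumulative total. The sketch below shows only that delta with a stand-in `TokenUsage`, not the real `language_model::TokenUsage`.

```rust
// Stand-in `TokenUsage` demonstrating `cumulative_token_usage - initial_usage`:
// subtracting the snapshot taken at request start yields this completion's usage.
use std::ops::Sub;

#[derive(Clone, Copy, Debug, PartialEq)]
struct TokenUsage {
    input_tokens: u64,
    output_tokens: u64,
}

impl Sub for TokenUsage {
    type Output = TokenUsage;

    fn sub(self, rhs: TokenUsage) -> TokenUsage {
        TokenUsage {
            input_tokens: self.input_tokens - rhs.input_tokens,
            output_tokens: self.output_tokens - rhs.output_tokens,
        }
    }
}

fn main() {
    let initial = TokenUsage { input_tokens: 1_200, output_tokens: 300 };
    let cumulative = TokenUsage { input_tokens: 1_950, output_tokens: 420 };
    // The event reports only what this completion consumed.
    assert_eq!(
        cumulative - initial,
        TokenUsage { input_tokens: 750, output_tokens: 120 }
    );
}
```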

View File

@@ -65,14 +65,6 @@ impl ThreadStore {
Ok(this)
}
pub fn context_server_manager(&self) -> Entity<ContextServerManager> {
self.context_server_manager.clone()
}
pub fn tools(&self) -> Arc<ToolWorkingSet> {
self.tools.clone()
}
/// Returns the number of threads.
pub fn thread_count(&self) -> usize {
self.threads.len()
@@ -106,14 +98,14 @@ impl ThreadStore {
) -> Task<Result<Entity<Thread>>> {
let id = id.clone();
let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let database = database_future.await.map_err(|err| anyhow!(err))?;
let thread = database
.try_find_thread(id.clone())
.await?
.ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
cx.new(|cx| {
Thread::deserialize(
id.clone(),
@@ -133,23 +125,23 @@ impl ThreadStore {
thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx)));
let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let serialized_thread = serialized_thread.await?;
let database = database_future.await.map_err(|err| anyhow!(err))?;
database.save_thread(metadata, serialized_thread).await?;
this.update(cx, |this, cx| this.reload(cx))?.await
this.update(&mut cx, |this, cx| this.reload(cx))?.await
})
}
pub fn delete_thread(&mut self, id: &ThreadId, cx: &mut Context<Self>) -> Task<Result<()>> {
let id = id.clone();
let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let database = database_future.await.map_err(|err| anyhow!(err))?;
database.delete_thread(id.clone()).await?;
this.update(cx, |this, _cx| {
this.update(&mut cx, |this, _cx| {
this.threads.retain(|thread| thread.id != id)
})
})
@@ -157,14 +149,14 @@ impl ThreadStore {
pub fn reload(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let threads = database_future
.await
.map_err(|err| anyhow!(err))?
.list_threads()
.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.threads = threads;
cx.notify();
})
@@ -193,7 +185,7 @@ impl ThreadStore {
cx.spawn({
let server = server.clone();
let server_id = server_id.clone();
async move |this, cx| {
|this, mut cx| async move {
let Some(protocol) = server.client() else {
return;
};
@@ -218,7 +210,7 @@ impl ThreadStore {
})
.collect::<Vec<_>>();
this.update(cx, |this, _cx| {
this.update(&mut cx, |this, _cx| {
this.context_server_tool_ids.insert(server_id, tool_ids);
})
.log_err();

View File

@@ -5,19 +5,13 @@ use gpui::Entity;
use scripting_tool::ScriptingTool;
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
use crate::agent_profile::AgentProfile;
pub struct ToolSelector {
profiles: Vec<AgentProfile>,
tools: Arc<ToolWorkingSet>,
}
impl ToolSelector {
pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self {
Self {
profiles: vec![AgentProfile::read_only(), AgentProfile::code_writer()],
tools,
}
Self { tools }
}
fn build_context_menu(
@@ -25,31 +19,9 @@ impl ToolSelector {
window: &mut Window,
cx: &mut Context<Self>,
) -> Entity<ContextMenu> {
let profiles = self.profiles.clone();
let tool_set = self.tools.clone();
ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| {
let icon_position = IconPosition::End;
menu = menu.header("Profiles");
for profile in profiles.clone() {
menu = menu.toggleable_entry(profile.name.clone(), false, icon_position, None, {
let tools = tool_set.clone();
move |_window, cx| {
tools.disable_source(ToolSource::Native, cx);
tools.enable(
ToolSource::Native,
&profile
.tools
.iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
.collect::<Vec<_>>(),
);
}
});
}
menu = menu.separator();
let tools_by_source = tool_set.tools_by_source(cx);
let all_tools_enabled = tool_set.are_all_tools_enabled();

View File

@@ -286,17 +286,9 @@ impl ToolUseState {
) {
if let Some(tool_uses) = self.tool_uses_by_assistant_message.get(&message_id) {
for tool_use in tool_uses {
if self.tool_results.contains_key(&tool_use.id) {
// Do not send tool uses until they are completed
request_message
.content
.push(MessageContent::ToolUse(tool_use.clone()));
} else {
log::debug!(
"skipped tool use {:?} because it is still pending",
tool_use
);
}
request_message
.content
.push(MessageContent::ToolUse(tool_use.clone()));
}
}
}
@@ -309,19 +301,9 @@ impl ToolUseState {
if let Some(tool_uses) = self.tool_uses_by_user_message.get(&message_id) {
for tool_use_id in tool_uses {
if let Some(tool_result) = self.tool_results.get(tool_use_id) {
request_message.content.push(MessageContent::ToolResult(
LanguageModelToolResult {
tool_use_id: tool_use_id.clone(),
is_error: tool_result.is_error,
content: if tool_result.content.is_empty() {
// Surprisingly, the API fails if we return an empty string here.
// It thinks we are sending a tool use without a tool result.
"<Tool returned an empty string>".into()
} else {
tool_result.content.clone()
},
},
));
request_message
.content
.push(MessageContent::ToolResult(tool_result.clone()));
}
}
}
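
One side of the hunk above substitutes a placeholder when a tool returns an empty string, because (per the original comment) the API otherwise treats the message as a tool use without a tool result. A sketch of that guard with a hypothetical `ToolResultMessage` type; the real code builds a `MessageContent::ToolResult` instead.

```rust
// Sketch of the empty-content guard from the hunk above. `ToolResultMessage`
// is hypothetical; the point is only the placeholder substitution for empty
// tool output.
struct ToolResultMessage {
    tool_use_id: String,
    is_error: bool,
    content: String,
}

fn to_request_content(tool_use_id: String, is_error: bool, content: String) -> ToolResultMessage {
    ToolResultMessage {
        tool_use_id,
        is_error,
        content: if content.is_empty() {
            "<Tool returned an empty string>".to_string()
        } else {
            content
        },
    }
}

fn main() {
    let msg = to_request_content("example-tool-use-id".into(), false, String::new());
    assert_eq!(msg.content, "<Tool returned an empty string>");
    println!("{} (error: {}) -> {:?}", msg.tool_use_id, msg.is_error, msg.content);
}
```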

View File

@@ -1144,9 +1144,9 @@ impl AssistantContext {
fn set_language(&mut self, cx: &mut Context<Self>) {
let markdown = self.language_registry.language_for_name("Markdown");
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let markdown = markdown.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.buffer
.update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
})
@@ -1188,7 +1188,7 @@ impl AssistantContext {
return;
};
let debounce = self.token_count.is_some();
self.pending_token_count = cx.spawn(async move |this, cx| {
self.pending_token_count = cx.spawn(|this, mut cx| {
async move {
if debounce {
cx.background_executor()
@@ -1197,14 +1197,13 @@ impl AssistantContext {
}
let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.token_count = Some(token_count);
this.start_cache_warming(&model, cx);
cx.notify()
})
}
.log_err()
.await
});
}
@@ -1343,7 +1342,7 @@ impl AssistantContext {
};
let model = Arc::clone(model);
self.pending_cache_warming_task = cx.spawn(async move |this, cx| {
self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
async move {
match model.stream_completion(request, &cx).await {
Ok(mut stream) => {
@@ -1354,14 +1353,13 @@ impl AssistantContext {
log::warn!("Cache warming failed: {}", e);
}
};
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.update_cache_status_for_completion(cx);
})
.ok();
anyhow::Ok(())
}
.log_err()
.await
});
}
@@ -1918,7 +1916,7 @@ impl AssistantContext {
});
self.reparse(cx);
let insert_output_task = cx.spawn(async move |this, cx| {
let insert_output_task = cx.spawn(|this, mut cx| async move {
let run_command = async {
let mut stream = output.await?;
@@ -1935,7 +1933,7 @@ impl AssistantContext {
while let Some(event) = stream.next().await {
let event = event?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.buffer.update(cx, |buffer, _cx| {
buffer.finalize_last_transaction();
buffer.start_transaction()
@@ -2036,7 +2034,7 @@ impl AssistantContext {
})?;
}
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.buffer.update(cx, |buffer, cx| {
buffer.finalize_last_transaction();
buffer.start_transaction();
@@ -2082,7 +2080,7 @@ impl AssistantContext {
let command_result = run_command.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let version = this.version.clone();
let timestamp = this.next_timestamp();
let Some(invoked_slash_command) = this.invoked_slash_commands.get_mut(&command_id)
@@ -2212,7 +2210,7 @@ impl AssistantContext {
let pending_completion_id = post_inc(&mut self.completion_count);
let task = cx.spawn({
async move |this, cx| {
|this, mut cx| async move {
let stream = model.stream_completion(request, &cx);
let assistant_message_id = assistant_message.id;
let mut response_latency = None;
@@ -2227,7 +2225,7 @@ impl AssistantContext {
}
let event = event?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let message_ix = this
.message_anchors
.iter()
@@ -2266,7 +2264,7 @@ impl AssistantContext {
})?;
smol::future::yield_now().await;
}
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.pending_completions
.retain(|completion| completion.id != pending_completion_id);
this.summarize(false, cx);
@@ -2278,7 +2276,7 @@ impl AssistantContext {
let result = stream_completion.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let error_message = if let Some(error) = result.as_ref().err() {
if error.is::<PaymentRequiredError>() {
cx.emit(ContextEvent::ShowPaymentRequiredError);
@@ -2788,7 +2786,7 @@ impl AssistantContext {
cache: false,
});
self.pending_summary = cx.spawn(async move |this, cx| {
self.pending_summary = cx.spawn(|this, mut cx| {
async move {
let stream = model.stream_completion_text(request, &cx);
let mut messages = stream.await?;
@@ -2797,7 +2795,7 @@ impl AssistantContext {
while let Some(message) = messages.stream.next().await {
let text = message?;
let mut lines = text.lines();
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let version = this.version.clone();
let timestamp = this.next_timestamp();
let summary = this.summary.get_or_insert(ContextSummary::default());
@@ -2821,7 +2819,7 @@ impl AssistantContext {
}
}
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let version = this.version.clone();
let timestamp = this.next_timestamp();
if let Some(summary) = this.summary.as_mut() {
@@ -2839,7 +2837,6 @@ impl AssistantContext {
anyhow::Ok(())
}
.log_err()
.await
});
}
}
@@ -2946,12 +2943,12 @@ impl AssistantContext {
return;
}
self.pending_save = cx.spawn(async move |this, cx| {
self.pending_save = cx.spawn(|this, mut cx| async move {
if let Some(debounce) = debounce {
cx.background_executor().timer(debounce).await;
}
let (old_path, summary) = this.read_with(cx, |this, _| {
let (old_path, summary) = this.read_with(&cx, |this, _| {
let path = this.path.clone();
let summary = if let Some(summary) = this.summary.as_ref() {
if summary.done {
@@ -2966,7 +2963,7 @@ impl AssistantContext {
})?;
if let Some(summary) = summary {
let context = this.read_with(cx, |this, cx| this.serialize(cx))?;
let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
let mut discriminant = 1;
let mut new_path;
loop {
@@ -2998,7 +2995,7 @@ impl AssistantContext {
}
}
this.update(cx, |this, _| this.path = Some(new_path))?;
this.update(&mut cx, |this, _| this.path = Some(new_path))?;
}
Ok(())
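
Most hunks in this file swap between gpui's two spawn shapes: `cx.spawn(async move |this, cx| { .. })`, where an async closure receives the context and callbacks take it directly as `this.update(cx, ..)`, and `cx.spawn(|this, mut cx| async move { .. })`, where a plain closure returns a future and the body threads `&mut cx` into each update. A toy sketch of the second shape, with `FakeCtx` and `spawn_with_future_closure` as made-up stand-ins (nothing here is gpui's API; the `futures` crate is assumed as a dependency):

```rust
use std::future::Future;

struct FakeCtx(String);

// Closure-returning-future shape: the outer closure gets the context by value
// and the async block re-captures it, so the body works on a context it owns.
fn spawn_with_future_closure<F, Fut>(cx: FakeCtx, f: F) -> Fut::Output
where
    F: FnOnce(FakeCtx) -> Fut,
    Fut: Future,
{
    futures::executor::block_on(f(cx))
}

fn main() {
    let out = spawn_with_future_closure(FakeCtx("cx".into()), |mut cx| async move {
        cx.0.push_str(": captured by the async block");
        cx.0
    });
    println!("{out}");
    // The async-closure shape (`async move |this, cx| { .. }`) removes the extra
    // `async move` layer, which is why those call sites drop the `&mut cx` threading.
}
```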

View File

@@ -907,7 +907,7 @@ impl ContextEditor {
if editor_state.opened_patch != patch {
state.update_task = Some({
let this = this.clone();
cx.spawn_in(window, async move |_, cx| {
cx.spawn_in(window, |_, cx| async move {
Self::update_patch_editor(this.clone(), patch, cx)
.await
.log_err();
@@ -1070,9 +1070,10 @@ impl ContextEditor {
})
.ok();
} else {
patch_state.update_task = Some(cx.spawn_in(window, async move |this, cx| {
Self::open_patch_editor(this, new_patch, cx).await.log_err();
}));
patch_state.update_task =
Some(cx.spawn_in(window, move |this, cx| async move {
Self::open_patch_editor(this, new_patch, cx).await.log_err();
}));
}
}
}
@@ -1102,10 +1103,10 @@ impl ContextEditor {
async fn open_patch_editor(
this: WeakEntity<Self>,
patch: AssistantPatch,
cx: &mut AsyncWindowContext,
mut cx: AsyncWindowContext,
) -> Result<()> {
let project = this.read_with(cx, |this, _| this.project.clone())?;
let resolved_patch = patch.resolve(project.clone(), cx).await;
let project = this.read_with(&cx, |this, _| this.project.clone())?;
let resolved_patch = patch.resolve(project.clone(), &mut cx).await;
let editor = cx.new_window_entity(|window, cx| {
let editor = ProposedChangesEditor::new(
@@ -1129,7 +1130,7 @@ impl ContextEditor {
editor
})?;
this.update(cx, |this, _| {
this.update(&mut cx, |this, _| {
if let Some(patch_state) = this.patches.get_mut(&patch.range) {
patch_state.editor = Some(PatchEditorState {
editor: editor.downgrade(),
@@ -1138,8 +1139,8 @@ impl ContextEditor {
patch_state.update_task.take();
}
})?;
this.read_with(cx, |this, _| this.workspace.clone())?
.update_in(cx, |workspace, window, cx| {
this.read_with(&cx, |this, _| this.workspace.clone())?
.update_in(&mut cx, |workspace, window, cx| {
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, false, window, cx)
})
.log_err();
@@ -1150,11 +1151,11 @@ impl ContextEditor {
async fn update_patch_editor(
this: WeakEntity<Self>,
patch: AssistantPatch,
cx: &mut AsyncWindowContext,
mut cx: AsyncWindowContext,
) -> Result<()> {
let project = this.update(cx, |this, _| this.project.clone())?;
let resolved_patch = patch.resolve(project.clone(), cx).await;
this.update_in(cx, |this, window, cx| {
let project = this.update(&mut cx, |this, _| this.project.clone())?;
let resolved_patch = patch.resolve(project.clone(), &mut cx).await;
this.update_in(&mut cx, |this, window, cx| {
let patch_state = this.patches.get_mut(&patch.range)?;
let locations = resolved_patch
@@ -1624,14 +1625,14 @@ impl ContextEditor {
.map(|path| Workspace::project_path_for_path(project.clone(), &path, false, cx))
.collect::<Vec<_>>();
cx.spawn(async move |_, cx| {
cx.spawn(move |_, cx| async move {
let mut paths = vec![];
let mut worktrees = vec![];
let opened_paths = futures::future::join_all(tasks).await;
for (worktree, project_path) in opened_paths.into_iter().flatten() {
let Ok(worktree_root_name) =
worktree.read_with(cx, |worktree, _| worktree.root_name().to_string())
worktree.read_with(&cx, |worktree, _| worktree.root_name().to_string())
else {
continue;
};
@@ -1648,12 +1649,12 @@ impl ContextEditor {
};
window
.spawn(cx, async move |cx| {
.spawn(cx, |mut cx| async move {
let (paths, dragged_file_worktrees) = paths.await;
let cmd_name = FileSlashCommand.name();
context_editor_view
.update_in(cx, |context_editor, window, cx| {
.update_in(&mut cx, |context_editor, window, cx| {
let file_argument = paths
.into_iter()
.map(|path| path.to_string_lossy().to_string())
@@ -2199,9 +2200,9 @@ impl ContextEditor {
.log_err();
if let Some(client) = client {
cx.spawn(async move |this, cx| {
client.authenticate_and_connect(true, cx).await?;
this.update(cx, |_, cx| cx.notify())
cx.spawn(|this, mut cx| async move {
client.authenticate_and_connect(true, &mut cx).await?;
this.update(&mut cx, |_, cx| cx.notify())
})
.detach_and_log_err(cx)
}
@@ -3160,10 +3161,10 @@ impl FollowableItem for ContextEditor {
assistant_panel_delegate.open_remote_context(workspace, context_id, window, cx)
});
Some(window.spawn(cx, async move |cx| {
Some(window.spawn(cx, |mut cx| async move {
let context_editor = context_editor_task.await?;
context_editor
.update_in(cx, |context_editor, window, cx| {
.update_in(&mut cx, |context_editor, window, cx| {
context_editor.remote_id = Some(id);
context_editor.editor.update(cx, |editor, cx| {
editor.apply_update_proto(

View File

@@ -164,9 +164,9 @@ impl PickerDelegate for SavedContextPickerDelegate {
cx: &mut Context<Picker<Self>>,
) -> Task<()> {
let search = self.store.read(cx).search(query, cx);
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let matches = search.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let host_contexts = this.delegate.store.read(cx).host_contexts();
this.delegate.matches = host_contexts
.iter()

View File

@@ -100,7 +100,7 @@ impl ContextStore {
let fs = project.read(cx).fs().clone();
let languages = project.read(cx).languages().clone();
let telemetry = project.read(cx).client().telemetry().clone();
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100);
let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await;
@@ -125,15 +125,16 @@ impl ContextStore {
languages,
slash_commands,
telemetry,
_watch_updates: cx.spawn(async move |this, cx| {
_watch_updates: cx.spawn(|this, mut cx| {
async move {
while events.next().await.is_some() {
this.update(cx, |this, cx| this.reload(cx))?.await.log_err();
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
.log_err();
}
anyhow::Ok(())
}
.log_err()
.await
}),
client_subscription: None,
_project_subscriptions: vec![
@@ -394,7 +395,7 @@ impl ContextStore {
let prompt_builder = self.prompt_builder.clone();
let slash_commands = self.slash_commands.clone();
let request = self.client.request(proto::CreateContext { project_id });
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let response = request.await?;
let context_id = ContextId::from_proto(response.context_id);
let context_proto = response.context.context("invalid context")?;
@@ -420,8 +421,8 @@ impl ContextStore {
.collect::<Result<Vec<_>>>()
})
.await?;
context.update(cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(cx, |this, cx| {
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(&mut cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context
} else {
@@ -456,7 +457,7 @@ impl ContextStore {
let prompt_builder = self.prompt_builder.clone();
let slash_commands = self.slash_commands.clone();
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let saved_context = load.await?;
let context = cx.new(|cx| {
AssistantContext::deserialize(
@@ -470,7 +471,7 @@ impl ContextStore {
cx,
)
})?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_path(&path, cx) {
existing_context
} else {
@@ -488,7 +489,7 @@ impl ContextStore {
) -> Task<Result<()>> {
let fs = self.fs.clone();
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
fs.remove_file(
&path,
RemoveOptions {
@@ -498,7 +499,7 @@ impl ContextStore {
)
.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.contexts.retain(|context| {
context
.upgrade()
@@ -564,7 +565,7 @@ impl ContextStore {
});
let prompt_builder = self.prompt_builder.clone();
let slash_commands = self.slash_commands.clone();
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let response = request.await?;
let context_proto = response.context.context("invalid context")?;
let context = cx.new(|cx| {
@@ -589,8 +590,8 @@ impl ContextStore {
.collect::<Result<Vec<_>>>()
})
.await?;
context.update(cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(cx, |this, cx| {
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(&mut cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context
} else {
@@ -699,12 +700,12 @@ impl ContextStore {
project_id,
contexts,
});
cx.spawn(async move |this, cx| {
cx.spawn(|this, cx| async move {
let response = request.await?;
let mut context_ids = Vec::new();
let mut operations = Vec::new();
this.read_with(cx, |this, cx| {
this.read_with(&cx, |this, cx| {
for context_version_proto in response.contexts {
let context_version = ContextVersion::from_proto(&context_version_proto);
let context_id = ContextId::from_proto(context_version_proto.context_id);
@@ -767,7 +768,7 @@ impl ContextStore {
fn reload(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
let fs = self.fs.clone();
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
fs.create_dir(contexts_dir()).await?;
let mut paths = fs.read_dir(contexts_dir()).await?;
@@ -807,7 +808,7 @@ impl ContextStore {
}
contexts.sort_unstable_by_key(|context| Reverse(context.mtime));
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.contexts_metadata = contexts;
cx.notify();
})
@@ -818,7 +819,7 @@ impl ContextStore {
cx.update_entity(
&self.context_server_manager,
|context_server_manager, cx| {
for server in context_server_manager.running_servers() {
for server in context_server_manager.servers() {
context_server_manager
.restart_server(&server.id(), cx)
.detach_and_log_err(cx);
@@ -849,7 +850,7 @@ impl ContextStore {
cx.spawn({
let server = server.clone();
let server_id = server_id.clone();
async move |this, cx| {
|this, mut cx| async move {
let Some(protocol) = server.client() else {
return;
};
@@ -874,7 +875,7 @@ impl ContextStore {
})
.collect::<Vec<_>>();
            this.update(cx, |this, _cx| {
this.update(&mut cx, |this, _cx| {
this.context_server_slash_command_ids
.insert(server_id.clone(), slash_command_ids);
})

View File

@@ -59,7 +59,7 @@ impl SlashCommandCompletionProvider {
let command_name = command_name.to_string();
let editor = self.editor.clone();
let workspace = self.workspace.clone();
window.spawn(cx, async move |cx| {
window.spawn(cx, |mut cx| async move {
let matches = match_strings(
&candidates,
&command_name,

View File

@@ -100,7 +100,7 @@ impl PickerDelegate for SlashCommandDelegate {
cx: &mut Context<Picker<Self>>,
) -> Task<()> {
let all_commands = self.all_commands.clone();
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
let filtered_commands = cx
.background_spawn(async move {
if query.is_empty() {
@@ -119,7 +119,7 @@ impl PickerDelegate for SlashCommandDelegate {
})
.await;
this.update_in(cx, |this, window, cx| {
this.update_in(&mut cx, |this, window, cx| {
this.delegate.filtered_commands = filtered_commands;
this.delegate.set_selected_index(0, window, cx);
cx.notify();

View File

@@ -63,14 +63,14 @@ impl Eval {
model: Arc<dyn LanguageModel>,
cx: &mut App,
) -> Task<anyhow::Result<EvalOutput>> {
cx.spawn(async move |cx| {
cx.spawn(move |mut cx| async move {
checkout_repo(&self.eval_setup, &self.repo_path).await?;
let (assistant, done_rx) =
cx.update(|cx| HeadlessAssistant::new(app_state.clone(), cx))??;
let _worktree = assistant
.update(cx, |assistant, cx| {
.update(&mut cx, |assistant, cx| {
assistant.project.update(cx, |project, cx| {
project.create_worktree(&self.repo_path, true, cx)
})
@@ -79,10 +79,10 @@ impl Eval {
let start_time = std::time::SystemTime::now();
assistant.update(cx, |assistant, cx| {
assistant.update(&mut cx, |assistant, cx| {
assistant.thread.update(cx, |thread, cx| {
let context = vec![];
thread.insert_user_message(self.user_prompt.clone(), context, None, cx);
thread.insert_user_message(self.user_prompt.clone(), context, cx);
thread.send_to_model(model, RequestKind::Chat, cx);
});
})?;
@@ -93,7 +93,7 @@ impl Eval {
let diff = query_git(&self.repo_path, vec!["diff"]).await?;
assistant.update(cx, |assistant, cx| {
assistant.update(&mut cx, |assistant, cx| {
let thread = assistant.thread.read(cx);
let last_message = thread.messages().last().unwrap();
if last_message.role != language_model::Role::Assistant {

View File

@@ -212,7 +212,7 @@ pub fn authenticate_model_provider(
pub async fn send_language_model_request(
model: Arc<dyn LanguageModel>,
request: LanguageModelRequest,
cx: &mut AsyncApp,
cx: AsyncApp,
) -> anyhow::Result<String> {
match model.stream_completion_text(request, &cx).await {
Ok(mut stream) => {

View File

@@ -61,7 +61,7 @@ impl Judge {
};
let model = self.model.clone();
cx.spawn(async move |cx| send_language_model_request(model, request, cx).await)
cx.spawn(move |cx| send_language_model_request(model, request, cx))
}
}

View File

@@ -111,7 +111,7 @@ fn main() {
let editor_model_provider_id = editor_model.provider_id();
let judge_model_provider_id = judge_model.provider_id();
cx.spawn(async move |cx| {
cx.spawn(move |cx| async move {
// Authenticate all model providers first
cx.update(|cx| authenticate_model_provider(model_provider_id.clone(), cx))
.unwrap()

View File

@@ -77,8 +77,8 @@ impl SlashCommand for AutoCommand {
let cx: &mut App = cx;
cx.spawn(async move |cx| {
let task = project_index.read_with(cx, |project_index, cx| {
cx.spawn(|cx: gpui::AsyncApp| async move {
let task = project_index.read_with(&cx, |project_index, cx| {
project_index.flush_summary_backlogs(cx)
})?;
@@ -117,9 +117,9 @@ impl SlashCommand for AutoCommand {
return Task::ready(Err(anyhow!("no project indexer")));
};
let task = window.spawn(cx, async move |cx| {
let task = window.spawn(cx, |cx| async move {
let summaries = project_index
.read_with(cx, |project_index, cx| project_index.all_summaries(cx))?
.read_with(&cx, |project_index, cx| project_index.all_summaries(cx))?
.await?;
commands_for_summaries(&summaries, &original_prompt, &cx).await

View File

@@ -186,7 +186,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
window.spawn(cx, async move |_| {
window.spawn(cx, move |_| async move {
task.await?
.map(|output| output.to_event_stream())
.ok_or_else(|| anyhow!("No diagnostics found"))
@@ -268,7 +268,7 @@ fn collect_diagnostics(
})
.collect();
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
let mut output = SlashCommandOutput::default();
if let Some(error_source) = error_source.as_ref() {
@@ -299,7 +299,7 @@ fn collect_diagnostics(
}
if let Some(buffer) = project_handle
.update(cx, |project, cx| project.open_buffer(project_path, cx))?
.update(&mut cx, |project, cx| project.open_buffer(project_path, cx))?
.await
.log_err()
{

View File

@@ -241,7 +241,7 @@ fn collect_files(
.collect::<Vec<_>>();
let (events_tx, events_rx) = mpsc::unbounded();
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
for snapshot in snapshots {
let worktree_id = snapshot.id();
let mut directory_stack: Vec<Arc<Path>> = Vec::new();
@@ -352,7 +352,7 @@ fn collect_files(
)))?;
} else if entry.is_file() {
let Some(open_buffer_task) = project_handle
.update(cx, |project, cx| {
.update(&mut cx, |project, cx| {
project.open_buffer((worktree_id, &entry.path), cx)
})
.ok()
@@ -361,7 +361,7 @@ fn collect_files(
};
if let Some(buffer) = open_buffer_task.await.log_err() {
let mut output = SlashCommandOutput::default();
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
append_buffer_to_output(
&snapshot,
Some(&path_including_worktree_name),

View File

@@ -99,7 +99,7 @@ impl SlashCommand for ProjectSlashCommand {
return Task::ready(Err(anyhow::anyhow!("no project indexer")));
};
window.spawn(cx, async move |cx| {
window.spawn(cx, |mut cx| async move {
let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;
let prompt =
@@ -123,7 +123,7 @@ impl SlashCommand for ProjectSlashCommand {
.search_queries;
let results = project_index
.read_with(cx, |project_index, cx| {
.read_with(&cx, |project_index, cx| {
project_index.search(search_queries.clone(), 25, cx)
})?
.await?;

View File

@@ -109,9 +109,9 @@ impl SlashCommand for SearchSlashCommand {
return Task::ready(Err(anyhow::anyhow!("no project indexer")));
};
window.spawn(cx, async move |cx| {
window.spawn(cx, |cx| async move {
let results = project_index
.read_with(cx, |project_index, cx| {
.read_with(&cx, |project_index, cx| {
project_index.search(vec![query.clone()], limit.unwrap_or(5), cx)
})?
.await?;

View File

@@ -86,7 +86,7 @@ impl SlashCommand for TabSlashCommand {
tab_items_for_queries(workspace, &[current_query], cancel, false, window, cx);
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
window.spawn(cx, async move |_| {
window.spawn(cx, |_| async move {
let tab_items = tab_items_search.await?;
let run_command = tab_items.len() == 1;
let tab_completion_items = tab_items.into_iter().filter_map(|(path, ..)| {
@@ -172,11 +172,11 @@ fn tab_items_for_queries(
) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> {
let empty_query = queries.is_empty() || queries.iter().all(|query| query.trim().is_empty());
let queries = queries.to_owned();
window.spawn(cx, async move |cx| {
window.spawn(cx, |mut cx| async move {
let mut open_buffers =
workspace
.context("no workspace")?
.update(cx, |workspace, cx| {
.update(&mut cx, |workspace, cx| {
if strict_match && empty_query {
let snapshot = active_item_buffer(workspace, cx)?;
let full_path = snapshot.resolve_file_path(cx, true);

View File

@@ -43,4 +43,3 @@ language = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
workspace = { workspace = true, features = ["test-support"] }
unindent.workspace = true

View File

@@ -50,7 +50,7 @@ impl Tool for BashTool {
};
let working_directory = worktree.read(cx).abs_path();
cx.spawn(async move |_| {
cx.spawn(|_| async move {
// Add 2>&1 to merge stderr into stdout for proper interleaving.
let command = format!("({}) 2>&1", input.command);
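
A self-contained sketch of the redirection trick in the hunk above, assuming a Unix `sh` is available; `run_merged` is an illustrative helper, not the tool's real implementation:

```rust
use std::process::Command;

// Wrap the command in a subshell and send stderr to stdout so both streams
// interleave in the order they were produced.
fn run_merged(user_command: &str) -> std::io::Result<String> {
    let wrapped = format!("({user_command}) 2>&1");
    let output = Command::new("sh").arg("-c").arg(&wrapped).output()?;
    Ok(String::from_utf8_lossy(&output.stdout).into_owned())
}

fn main() -> std::io::Result<()> {
    println!("{}", run_merged("echo out; echo err 1>&2")?);
    Ok(())
}
```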

View File

@@ -65,10 +65,10 @@ impl Tool for DiagnosticsTool {
};
let buffer = project.update(cx, |project, cx| project.open_buffer(project_path, cx));
cx.spawn(async move |cx| {
cx.spawn(|cx| async move {
let mut output = String::new();
let buffer = buffer.await?;
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
let snapshot = buffer.read_with(&cx, |buffer, _cx| buffer.snapshot())?;
for (_, group) in snapshot.diagnostic_groups(None) {
let entry = &group.entries[group.primary_ix];

View File

@@ -1,6 +1,5 @@
mod edit_action;
pub mod log;
mod replace;
use anyhow::{anyhow, Context, Result};
use assistant_tool::{ActionLog, Tool};
@@ -12,12 +11,12 @@ use language_model::{
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
};
use log::{EditToolLog, EditToolRequestId};
use project::Project;
use replace::{replace_exact, replace_with_flexible_indent};
use project::{search::SearchQuery, Project};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::fmt::Write;
use std::sync::Arc;
use util::paths::PathMatcher;
use util::ResultExt;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
@@ -104,7 +103,7 @@ impl Tool for EditFilesTool {
cx,
);
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
let result = task.await;
let str_result = match &result {
@@ -112,8 +111,10 @@ impl Tool for EditFilesTool {
Err(err) => Err(err.to_string()),
};
log.update(cx, |log, cx| log.set_tool_output(req_id, str_result, cx))
.log_err();
log.update(&mut cx, |log, cx| {
log.set_tool_output(req_id, str_result, cx)
})
.log_err();
result
})
@@ -187,7 +188,7 @@ impl EditToolRequest {
cache: false,
});
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
let llm_request = LanguageModelRequest {
messages,
tools: vec![],
@@ -210,10 +211,10 @@ impl EditToolRequest {
};
while let Some(chunk) = chunks.stream.next().await {
request.process_response_chunk(&chunk?, cx).await?;
request.process_response_chunk(&chunk?, &mut cx).await?;
}
request.finalize(cx).await
request.finalize(&mut cx).await
})
}
@@ -290,18 +291,41 @@ impl EditToolRequest {
file_path: std::path::PathBuf,
snapshot: language::BufferSnapshot,
) -> Result<DiffResult> {
let result =
// Try to match exactly
replace_exact(&old, &new, &snapshot)
.await
// If that fails, try being flexible about indentation
.or_else(|| replace_with_flexible_indent(&old, &new, &snapshot));
let query = SearchQuery::text(
old.clone(),
false,
true,
true,
PathMatcher::new(&[])?,
PathMatcher::new(&[])?,
None,
)?;
let Some(diff) = result else {
return anyhow::Ok(DiffResult::BadSearch(BadSearch {
search: old,
let matches = query.search(&snapshot, None).await;
if matches.is_empty() {
return Ok(DiffResult::BadSearch(BadSearch {
search: new.clone(),
file_path: file_path.display().to_string(),
}));
}
let edit_range = matches[0].clone();
let diff = language::text_diff(&old, &new);
let edits = diff
.into_iter()
.map(|(old_range, text)| {
let start = edit_range.start + old_range.start;
let end = edit_range.start + old_range.end;
(start..end, text)
})
.collect::<Vec<_>>();
let diff = language::Diff {
base_version: snapshot.version().clone(),
line_ending: snapshot.line_ending(),
edits,
};
anyhow::Ok(DiffResult::Diff(diff))
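
The search-based branch above rebases a local diff of `old` against `new` onto the buffer by offsetting every edit range by the match's start. A standalone sketch of that arithmetic with plain std types (`rebase_edits` is illustrative, not Zed's API):

```rust
use std::ops::Range;

// Offsets from diffing `old` vs `new` are relative to `old`, so shift them by
// the position where `old` was found in the buffer.
fn rebase_edits(
    match_start: usize,
    local_edits: Vec<(Range<usize>, String)>,
) -> Vec<(Range<usize>, String)> {
    local_edits
        .into_iter()
        .map(|(old_range, text)| {
            (match_start + old_range.start..match_start + old_range.end, text)
        })
        .collect()
}

fn main() {
    // Suppose `old` matched at buffer offset 100 and the diff replaced bytes 5..8.
    let edits = rebase_edits(100, vec![(5..8, "replacement".to_string())]);
    assert_eq!(edits[0].0, 105..108);
}
```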
@@ -354,29 +378,25 @@ impl EditToolRequest {
if !self.bad_searches.is_empty() {
writeln!(
&mut output,
"\n\n# {} SEARCH/REPLACE block(s) failed to match:\n",
self.bad_searches.len()
"\n\nThese searches failed because they didn't match any strings:"
)?;
for replace in self.bad_searches {
writeln!(
&mut output,
"## No exact match in: {}\n```\n{}\n```\n",
replace.file_path, replace.search,
"- '{}' does not appear in `{}`",
replace.search.replace("\r", "\\r").replace("\n", "\\n"),
replace.file_path
)?;
}
write!(&mut output,
"The SEARCH section must exactly match an existing block of lines including all white \
space, comments, indentation, docstrings, etc."
)?;
write!(&mut output, "Make sure to use exact searches.")?;
}
if !errors.is_empty() {
writeln!(
&mut output,
"\n\n# {} SEARCH/REPLACE blocks failed to parse:",
errors.len()
"\n\nThese SEARCH/REPLACE blocks failed to parse:"
)?;
for error in errors {
@@ -384,22 +404,10 @@ impl EditToolRequest {
}
}
if changed_buffer_count > 0 {
writeln!(
&mut output,
"\n\nThe other SEARCH/REPLACE blocks were applied successfully. Do not re-send them!",
)?;
}
writeln!(
&mut output,
"{}You can fix errors by running the tool again. You can include instructions, \
but errors are part of the conversation so you don't need to repeat them.",
if changed_buffer_count == 0 {
"\n\n"
} else {
""
}
"\nYou can fix errors by running the tool again. You can include instructions, \
but errors are part of the conversation so you don't need to repeat them."
)?;
Err(anyhow!(output))

View File

@@ -1,525 +0,0 @@
use language::{BufferSnapshot, Diff, Point, ToOffset};
use project::search::SearchQuery;
use util::{paths::PathMatcher, ResultExt as _};
/// Performs an exact string replacement in a buffer, requiring precise character-for-character matching.
/// Uses the search functionality to locate the first occurrence of the exact string.
/// Returns None if no exact match is found in the buffer.
pub async fn replace_exact(old: &str, new: &str, snapshot: &BufferSnapshot) -> Option<Diff> {
let query = SearchQuery::text(
old,
false,
true,
true,
PathMatcher::new(&[]).ok()?,
PathMatcher::new(&[]).ok()?,
None,
)
.log_err()?;
let matches = query.search(&snapshot, None).await;
if matches.is_empty() {
return None;
}
let edit_range = matches[0].clone();
let diff = language::text_diff(&old, &new);
let edits = diff
.into_iter()
.map(|(old_range, text)| {
let start = edit_range.start + old_range.start;
let end = edit_range.start + old_range.end;
(start..end, text)
})
.collect::<Vec<_>>();
let diff = language::Diff {
base_version: snapshot.version().clone(),
line_ending: snapshot.line_ending(),
edits,
};
Some(diff)
}
/// Performs a replacement that's indentation-aware - matches text content ignoring leading whitespace differences.
/// When replacing, preserves the indentation level found in the buffer at each matching line.
/// Returns None if no match found or if indentation is offset inconsistently across matched lines.
pub fn replace_with_flexible_indent(old: &str, new: &str, buffer: &BufferSnapshot) -> Option<Diff> {
let (old_lines, old_min_indent) = lines_with_min_indent(old);
let (new_lines, new_min_indent) = lines_with_min_indent(new);
let min_indent = old_min_indent.min(new_min_indent);
let old_lines = drop_lines_prefix(&old_lines, min_indent);
let new_lines = drop_lines_prefix(&new_lines, min_indent);
let max_row = buffer.max_point().row;
'windows: for start_row in 0..max_row.saturating_sub(old_lines.len() as u32 - 1) {
let mut common_leading = None;
let end_row = start_row + old_lines.len() as u32 - 1;
if end_row > max_row {
// The buffer ends before fully matching the pattern
return None;
}
let start_point = Point::new(start_row, 0);
let end_point = Point::new(end_row, buffer.line_len(end_row));
let range = start_point.to_offset(buffer)..end_point.to_offset(buffer);
let window_text = buffer.text_for_range(range.clone());
let mut window_lines = window_text.lines();
let mut old_lines_iter = old_lines.iter();
while let (Some(window_line), Some(old_line)) = (window_lines.next(), old_lines_iter.next())
{
let line_trimmed = window_line.trim_start();
if line_trimmed != old_line.trim_start() {
continue 'windows;
}
if line_trimmed.is_empty() {
continue;
}
let line_leading = &window_line[..window_line.len() - old_line.len()];
match &common_leading {
Some(common_leading) if common_leading != line_leading => {
continue 'windows;
}
Some(_) => (),
None => common_leading = Some(line_leading.to_string()),
}
}
if let Some(common_leading) = common_leading {
let line_ending = buffer.line_ending();
let replacement = new_lines
.iter()
.map(|new_line| {
if new_line.trim().is_empty() {
new_line.to_string()
} else {
common_leading.to_string() + new_line
}
})
.collect::<Vec<_>>()
.join(line_ending.as_str());
let diff = Diff {
base_version: buffer.version().clone(),
line_ending,
edits: vec![(range, replacement.into())],
};
return Some(diff);
}
}
None
}
fn drop_lines_prefix<'a>(lines: &'a [&str], prefix_len: usize) -> Vec<&'a str> {
lines
.iter()
.map(|line| line.get(prefix_len..).unwrap_or(""))
.collect()
}
fn lines_with_min_indent(input: &str) -> (Vec<&str>, usize) {
let mut lines = Vec::new();
let mut min_indent: Option<usize> = None;
for line in input.lines() {
lines.push(line);
if !line.trim().is_empty() {
let indent = line.len() - line.trim_start().len();
min_indent = Some(min_indent.map_or(indent, |m| m.min(indent)));
}
}
(lines, min_indent.unwrap_or(0))
}
#[cfg(test)]
mod tests {
use super::*;
use gpui::prelude::*;
use gpui::TestAppContext;
use unindent::Unindent;
#[gpui::test]
fn test_replace_consistent_indentation(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
let x = 5;
println!("x = {}", x);
let y = 10;
}
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 42;
println!("New value: {}", x);
"#
.unindent();
let expected = r#"
fn test() {
let x = 42;
println!("New value: {}", x);
let y = 10;
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[gpui::test]
fn test_replace_inconsistent_indentation(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
if condition {
println!("{}", 43);
}
}
"#
.unindent();
let old = r#"
if condition {
println!("{}", 43);
"#
.unindent();
let new = r#"
if condition {
println!("{}", 42);
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
None
);
}
#[gpui::test]
fn test_replace_with_empty_lines(cx: &mut TestAppContext) {
// Test with empty lines
let whole = r#"
fn test() {
let x = 5;
println!("x = {}", x);
}
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 10;
println!("New x: {}", x);
"#
.unindent();
let expected = r#"
fn test() {
let x = 10;
println!("New x: {}", x);
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[gpui::test]
fn test_replace_no_match(cx: &mut TestAppContext) {
// Test with no match
let whole = r#"
fn test() {
let x = 5;
}
"#
.unindent();
let old = r#"
let y = 10;
"#
.unindent();
let new = r#"
let y = 20;
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
None
);
}
#[gpui::test]
fn test_replace_whole_ends_before_matching_old(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
let x = 5;
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 10;
println!("x = {}", x);
"#
.unindent();
// Should return None because whole doesn't fully contain the old text
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
None
);
}
#[test]
fn test_lines_with_min_indent() {
// Empty string
assert_eq!(lines_with_min_indent(""), (vec![], 0));
// Single line without indentation
assert_eq!(lines_with_min_indent("hello"), (vec!["hello"], 0));
// Multiple lines with no indentation
assert_eq!(
lines_with_min_indent("line1\nline2\nline3"),
(vec!["line1", "line2", "line3"], 0)
);
// Multiple lines with consistent indentation
assert_eq!(
lines_with_min_indent(" line1\n line2\n line3"),
(vec![" line1", " line2", " line3"], 2)
);
// Multiple lines with varying indentation
assert_eq!(
lines_with_min_indent(" line1\n line2\n line3"),
(vec![" line1", " line2", " line3"], 2)
);
// Lines with mixed indentation and empty lines
assert_eq!(
lines_with_min_indent(" line1\n\n line2"),
(vec![" line1", "", " line2"], 2)
);
}
#[gpui::test]
fn test_replace_with_missing_indent_uneven_match(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
if true {
let x = 5;
println!("x = {}", x);
}
}
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 42;
println!("x = {}", x);
"#
.unindent();
let expected = r#"
fn test() {
if true {
let x = 42;
println!("x = {}", x);
}
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[gpui::test]
fn test_replace_big_example(cx: &mut TestAppContext) {
let whole = r#"
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
}
"#
.unindent();
let old = r#"
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
"#
.unindent();
let new = r#"
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
#[test]
fn test_group_people_by_age() {
let people = vec![
Person::new("Young One", 5, "young@example.com").unwrap(),
Person::new("Teen One", 15, "teen@example.com").unwrap(),
Person::new("Teen Two", 18, "teen2@example.com").unwrap(),
Person::new("Adult One", 25, "adult@example.com").unwrap(),
];
let groups = group_people_by_age(&people);
assert_eq!(groups.get(&0).unwrap().len(), 1); // One person in 0-9
assert_eq!(groups.get(&10).unwrap().len(), 2); // Two people in 10-19
assert_eq!(groups.get(&20).unwrap().len(), 1); // One person in 20-29
}
"#
.unindent();
let expected = r#"
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
#[test]
fn test_group_people_by_age() {
let people = vec![
Person::new("Young One", 5, "young@example.com").unwrap(),
Person::new("Teen One", 15, "teen@example.com").unwrap(),
Person::new("Teen Two", 18, "teen2@example.com").unwrap(),
Person::new("Adult One", 25, "adult@example.com").unwrap(),
];
let groups = group_people_by_age(&people);
assert_eq!(groups.get(&0).unwrap().len(), 1); // One person in 0-9
assert_eq!(groups.get(&10).unwrap().len(), 2); // Two people in 10-19
assert_eq!(groups.get(&20).unwrap().len(), 1); // One person in 20-29
}
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[test]
fn test_drop_lines_prefix() {
// Empty array
assert_eq!(drop_lines_prefix(&[], 2), Vec::<&str>::new());
// Zero prefix length
assert_eq!(
drop_lines_prefix(&["line1", "line2"], 0),
vec!["line1", "line2"]
);
// Normal prefix drop
assert_eq!(
drop_lines_prefix(&[" line1", " line2"], 2),
vec!["line1", "line2"]
);
// Prefix longer than some lines
assert_eq!(drop_lines_prefix(&[" line1", "a"], 2), vec!["line1", ""]);
// Prefix longer than all lines
assert_eq!(drop_lines_prefix(&["a", "b"], 5), vec!["", ""]);
// Mixed length lines
assert_eq!(
drop_lines_prefix(&[" line1", " line2", " line3"], 2),
vec![" line1", "line2", " line3"]
);
}
fn test_replace_with_flexible_indent(
cx: &mut TestAppContext,
whole: &str,
old: &str,
new: &str,
) -> Option<String> {
// Create a local buffer with the test content
let buffer = cx.new(|cx| language::Buffer::local(whole, cx));
// Get the buffer snapshot
let buffer_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
// Call replace_flexible and transform the result
replace_with_flexible_indent(old, new, &buffer_snapshot).map(|diff| {
buffer.update(cx, |buffer, cx| {
let _ = buffer.apply_diff(diff, cx);
buffer.text()
})
})
}
}

View File

@@ -70,30 +70,39 @@ impl Tool for ReadFileTool {
return Task::ready(Err(anyhow!("Path not found in project")));
};
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
let buffer = cx
.update(|cx| {
project.update(cx, |project, cx| project.open_buffer(project_path, cx))
})?
.await?;
let result = buffer.read_with(cx, |buffer, _cx| {
let text = buffer.text();
if input.start_line.is_some() || input.end_line.is_some() {
let start = input.start_line.unwrap_or(1);
let lines = text.split('\n').skip(start - 1);
if let Some(end) = input.end_line {
let count = end.saturating_sub(start);
Itertools::intersperse(lines.take(count), "\n").collect()
let result = buffer.read_with(&cx, |buffer, _cx| {
if buffer
.file()
.map_or(false, |file| file.disk_state().exists())
{
let text = buffer.text();
let string = if input.start_line.is_some() || input.end_line.is_some() {
let start = input.start_line.unwrap_or(1);
let lines = text.split('\n').skip(start - 1);
if let Some(end) = input.end_line {
let count = end.saturating_sub(start);
Itertools::intersperse(lines.take(count), "\n").collect()
} else {
Itertools::intersperse(lines, "\n").collect()
}
} else {
Itertools::intersperse(lines, "\n").collect()
}
} else {
text
}
})?;
text
};
action_log.update(cx, |log, cx| {
Ok(string)
} else {
Err(anyhow!("File does not exist"))
}
})??;
action_log.update(&mut cx, |log, cx| {
log.buffer_read(buffer, cx);
})?;
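
The line-range handling above is 1-based and keeps `end_line - start_line` lines starting at `start_line`. A standalone sketch of just that slicing, using `join` instead of `Itertools::intersperse`; `slice_lines` is illustrative, not the tool's API:

```rust
fn slice_lines(text: &str, start_line: Option<usize>, end_line: Option<usize>) -> String {
    if start_line.is_none() && end_line.is_none() {
        return text.to_string();
    }
    let start = start_line.unwrap_or(1); // 1-based, defaults to the first line
    let lines = text.split('\n').skip(start - 1);
    match end_line {
        Some(end) => {
            // Keeps `end - start` lines, so the `end` line itself is not included.
            let count = end.saturating_sub(start);
            lines.take(count).collect::<Vec<_>>().join("\n")
        }
        None => lines.collect::<Vec<_>>().join("\n"),
    }
}

fn main() {
    let text = "a\nb\nc\nd";
    assert_eq!(slice_lines(text, Some(2), Some(4)), "b\nc");
    assert_eq!(slice_lines(text, Some(3), None), "c\nd");
    assert_eq!(slice_lines(text, None, None), text);
}
```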

View File

@@ -73,7 +73,7 @@ impl Tool for RegexSearchTool {
let results = project.update(cx, |project, cx| project.search(query, cx));
        cx.spawn(async move |cx| {
cx.spawn(|cx| async move {
futures::pin_mut!(results);
let mut output = String::new();
@@ -86,7 +86,7 @@ impl Tool for RegexSearchTool {
continue;
}
buffer.read_with(cx, |buffer, cx| -> Result<(), anyhow::Error> {
buffer.read_with(&cx, |buffer, cx| -> Result<(), anyhow::Error> {
if let Some(path) = buffer.file().map(|file| file.full_path(cx)) {
let mut file_header_written = false;
let mut ranges = ranges

View File

@@ -252,9 +252,11 @@ impl AutoUpdater {
}
pub fn start_polling(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
cx.spawn(async move |this, cx| loop {
this.update(cx, |this, cx| this.poll(cx))?;
cx.background_executor().timer(POLL_INTERVAL).await;
cx.spawn(|this, mut cx| async move {
loop {
this.update(&mut cx, |this, cx| this.poll(cx))?;
cx.background_executor().timer(POLL_INTERVAL).await;
}
})
}
@@ -265,9 +267,9 @@ impl AutoUpdater {
cx.notify();
self.pending_poll = Some(cx.spawn(async move |this, cx| {
self.pending_poll = Some(cx.spawn(|this, mut cx| async move {
let result = Self::update(this.upgrade()?, cx.clone()).await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.pending_poll = None;
if let Err(error) = result {
log::error!("auto-update failed: error:{:?}", error);

View File

@@ -64,7 +64,7 @@ fn view_release_notes_locally(
workspace
.with_local_workspace(window, cx, move |_, window, cx| {
cx.spawn_in(window, async move |workspace, cx| {
cx.spawn_in(window, |workspace, mut cx| async move {
let markdown = markdown.await.log_err();
let response = client.get(&url, Default::default(), true).await;
let Some(mut response) = response.log_err() else {
@@ -79,7 +79,7 @@ fn view_release_notes_locally(
if let Ok(body) = body {
workspace
.update_in(cx, |workspace, window, cx| {
.update_in(&mut cx, |workspace, window, cx| {
let project = workspace.project().clone();
let buffer = project.update(cx, |project, cx| {
project.create_local_buffer("", markdown, cx)
@@ -130,7 +130,7 @@ pub fn notify_if_app_was_updated(cx: &mut App) {
return;
};
let should_show_notification = updater.read(cx).should_show_update_notification(cx);
cx.spawn(async move |cx| {
cx.spawn(|cx| async move {
let should_show_notification = should_show_notification.await?;
if should_show_notification {
cx.update(|cx| {

View File

@@ -1080,12 +1080,12 @@ impl BufferDiff {
let complete_on_drop = util::defer(|| {
tx.send(()).ok();
});
cx.spawn(async move |_, cx| {
cx.spawn(|_, mut cx| async move {
let snapshot = snapshot.await;
let Some(this) = this.upgrade() else {
return;
};
this.update(cx, |this, _| {
this.update(&mut cx, |this, _| {
this.set_state(snapshot, &buffer);
})
.log_err();

View File

@@ -54,10 +54,10 @@ impl OneAtATime {
{
let (tx, rx) = oneshot::channel();
self.cancel.replace(tx);
cx.spawn(async move |cx| {
cx.spawn(|cx| async move {
futures::select_biased! {
_ = rx.fuse() => Ok(None),
result = f(cx.clone()).fuse() => result.map(Some),
result = f(cx).fuse() => result.map(Some),
}
})
}
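
`OneAtATime` cancels whatever it ran last by holding the sender half of a oneshot channel and racing new work against the receiver. A minimal sketch of the same pattern using only the `futures` crate; this `OneAtATime` is a stand-in, not the type from this file:

```rust
use futures::{channel::oneshot, FutureExt};

struct OneAtATime {
    cancel: Option<oneshot::Sender<()>>,
}

impl OneAtATime {
    async fn run<F>(&mut self, fut: F) -> Option<F::Output>
    where
        F: std::future::Future,
    {
        // Replacing the stored sender drops the previous one, which completes the
        // previous receiver and lets that earlier select resolve to None.
        let (tx, rx) = oneshot::channel::<()>();
        self.cancel.replace(tx);
        let fut = fut.fuse();
        futures::pin_mut!(fut);
        let mut rx = rx.fuse();
        futures::select_biased! {
            _ = rx => None,
            result = fut => Some(result),
        }
    }
}

fn main() {
    let mut runner = OneAtATime { cancel: None };
    let out = futures::executor::block_on(runner.run(async { 42 }));
    assert_eq!(out, Some(42));
}
```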
@@ -192,19 +192,19 @@ impl ActiveCall {
};
let invite = if let Some(room) = room {
cx.spawn(async move |_, cx| {
cx.spawn(move |_, mut cx| async move {
let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
let initial_project_id = if let Some(initial_project) = initial_project {
Some(
room.update(cx, |room, cx| room.share_project(initial_project, cx))?
room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
.await?,
)
} else {
None
};
room.update(cx, move |room, cx| {
room.update(&mut cx, move |room, cx| {
room.call(called_user_id, initial_project_id, cx)
})?
.await?;
@@ -215,7 +215,7 @@ impl ActiveCall {
let client = self.client.clone();
let user_store = self.user_store.clone();
let room = cx
.spawn(async move |this, cx| {
.spawn(move |this, mut cx| async move {
let create_room = async {
let room = cx
.update(|cx| {
@@ -229,14 +229,14 @@ impl ActiveCall {
})?
.await?;
this.update(cx, |this, cx| this.set_room(Some(room.clone()), cx))?
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
.await?;
anyhow::Ok(room)
};
let room = create_room.await;
this.update(cx, |this, _| this.pending_room_creation = None)?;
this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
room.map_err(Arc::new)
})
.shared();
@@ -247,10 +247,10 @@ impl ActiveCall {
})
};
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let result = invite.await;
if result.is_ok() {
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.report_call_event("Participant Invited", cx)
})?;
} else {
@@ -258,7 +258,7 @@ impl ActiveCall {
log::error!("invite failed: {:?}", result);
}
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.pending_invites.remove(&called_user_id);
cx.notify();
})?;
@@ -315,11 +315,11 @@ impl ActiveCall {
._join_debouncer
.spawn(cx, move |cx| Room::join(room_id, client, user_store, cx));
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let room = join.await?;
this.update(cx, |this, cx| this.set_room(room.clone(), cx))?
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.report_call_event("Incoming Call Accepted", cx)
})?;
Ok(())
@@ -363,11 +363,13 @@ impl ActiveCall {
Room::join_channel(channel_id, client, user_store, cx).await
});
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let room = join.await?;
this.update(cx, |this, cx| this.set_room(room.clone(), cx))?
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
.await?;
this.update(cx, |this, cx| this.report_call_event("Channel Joined", cx))?;
this.update(&mut cx, |this, cx| {
this.report_call_event("Channel Joined", cx)
})?;
Ok(room)
})
}

View File

@@ -128,11 +128,7 @@ impl Room {
let maintain_connection = cx.spawn({
let client = client.clone();
async move |this, cx| {
Self::maintain_connection(this, client.clone(), cx)
.log_err()
.await
}
move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err()
});
Audio::play_sound(Sound::Joined, cx);
@@ -176,7 +172,7 @@ impl Room {
user_store: Entity<UserStore>,
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
cx.spawn(async move |cx| {
cx.spawn(move |mut cx| async move {
let response = client.request(proto::CreateRoom {}).await?;
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
let room = cx.new(|cx| {
@@ -196,7 +192,7 @@ impl Room {
let initial_project_id = if let Some(initial_project) = initial_project {
let initial_project_id = room
.update(cx, |room, cx| {
.update(&mut cx, |room, cx| {
room.share_project(initial_project.clone(), cx)
})?
.await?;
@@ -206,7 +202,7 @@ impl Room {
};
let did_join = room
.update(cx, |room, cx| {
.update(&mut cx, |room, cx| {
room.leave_when_empty = true;
room.call(called_user_id, initial_project_id, cx)
})?
@@ -362,7 +358,7 @@ impl Room {
async fn maintain_connection(
this: WeakEntity<Self>,
client: Arc<Client>,
cx: &mut AsyncApp,
mut cx: AsyncApp,
) -> Result<()> {
let mut client_status = client.status();
loop {
@@ -374,7 +370,7 @@ impl Room {
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(cx, |this, cx| {
.update(&mut cx, |this, cx| {
this.status = RoomStatus::Rejoining;
cx.notify();
})?;
@@ -390,7 +386,7 @@ impl Room {
log::info!("client reconnected, attempting to rejoin room");
let Some(this) = this.upgrade() else { break };
match this.update(cx, |this, cx| this.rejoin(cx)) {
match this.update(&mut cx, |this, cx| this.rejoin(cx)) {
Ok(task) => {
if task.await.log_err().is_some() {
return true;
@@ -439,7 +435,7 @@ impl Room {
// we leave the room and return an error.
if let Some(this) = this.upgrade() {
log::info!("reconnection failed, leaving room");
this.update(cx, |this, cx| this.leave(cx))?.await?;
this.update(&mut cx, |this, cx| this.leave(cx))?.await?;
}
Err(anyhow!(
"can't reconnect to room: client failed to re-establish connection"
@@ -494,12 +490,12 @@ impl Room {
rejoined_projects,
});
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let response = response.await?;
let message_id = response.message_id;
let response = response.payload;
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.status = RoomStatus::Online;
this.apply_room_update(room_proto, cx)?;
@@ -581,7 +577,7 @@ impl Room {
let client = self.client.clone();
let room_id = self.id;
let role = role.into();
cx.spawn(async move |_, _| {
cx.spawn(|_, _| async move {
client
.request(proto::SetRoomParticipantRole {
room_id,
@@ -713,11 +709,11 @@ impl Room {
user_store.get_users(pending_participant_user_ids, cx),
)
});
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let (remote_participants, pending_participants) =
futures::join!(remote_participants, pending_participants);
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.participant_user_ids.clear();
if let Some(participant) = local_participant {
@@ -1120,7 +1116,7 @@ impl Room {
let client = self.client.clone();
let room_id = self.id;
self.pending_call_count += 1;
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let result = client
.request(proto::Call {
room_id,
@@ -1128,7 +1124,7 @@ impl Room {
initial_project_id,
})
.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.pending_call_count -= 1;
if this.should_leave() {
this.leave(cx).detach_and_log_err(cx);
@@ -1149,11 +1145,11 @@ impl Room {
let client = self.client.clone();
let user_store = self.user_store.clone();
cx.emit(Event::RemoteProjectJoined { project_id: id });
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let project =
Project::in_room(id, client, user_store, language_registry, fs, cx.clone()).await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.joined_projects.retain(|project| {
if let Some(project) = project.upgrade() {
!project.read(cx).is_disconnected(cx)
@@ -1182,13 +1178,15 @@ impl Room {
is_ssh_project: project.read(cx).is_via_ssh(),
});
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let response = request.await?;
project.update(cx, |project, cx| project.shared(response.project_id, cx))??;
project.update(&mut cx, |project, cx| {
project.shared(response.project_id, cx)
})??;
// If the user's location is in this project, it changes from UnsharedProject to SharedProject.
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.shared_projects.insert(project.downgrade());
let active_project = this.local_participant.active_project.as_ref();
if active_project.map_or(false, |location| *location == project) {
@@ -1344,7 +1342,7 @@ impl Room {
return Task::ready(Err(anyhow!("live-kit was not initialized")));
};
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let (track, stream) = capture_local_audio_track(cx.background_executor())?.await;
let publication = participant
@@ -1357,7 +1355,7 @@ impl Room {
)
.await
.map_err(|error| anyhow!("failed to publish track: {error}"));
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let live_kit = this
.live_kit
.as_mut()
@@ -1430,7 +1428,7 @@ impl Room {
let sources = cx.screen_capture_sources();
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let sources = sources.await??;
let source = sources.first().ok_or_else(|| anyhow!("no display found"))?;
@@ -1448,7 +1446,7 @@ impl Room {
.await
.map_err(|error| anyhow!("error publishing screen track {error:?}"));
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let live_kit = this
.live_kit
.as_mut()
@@ -1641,7 +1639,7 @@ fn spawn_room_connection(
cx: &mut Context<'_, Room>,
) {
if let Some(connection_info) = livekit_connection_info {
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let (room, mut events) = livekit::Room::connect(
&connection_info.server_url,
&connection_info.token,
@@ -1649,11 +1647,11 @@ fn spawn_room_connection(
)
.await?;
this.update(cx, |this, cx| {
let _handle_updates = cx.spawn(async move |this, cx| {
this.update(&mut cx, |this, cx| {
let _handle_updates = cx.spawn(|this, mut cx| async move {
while let Some(event) = events.recv().await {
if this
.update(cx, |this, cx| {
.update(&mut cx, |this, cx| {
this.livekit_room_updated(event, cx).warn_on_err();
})
.is_err()

View File

@@ -47,10 +47,10 @@ impl OneAtATime {
{
let (tx, rx) = oneshot::channel();
self.cancel.replace(tx);
cx.spawn(async move |cx| {
cx.spawn(|cx| async move {
futures::select_biased! {
_ = rx.fuse() => Ok(None),
result = f(cx.clone()).fuse() => result.map(Some),
result = f(cx).fuse() => result.map(Some),
}
})
}
@@ -185,19 +185,19 @@ impl ActiveCall {
};
let invite = if let Some(room) = room {
cx.spawn(async move |_, cx| {
cx.spawn(move |_, mut cx| async move {
let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
let initial_project_id = if let Some(initial_project) = initial_project {
Some(
room.update(cx, |room, cx| room.share_project(initial_project, cx))?
room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
.await?,
)
} else {
None
};
room.update(cx, move |room, cx| {
room.update(&mut cx, move |room, cx| {
room.call(called_user_id, initial_project_id, cx)
})?
.await?;
@@ -208,7 +208,7 @@ impl ActiveCall {
let client = self.client.clone();
let user_store = self.user_store.clone();
let room = cx
.spawn(async move |this, cx| {
.spawn(move |this, mut cx| async move {
let create_room = async {
let room = cx
.update(|cx| {
@@ -222,14 +222,14 @@ impl ActiveCall {
})?
.await?;
this.update(cx, |this, cx| this.set_room(Some(room.clone()), cx))?
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
.await?;
anyhow::Ok(room)
};
let room = create_room.await;
this.update(cx, |this, _| this.pending_room_creation = None)?;
this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
room.map_err(Arc::new)
})
.shared();
@@ -240,10 +240,10 @@ impl ActiveCall {
})
};
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let result = invite.await;
if result.is_ok() {
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.report_call_event("Participant Invited", cx)
})?;
} else {
@@ -251,7 +251,7 @@ impl ActiveCall {
log::error!("invite failed: {:?}", result);
}
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.pending_invites.remove(&called_user_id);
cx.notify();
})?;
@@ -304,15 +304,15 @@ impl ActiveCall {
let room_id = call.room_id;
let client = self.client.clone();
let user_store = self.user_store.clone();
let join = self._join_debouncer.spawn(cx, move |mut cx| async move {
Room::join(room_id, client, user_store, &mut cx).await
});
let join = self
._join_debouncer
.spawn(cx, move |cx| Room::join(room_id, client, user_store, cx));
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let room = join.await?;
this.update(cx, |this, cx| this.set_room(room.clone(), cx))?
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.report_call_event("Incoming Call Accepted", cx)
})?;
Ok(())
@@ -352,15 +352,17 @@ impl ActiveCall {
let client = self.client.clone();
let user_store = self.user_store.clone();
let join = self._join_debouncer.spawn(cx, move |mut cx| async move {
Room::join_channel(channel_id, client, user_store, &mut cx).await
let join = self._join_debouncer.spawn(cx, move |cx| async move {
Room::join_channel(channel_id, client, user_store, cx).await
});
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let room = join.await?;
this.update(cx, |this, cx| this.set_room(room.clone(), cx))?
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
.await?;
this.update(cx, |this, cx| this.report_call_event("Channel Joined", cx))?;
this.update(&mut cx, |this, cx| {
this.report_call_event("Channel Joined", cx)
})?;
Ok(room)
})
}

View File

@@ -115,7 +115,7 @@ impl Room {
let mut status = room.status();
// Consume the initial status of the room.
let _ = status.try_recv();
let _maintain_room = cx.spawn(async move |this, cx| {
let _maintain_room = cx.spawn(|this, mut cx| async move {
while let Some(status) = status.next().await {
let this = if let Some(this) = this.upgrade() {
this
@@ -124,7 +124,8 @@ impl Room {
};
if status == livekit_client_macos::ConnectionState::Disconnected {
this.update(cx, |this, cx| this.leave(cx).log_err()).ok();
this.update(&mut cx, |this, cx| this.leave(cx).log_err())
.ok();
break;
}
}
@@ -132,7 +133,7 @@ impl Room {
let _handle_updates = cx.spawn({
let room = room.clone();
async move |this, cx| {
move |this, mut cx| async move {
let mut updates = room.updates();
while let Some(update) = updates.next().await {
let this = if let Some(this) = this.upgrade() {
@@ -141,7 +142,7 @@ impl Room {
break;
};
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.live_kit_room_updated(update, cx).log_err()
})
.ok();
@@ -150,9 +151,9 @@ impl Room {
});
let connect = room.connect(&connection_info.server_url, &connection_info.token);
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
connect.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if this.can_use_microphone() {
if let Some(live_kit) = &this.live_kit {
if !live_kit.muted_by_user && !live_kit.deafened {
@@ -183,11 +184,7 @@ impl Room {
let maintain_connection = cx.spawn({
let client = client.clone();
async move |this, cx| {
Self::maintain_connection(this, client.clone(), cx)
.log_err()
.await
}
move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err()
});
Audio::play_sound(Sound::Joined, cx);
@@ -231,7 +228,7 @@ impl Room {
user_store: Entity<UserStore>,
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
cx.spawn(async move |cx| {
cx.spawn(move |mut cx| async move {
let response = client.request(proto::CreateRoom {}).await?;
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
let room = cx.new(|cx| {
@@ -251,7 +248,7 @@ impl Room {
let initial_project_id = if let Some(initial_project) = initial_project {
let initial_project_id = room
.update(cx, |room, cx| {
.update(&mut cx, |room, cx| {
room.share_project(initial_project.clone(), cx)
})?
.await?;
@@ -261,7 +258,7 @@ impl Room {
};
let did_join = room
.update(cx, |room, cx| {
.update(&mut cx, |room, cx| {
room.leave_when_empty = true;
room.call(called_user_id, initial_project_id, cx)
})?
@@ -277,7 +274,7 @@ impl Room {
channel_id: ChannelId,
client: Arc<Client>,
user_store: Entity<UserStore>,
cx: &mut AsyncApp,
cx: AsyncApp,
) -> Result<Entity<Self>> {
Self::from_join_response(
client
@@ -295,7 +292,7 @@ impl Room {
room_id: u64,
client: Arc<Client>,
user_store: Entity<UserStore>,
cx: &mut AsyncApp,
cx: AsyncApp,
) -> Result<Entity<Self>> {
Self::from_join_response(
client.request(proto::JoinRoom { id: room_id }).await?,
@@ -336,7 +333,7 @@ impl Room {
response: proto::JoinRoomResponse,
client: Arc<Client>,
user_store: Entity<UserStore>,
cx: &mut AsyncApp,
mut cx: AsyncApp,
) -> Result<Entity<Self>> {
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
let room = cx.new(|cx| {
@@ -349,7 +346,7 @@ impl Room {
cx,
)
})?;
room.update(cx, |room, cx| {
room.update(&mut cx, |room, cx| {
room.leave_when_empty = room.channel_id.is_none();
room.apply_room_update(room_proto, cx)?;
anyhow::Ok(())
@@ -417,7 +414,7 @@ impl Room {
async fn maintain_connection(
this: WeakEntity<Self>,
client: Arc<Client>,
cx: &mut AsyncApp,
mut cx: AsyncApp,
) -> Result<()> {
let mut client_status = client.status();
loop {
@@ -429,7 +426,7 @@ impl Room {
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(cx, |this, cx| {
.update(&mut cx, |this, cx| {
this.status = RoomStatus::Rejoining;
cx.notify();
})?;
@@ -445,7 +442,7 @@ impl Room {
log::info!("client reconnected, attempting to rejoin room");
let Some(this) = this.upgrade() else { break };
match this.update(cx, |this, cx| this.rejoin(cx)) {
match this.update(&mut cx, |this, cx| this.rejoin(cx)) {
Ok(task) => {
if task.await.log_err().is_some() {
return true;
@@ -494,7 +491,7 @@ impl Room {
// we leave the room and return an error.
if let Some(this) = this.upgrade() {
log::info!("reconnection failed, leaving room");
this.update(cx, |this, cx| this.leave(cx))?.await?;
this.update(&mut cx, |this, cx| this.leave(cx))?.await?;
}
Err(anyhow!(
"can't reconnect to room: client failed to re-establish connection"
@@ -549,12 +546,12 @@ impl Room {
rejoined_projects,
});
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let response = response.await?;
let message_id = response.message_id;
let response = response.payload;
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.status = RoomStatus::Online;
this.apply_room_update(room_proto, cx)?;
@@ -636,7 +633,7 @@ impl Room {
let client = self.client.clone();
let room_id = self.id;
let role = role.into();
cx.spawn(async move |_, _| {
cx.spawn(|_, _| async move {
client
.request(proto::SetRoomParticipantRole {
room_id,
@@ -739,11 +736,11 @@ impl Room {
)
});
self.pending_room_update = Some(cx.spawn(async move |this, cx| {
self.pending_room_update = Some(cx.spawn(|this, mut cx| async move {
let (remote_participants, pending_participants) =
futures::join!(remote_participants, pending_participants);
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.participant_user_ids.clear();
if let Some(participant) = local_participant {
@@ -1139,7 +1136,7 @@ impl Room {
let client = self.client.clone();
let room_id = self.id;
self.pending_call_count += 1;
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let result = client
.request(proto::Call {
room_id,
@@ -1147,7 +1144,7 @@ impl Room {
initial_project_id,
})
.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.pending_call_count -= 1;
if this.should_leave() {
this.leave(cx).detach_and_log_err(cx);
@@ -1168,11 +1165,11 @@ impl Room {
let client = self.client.clone();
let user_store = self.user_store.clone();
cx.emit(Event::RemoteProjectJoined { project_id: id });
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let project =
Project::in_room(id, client, user_store, language_registry, fs, cx.clone()).await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.joined_projects.retain(|project| {
if let Some(project) = project.upgrade() {
!project.read(cx).is_disconnected(cx)
@@ -1201,13 +1198,15 @@ impl Room {
is_ssh_project: project.read(cx).is_via_ssh(),
});
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let response = request.await?;
project.update(cx, |project, cx| project.shared(response.project_id, cx))??;
project.update(&mut cx, |project, cx| {
project.shared(response.project_id, cx)
})??;
// If the user's location is in this project, it changes from UnsharedProject to SharedProject.
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.shared_projects.insert(project.downgrade());
let active_project = this.local_participant.active_project.as_ref();
if active_project.map_or(false, |location| *location == project) {
@@ -1349,12 +1348,12 @@ impl Room {
return Task::ready(Err(anyhow!("live-kit was not initialized")));
};
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let publish_track = async {
let track = LocalAudioTrack::create();
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(cx, |this, _| {
.update(&mut cx, |this, _| {
this.live_kit
.as_ref()
.map(|live_kit| live_kit.room.publish_audio_track(track))
@@ -1365,7 +1364,7 @@ impl Room {
let publication = publish_track.await;
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(cx, |this, cx| {
.update(&mut cx, |this, cx| {
let live_kit = this
.live_kit
.as_mut()
@@ -1425,7 +1424,7 @@ impl Room {
return Task::ready(Err(anyhow!("live-kit was not initialized")));
};
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let publish_track = async {
let displays = displays.await?;
let display = displays
@@ -1434,7 +1433,7 @@ impl Room {
let track = LocalVideoTrack::screen_share_for_display(display);
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(cx, |this, _| {
.update(&mut cx, |this, _| {
this.live_kit
.as_ref()
.map(|live_kit| live_kit.room.publish_video_track(track))
@@ -1446,7 +1445,7 @@ impl Room {
let publication = publish_track.await;
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(cx, |this, cx| {
.update(&mut cx, |this, cx| {
let live_kit = this
.live_kit
.as_mut()

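Note on the pattern running through the file above: each hunk pairs the async-closure spawn shape, cx.spawn(async move |this, cx| { ... }) with this.update(cx, ...) in the body, against the owned-context shape, cx.spawn(move |this, mut cx| async move { ... }) with this.update(&mut cx, ...), and likewise swaps cx: &mut AsyncApp parameters for owned AsyncApp values. The standalone sketch below contrasts only the two closure shapes; Ctx, spawn_owned, and spawn_borrowed are invented stand-ins rather than gpui's real AsyncApp/spawn API, and it assumes Rust 1.85+ (for async closures) plus the futures crate.

use futures::executor::block_on;
use std::future::Future;

// Stand-in for gpui's AsyncApp; only the closure shapes matter here.
struct Ctx(i32);

impl Ctx {
    fn update(&mut self, f: impl FnOnce(&mut i32)) {
        f(&mut self.0)
    }
}

// Owned-context shape: the closure receives the context by value and returns
// a future, so the body declares `mut cx` and passes `&mut cx` into updates.
fn spawn_owned<Fut: Future<Output = i32>>(f: impl FnOnce(Ctx) -> Fut) -> Fut {
    f(Ctx(0))
}

// Async-closure shape: the closure borrows the context, so call sites hand
// `cx` through without any `mut` bookkeeping.
async fn spawn_borrowed(f: impl AsyncFnOnce(&mut Ctx) -> i32) -> i32 {
    let mut cx = Ctx(0);
    f(&mut cx).await
}

fn main() {
    let owned = block_on(spawn_owned(|mut cx| async move {
        cx.update(|n| *n += 1);
        cx.0
    }));

    let borrowed = block_on(spawn_borrowed(async move |cx: &mut Ctx| {
        cx.update(|n| *n += 1);
        cx.0
    }));

    assert_eq!((owned, borrowed), (1, 1));
}

The borrowed shape lets the spawned future hold a reference to the context across await points, which is what AsyncFnOnce-style bounds can express and a plain FnOnce(..) -> Fut bound cannot; that difference is the usual reason one API passes &mut AsyncApp to async closures while the other hands over an owned AsyncApp.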
View File

@@ -47,7 +47,7 @@ impl ChannelBuffer {
client: Arc<Client>,
user_store: Entity<UserStore>,
channel_store: Entity<ChannelStore>,
cx: &mut AsyncApp,
mut cx: AsyncApp,
) -> Result<Entity<Self>> {
let response = client
.request(proto::JoinChannelBuffer {
@@ -66,7 +66,7 @@ impl ChannelBuffer {
let capability = channel_store.read(cx).channel_capability(channel.id);
language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text)
})?;
buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
let subscription = client.subscribe_to_entity(channel.id.0)?;
@@ -208,7 +208,7 @@ impl ChannelBuffer {
let client = self.client.clone();
let epoch = self.epoch();
self.acknowledge_task = Some(cx.spawn(async move |_, cx| {
self.acknowledge_task = Some(cx.spawn(move |_, cx| async move {
cx.background_executor()
.timer(ACKNOWLEDGE_DEBOUNCE_INTERVAL)
.await;

View File

@@ -106,7 +106,7 @@ impl ChannelChat {
channel_store: Entity<ChannelStore>,
user_store: Entity<UserStore>,
client: Arc<Client>,
cx: &mut AsyncApp,
mut cx: AsyncApp,
) -> Result<Entity<Self>> {
let channel_id = channel.id;
let subscription = client.subscribe_to_entity(channel_id.0).unwrap();
@@ -132,7 +132,7 @@ impl ChannelChat {
last_acknowledged_id: None,
rng: StdRng::from_entropy(),
first_loaded_message_id: None,
_subscription: subscription.set_entity(&cx.entity(), &cx.to_async()),
_subscription: subscription.set_entity(&cx.entity(), &mut cx.to_async()),
}
})?;
Self::handle_loaded_messages(
@@ -141,7 +141,7 @@ impl ChannelChat {
client,
response.messages,
response.done,
cx,
&mut cx,
)
.await?;
Ok(handle)
@@ -205,7 +205,7 @@ impl ChannelChat {
let outgoing_messages_lock = self.outgoing_messages_lock.clone();
// todo - handle messages that fail to send (e.g. >1024 chars)
Ok(cx.spawn(async move |this, cx| {
Ok(cx.spawn(move |this, mut cx| async move {
let outgoing_message_guard = outgoing_messages_lock.lock().await;
let request = rpc.request(proto::SendChannelMessage {
channel_id: channel_id.0,
@@ -218,8 +218,8 @@ impl ChannelChat {
drop(outgoing_message_guard);
let response = response.message.ok_or_else(|| anyhow!("invalid message"))?;
let id = response.id;
let message = ChannelMessage::from_proto(response, &user_store, cx).await?;
this.update(cx, |this, cx| {
let message = ChannelMessage::from_proto(response, &user_store, &mut cx).await?;
this.update(&mut cx, |this, cx| {
this.insert_messages(SumTree::from_item(message, &()), cx);
if this.first_loaded_message_id.is_none() {
this.first_loaded_message_id = Some(id);
@@ -234,9 +234,9 @@ impl ChannelChat {
channel_id: self.channel_id.0,
message_id: id,
});
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
response.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.message_removed(id, cx);
})?;
Ok(())
@@ -266,7 +266,7 @@ impl ChannelChat {
nonce: Some(nonce.into()),
mentions: mentions_to_proto(&message.mentions),
});
Ok(cx.spawn(async move |_, _| {
Ok(cx.spawn(move |_, _| async move {
request.await?;
Ok(())
}))
@@ -281,7 +281,7 @@ impl ChannelChat {
let user_store = self.user_store.clone();
let channel_id = self.channel_id;
let before_message_id = self.first_loaded_message_id()?;
Some(cx.spawn(async move |this, cx| {
Some(cx.spawn(move |this, mut cx| {
async move {
let response = rpc
.request(proto::GetChannelMessages {
@@ -295,14 +295,13 @@ impl ChannelChat {
rpc,
response.messages,
response.done,
cx,
&mut cx,
)
.await?;
anyhow::Ok(())
}
.log_err()
.await
}))
}
@@ -440,7 +439,7 @@ impl ChannelChat {
let user_store = self.user_store.clone();
let rpc = self.rpc.clone();
let channel_id = self.channel_id;
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| {
async move {
let response = rpc
.request(proto::JoinChannelChat {
@@ -453,11 +452,11 @@ impl ChannelChat {
rpc.clone(),
response.messages,
response.done,
cx,
&mut cx,
)
.await?;
let pending_messages = this.update(cx, |this, _| {
let pending_messages = this.update(&mut cx, |this, _| {
this.pending_messages().cloned().collect::<Vec<_>>()
})?;
@@ -473,10 +472,10 @@ impl ChannelChat {
let message = ChannelMessage::from_proto(
response.message.ok_or_else(|| anyhow!("invalid message"))?,
&user_store,
cx,
&mut cx,
)
.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.insert_messages(SumTree::from_item(message, &()), cx);
})?;
}
@@ -484,7 +483,6 @@ impl ChannelChat {
anyhow::Ok(())
}
.log_err()
.await
})
.detach();
}

View File

@@ -164,22 +164,22 @@ impl ChannelStore {
let mut connection_status = client.status();
let (update_channels_tx, mut update_channels_rx) = mpsc::unbounded();
let watch_connection_status = cx.spawn(async move |this, cx| {
let watch_connection_status = cx.spawn(|this, mut cx| async move {
while let Some(status) = connection_status.next().await {
let this = this.upgrade()?;
match status {
client::Status::Connected { .. } => {
this.update(cx, |this, cx| this.handle_connect(cx))
this.update(&mut cx, |this, cx| this.handle_connect(cx))
.ok()?
.await
.log_err()?;
}
client::Status::SignedOut | client::Status::UpgradeRequired => {
this.update(cx, |this, cx| this.handle_disconnect(false, cx))
this.update(&mut cx, |this, cx| this.handle_disconnect(false, cx))
.ok();
}
_ => {
this.update(cx, |this, cx| this.handle_disconnect(true, cx))
this.update(&mut cx, |this, cx| this.handle_disconnect(true, cx))
.ok();
}
}
@@ -200,12 +200,13 @@ impl ChannelStore {
_rpc_subscriptions: rpc_subscriptions,
_watch_connection_status: watch_connection_status,
disconnect_channel_buffers_task: None,
_update_channels: cx.spawn(async move |this, cx| {
_update_channels: cx.spawn(|this, mut cx| async move {
maybe!(async move {
while let Some(update_channels) = update_channels_rx.next().await {
if let Some(this) = this.upgrade() {
let update_task = this
.update(cx, |this, cx| this.update_channels(update_channels, cx))?;
let update_task = this.update(&mut cx, |this, cx| {
this.update_channels(update_channels, cx)
})?;
if let Some(update_task) = update_task {
update_task.await.log_err();
}
@@ -309,9 +310,7 @@ impl ChannelStore {
self.open_channel_resource(
channel_id,
|this| &mut this.opened_buffers,
async move |channel, cx| {
ChannelBuffer::new(channel, client, user_store, channel_store, cx).await
},
|channel, cx| ChannelBuffer::new(channel, client, user_store, channel_store, cx),
cx,
)
}
@@ -329,14 +328,14 @@ impl ChannelStore {
.request(proto::GetChannelMessagesById { message_ids }),
)
};
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
if let Some(request) = request {
let response = request.await?;
let this = this
.upgrade()
.ok_or_else(|| anyhow!("channel store dropped"))?;
let user_store = this.update(cx, |this, _| this.user_store.clone())?;
ChannelMessage::from_proto_vec(response.messages, &user_store, cx).await
let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
ChannelMessage::from_proto_vec(response.messages, &user_store, &mut cx).await
} else {
Ok(Vec::new())
}
@@ -441,7 +440,7 @@ impl ChannelStore {
self.open_channel_resource(
channel_id,
|this| &mut this.opened_chats,
async move |channel, cx| ChannelChat::new(channel, this, user_store, client, cx).await,
|channel, cx| ChannelChat::new(channel, this, user_store, client, cx),
cx,
)
}
@@ -451,7 +450,7 @@ impl ChannelStore {
/// Make sure that the resource is only opened once, even if this method
/// is called multiple times with the same channel id while the first task
/// is still running.
fn open_channel_resource<T, F>(
fn open_channel_resource<T, F, Fut>(
&mut self,
channel_id: ChannelId,
get_map: fn(&mut Self) -> &mut HashMap<ChannelId, OpenEntityHandle<T>>,
@@ -459,7 +458,8 @@ impl ChannelStore {
cx: &mut Context<Self>,
) -> Task<Result<Entity<T>>>
where
F: AsyncFnOnce(Arc<Channel>, &mut AsyncApp) -> Result<Entity<T>> + 'static,
F: 'static + FnOnce(Arc<Channel>, AsyncApp) -> Fut,
Fut: Future<Output = Result<Entity<T>>>,
T: 'static,
{
let task = loop {
@@ -479,8 +479,8 @@ impl ChannelStore {
},
hash_map::Entry::Vacant(e) => {
let task = cx
.spawn(async move |this, cx| {
let channel = this.update(cx, |this, _| {
.spawn(move |this, mut cx| async move {
let channel = this.update(&mut cx, |this, _| {
this.channel_for_id(channel_id).cloned().ok_or_else(|| {
Arc::new(anyhow!("no channel for id: {}", channel_id))
})
@@ -493,9 +493,9 @@ impl ChannelStore {
e.insert(OpenEntityHandle::Loading(task.clone()));
cx.spawn({
let task = task.clone();
async move |this, cx| {
move |this, mut cx| async move {
let result = task.await;
this.update(cx, |this, _| match result {
this.update(&mut cx, |this, _| match result {
Ok(model) => {
get_map(this).insert(
channel_id,
@@ -570,7 +570,7 @@ impl ChannelStore {
) -> Task<Result<ChannelId>> {
let client = self.client.clone();
let name = name.trim_start_matches('#').to_owned();
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let response = client
.request(proto::CreateChannel {
name,
@@ -583,7 +583,7 @@ impl ChannelStore {
.ok_or_else(|| anyhow!("missing channel in response"))?;
let channel_id = ChannelId(channel.id);
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let task = this.update_channels(
proto::UpdateChannels {
channels: vec![channel],
@@ -611,7 +611,7 @@ impl ChannelStore {
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
cx.spawn(async move |_, _| {
cx.spawn(move |_, _| async move {
let _ = client
.request(proto::MoveChannel {
channel_id: channel_id.0,
@@ -630,7 +630,7 @@ impl ChannelStore {
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
cx.spawn(async move |_, _| {
cx.spawn(move |_, _| async move {
let _ = client
.request(proto::SetChannelVisibility {
channel_id: channel_id.0,
@@ -655,7 +655,7 @@ impl ChannelStore {
cx.notify();
let client = self.client.clone();
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let result = client
.request(proto::InviteChannelMember {
channel_id: channel_id.0,
@@ -664,7 +664,7 @@ impl ChannelStore {
})
.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.outgoing_invites.remove(&(channel_id, user_id));
cx.notify();
})?;
@@ -687,7 +687,7 @@ impl ChannelStore {
cx.notify();
let client = self.client.clone();
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let result = client
.request(proto::RemoveChannelMember {
channel_id: channel_id.0,
@@ -695,7 +695,7 @@ impl ChannelStore {
})
.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.outgoing_invites.remove(&(channel_id, user_id));
cx.notify();
})?;
@@ -717,7 +717,7 @@ impl ChannelStore {
cx.notify();
let client = self.client.clone();
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let result = client
.request(proto::SetChannelMemberRole {
channel_id: channel_id.0,
@@ -726,7 +726,7 @@ impl ChannelStore {
})
.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.outgoing_invites.remove(&(channel_id, user_id));
cx.notify();
})?;
@@ -744,7 +744,7 @@ impl ChannelStore {
) -> Task<Result<()>> {
let client = self.client.clone();
let name = new_name.to_string();
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let channel = client
.request(proto::RenameChannel {
channel_id: channel_id.0,
@@ -753,7 +753,7 @@ impl ChannelStore {
.await?
.channel
.ok_or_else(|| anyhow!("missing channel in response"))?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let task = this.update_channels(
proto::UpdateChannels {
channels: vec![channel],
@@ -799,7 +799,7 @@ impl ChannelStore {
) -> Task<Result<Vec<ChannelMembership>>> {
let client = self.client.clone();
let user_store = self.user_store.downgrade();
cx.spawn(async move |_, cx| {
cx.spawn(move |_, mut cx| async move {
let response = client
.request(proto::GetChannelMembers {
channel_id: channel_id.0,
@@ -807,7 +807,7 @@ impl ChannelStore {
limit: limit as u64,
})
.await?;
user_store.update(cx, |user_store, _| {
user_store.update(&mut cx, |user_store, _| {
user_store.insert(response.users);
response
.members
@@ -931,10 +931,10 @@ impl ChannelStore {
buffers: buffer_versions,
});
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let mut response = response.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.opened_buffers.retain(|_, buffer| match buffer {
OpenEntityHandle::Open(channel_buffer) => {
let Some(channel_buffer) = channel_buffer.upgrade() else {
@@ -1006,13 +1006,13 @@ impl ChannelStore {
cx.notify();
self.did_subscribe = false;
self.disconnect_channel_buffers_task.get_or_insert_with(|| {
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
if wait_for_reconnect {
cx.background_executor().timer(RECONNECT_TIMEOUT).await;
}
if let Some(this) = this.upgrade() {
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
for (_, buffer) in this.opened_buffers.drain() {
if let OpenEntityHandle::Open(buffer) = buffer {
if let Some(buffer) = buffer.upgrade() {
@@ -1136,10 +1136,10 @@ impl ChannelStore {
let users = self
.user_store
.update(cx, |user_store, cx| user_store.get_users(all_user_ids, cx));
Some(cx.spawn(async move |this, cx| {
Some(cx.spawn(|this, mut cx| async move {
let users = users.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
for entry in &channel_participants {
let mut participants: Vec<_> = entry
.participant_user_ids

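Note on the open_channel_resource hunk above: its generic bound changes between F: AsyncFnOnce(Arc<Channel>, &mut AsyncApp) -> Result<Entity<T>> and the pre-async-closure pair F: 'static + FnOnce(Arc<Channel>, AsyncApp) -> Fut with Fut: Future<Output = Result<Entity<T>>>, which is why the call sites drop their async move |channel, cx| wrappers. A minimal, self-contained sketch of the second bound shape, with invented names (open_resource, Ctx, Resource) standing in for the channel-store types; the real bounds also carry 'static and Result:

use futures::executor::block_on;
use std::future::Future;

struct Ctx;
struct Resource(u64);

// Generic over a factory closure that returns some future: the
// `F: FnOnce(.., Ctx) -> Fut, Fut: Future` shape shown in the hunk.
fn open_resource<F, Fut>(id: u64, factory: F) -> impl Future<Output = Resource>
where
    F: FnOnce(u64, Ctx) -> Fut,
    Fut: Future<Output = Resource>,
{
    factory(id, Ctx)
}

fn main() {
    // Callers pass a plain closure returning an async block, mirroring
    // `|channel, cx| ChannelBuffer::new(channel, ..., cx)` above.
    let resource = block_on(open_resource(7, |id, _cx| async move { Resource(id) }));
    assert_eq!(resource.0, 7);
}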
View File

@@ -144,9 +144,9 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
let client = client.clone();
move |_: &SignIn, cx| {
if let Some(client) = client.upgrade() {
cx.spawn(async move |cx| {
client.authenticate_and_connect(true, &cx).log_err().await
})
cx.spawn(
|cx| async move { client.authenticate_and_connect(true, &cx).log_err().await },
)
.detach();
}
}
@@ -156,7 +156,7 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
let client = client.clone();
move |_: &SignOut, cx| {
if let Some(client) = client.upgrade() {
cx.spawn(async move |cx| {
cx.spawn(|cx| async move {
client.sign_out(&cx).await;
})
.detach();
@@ -168,7 +168,7 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
let client = client.clone();
move |_: &Reconnect, cx| {
if let Some(client) = client.upgrade() {
cx.spawn(async move |cx| {
cx.spawn(|cx| async move {
client.reconnect(&cx);
})
.detach();
@@ -640,7 +640,7 @@ impl Client {
}
Status::ConnectionLost => {
let this = self.clone();
state._reconnect_task = Some(cx.spawn(async move |cx| {
state._reconnect_task = Some(cx.spawn(move |cx| async move {
#[cfg(any(test, feature = "test-support"))]
let mut rng = StdRng::seed_from_u64(0);
#[cfg(not(any(test, feature = "test-support")))]
@@ -964,11 +964,13 @@ impl Client {
cx.spawn({
let this = self.clone();
async move |cx| {
while let Some(message) = incoming.next().await {
this.handle_message(message, &cx);
// Don't starve the main thread when receiving lots of messages at once.
smol::future::yield_now().await;
|cx| {
async move {
while let Some(message) = incoming.next().await {
this.handle_message(message, &cx);
// Don't starve the main thread when receiving lots of messages at once.
smol::future::yield_now().await;
}
}
}
})
@@ -976,20 +978,22 @@ impl Client {
cx.spawn({
let this = self.clone();
async move |cx| match handle_io.await {
Ok(()) => {
if *this.status().borrow()
== (Status::Connected {
connection_id,
peer_id,
})
{
this.set_status(Status::SignedOut, &cx);
move |cx| async move {
match handle_io.await {
Ok(()) => {
if *this.status().borrow()
== (Status::Connected {
connection_id,
peer_id,
})
{
this.set_status(Status::SignedOut, &cx);
}
}
Err(err) => {
log::error!("connection error: {:?}", err);
this.set_status(Status::ConnectionLost, &cx);
}
}
Err(err) => {
log::error!("connection error: {:?}", err);
this.set_status(Status::ConnectionLost, &cx);
}
}
})
@@ -1174,12 +1178,12 @@ impl Client {
pub fn authenticate_with_browser(self: &Arc<Self>, cx: &AsyncApp) -> Task<Result<Credentials>> {
let http = self.http.clone();
let this = self.clone();
cx.spawn(async move |cx| {
cx.spawn(|cx| async move {
let background = cx.background_executor().clone();
let (open_url_tx, open_url_rx) = oneshot::channel::<String>();
cx.update(|cx| {
cx.spawn(async move |cx| {
cx.spawn(move |cx| async move {
let url = open_url_rx.await?;
cx.update(|cx| cx.open_url(&url))
})
@@ -1541,23 +1545,25 @@ impl Client {
original_sender_id,
type_name
);
cx.spawn(async move |_| match future.await {
Ok(()) => {
log::debug!(
"rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
client_id,
original_sender_id,
type_name
);
}
Err(error) => {
log::error!(
"error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
client_id,
original_sender_id,
type_name,
error
);
cx.spawn(move |_| async move {
match future.await {
Ok(()) => {
log::debug!(
"rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
client_id,
original_sender_id,
type_name
);
}
Err(error) => {
log::error!(
"error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
client_id,
original_sender_id,
type_name,
error
);
}
}
})
.detach();

View File

@@ -44,7 +44,7 @@ impl FakeServer {
let state = Arc::downgrade(&server.state);
move |cx| {
let state = state.clone();
cx.spawn(async move |_| {
cx.spawn(move |_| async move {
let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
let mut state = state.lock();
state.auth_count += 1;
@@ -63,7 +63,7 @@ impl FakeServer {
let peer = peer.clone();
let state = state.clone();
let credentials = credentials.clone();
cx.spawn(async move |cx| {
cx.spawn(move |cx| async move {
let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
let peer = peer.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
if state.lock().forbid_connections {

View File

@@ -168,10 +168,11 @@ impl UserStore {
invite_info: None,
client: Arc::downgrade(&client),
update_contacts_tx,
_maintain_contacts: cx.spawn(async move |this, cx| {
_maintain_contacts: cx.spawn(|this, mut cx| async move {
let _subscriptions = rpc_subscriptions;
while let Some(message) = update_contacts_rx.next().await {
if let Ok(task) = this.update(cx, |this, cx| this.update_contacts(message, cx))
if let Ok(task) =
this.update(&mut cx, |this, cx| this.update_contacts(message, cx))
{
task.log_err().await;
} else {
@@ -179,7 +180,7 @@ impl UserStore {
}
}
}),
_maintain_current_user: cx.spawn(async move |this, cx| {
_maintain_current_user: cx.spawn(|this, mut cx| async move {
let mut status = client.status();
let weak = Arc::downgrade(&client);
drop(client);
@@ -191,9 +192,10 @@ impl UserStore {
match status {
Status::Connected { .. } => {
if let Some(user_id) = client.user_id() {
let fetch_user = if let Ok(fetch_user) =
this.update(cx, |this, cx| this.get_user(user_id, cx).log_err())
{
let fetch_user = if let Ok(fetch_user) = this
.update(&mut cx, |this, cx| {
this.get_user(user_id, cx).log_err()
}) {
fetch_user
} else {
break;
@@ -237,12 +239,12 @@ impl UserStore {
current_user_tx.send(user).await.ok();
this.update(cx, |_, cx| cx.notify())?;
this.update(&mut cx, |_, cx| cx.notify())?;
}
}
Status::SignedOut => {
current_user_tx.send(None).await.ok();
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.accepted_tos_at = None;
cx.emit(Event::PrivateUserInfoUpdated);
cx.notify();
@@ -251,7 +253,7 @@ impl UserStore {
.await;
}
Status::ConnectionLost => {
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
cx.notify();
this.clear_contacts()
})?
@@ -348,7 +350,7 @@ impl UserStore {
user_ids.extend(message.outgoing_requests.iter());
let load_users = self.get_users(user_ids.into_iter().collect(), cx);
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
load_users.await?;
// Users are fetched in parallel above and cached in the call to get_users
@@ -358,22 +360,25 @@ impl UserStore {
.upgrade()
.ok_or_else(|| anyhow!("can't upgrade user store handle"))?;
for contact in message.contacts {
updated_contacts
.push(Arc::new(Contact::from_proto(contact, &this, cx).await?));
updated_contacts.push(Arc::new(
Contact::from_proto(contact, &this, &mut cx).await?,
));
}
let mut incoming_requests = Vec::new();
for request in message.incoming_requests {
incoming_requests.push({
this.update(cx, |this, cx| this.get_user(request.requester_id, cx))?
.await?
this.update(&mut cx, |this, cx| {
this.get_user(request.requester_id, cx)
})?
.await?
});
}
let mut outgoing_requests = Vec::new();
for requested_user_id in message.outgoing_requests {
outgoing_requests.push(
this.update(cx, |this, cx| this.get_user(requested_user_id, cx))?
this.update(&mut cx, |this, cx| this.get_user(requested_user_id, cx))?
.await?,
);
}
@@ -385,7 +390,7 @@ impl UserStore {
let removed_outgoing_requests =
HashSet::<u64>::from_iter(message.remove_outgoing_requests.iter().copied());
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
// Remove contacts
this.contacts
.retain(|contact| !removed_contacts.contains(&contact.user.id));
@@ -538,7 +543,7 @@ impl UserStore {
cx: &Context<Self>,
) -> Task<Result<()>> {
let client = self.client.upgrade();
cx.spawn(async move |_, _| {
cx.spawn(move |_, _| async move {
client
.ok_or_else(|| anyhow!("can't upgrade client reference"))?
.request(proto::RespondToContactRequest {
@@ -560,12 +565,12 @@ impl UserStore {
*self.pending_contact_requests.entry(user_id).or_insert(0) += 1;
cx.notify();
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
let response = client
.ok_or_else(|| anyhow!("can't upgrade client reference"))?
.request(request)
.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if let Entry::Occupied(mut request_count) =
this.pending_contact_requests.entry(user_id)
{
@@ -609,9 +614,9 @@ impl UserStore {
let mut user_ids_to_fetch = user_ids.clone();
user_ids_to_fetch.retain(|id| !self.users.contains_key(id));
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
if !user_ids_to_fetch.is_empty() {
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.load_users(
proto::GetUsers {
user_ids: user_ids_to_fetch,
@@ -622,7 +627,7 @@ impl UserStore {
.await?;
}
this.update(cx, |this, _| {
this.update(&mut cx, |this, _| {
user_ids
.iter()
.map(|user_id| {
@@ -663,9 +668,9 @@ impl UserStore {
}
let load_users = self.get_users(vec![user_id], cx);
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
load_users.await?;
this.update(cx, |this, _| {
this.update(&mut cx, |this, _| {
this.users
.get(&user_id)
.cloned()
@@ -703,14 +708,14 @@ impl UserStore {
};
let client = self.client.clone();
cx.spawn(async move |this, cx| {
cx.spawn(move |this, mut cx| async move {
if let Some(client) = client.upgrade() {
let response = client
.request(proto::AcceptTermsOfService {})
.await
.context("error accepting tos")?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.set_current_user_accepted_tos_at(Some(response.accepted_tos_at));
cx.emit(Event::PrivateUserInfoUpdated);
})
@@ -732,12 +737,12 @@ impl UserStore {
cx: &Context<Self>,
) -> Task<Result<Vec<Arc<User>>>> {
let client = self.client.clone();
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
if let Some(rpc) = client.upgrade() {
let response = rpc.request(request).await.context("error loading users")?;
let users = response.users;
this.update(cx, |this, _| this.insert(users))
this.update(&mut cx, |this, _| this.insert(users))
} else {
Ok(Vec::new())
}
@@ -791,8 +796,8 @@ impl UserStore {
}
if !missing_user_ids.is_empty() {
let this = self.weak_self.clone();
cx.spawn(async move |cx| {
this.update(cx, |this, cx| this.get_users(missing_user_ids, cx))?
cx.spawn(|mut cx| async move {
this.update(&mut cx, |this, cx| this.get_users(missing_user_ids, cx))?
.await
})
.detach_and_log_err(cx);

View File

@@ -660,6 +660,10 @@ fn for_snowflake(
e.event_type.clone(),
serde_json::to_value(&e.event_properties).unwrap(),
),
Event::AssistantThreadFeedback(e) => (
"Assistant Feedback".to_string(),
serde_json::to_value(&e).unwrap(),
),
};
if let serde_json::Value::Object(ref mut map) = event_properties {

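Note on the added Event::AssistantThreadFeedback arm above: it converts the event into a serde_json::Value with serde_json::to_value before the surrounding code merges the properties into the row. A tiny self-contained sketch of that call — the Feedback struct is invented here, not Zed's actual event type, and it assumes serde with the derive feature plus serde_json:

use serde::Serialize;
use serde_json::{json, to_value};

#[derive(Serialize)]
struct Feedback {
    rating: &'static str,
}

fn main() {
    // to_value serializes any Serialize type into a serde_json::Value.
    let value = to_value(&Feedback { rating: "positive" }).unwrap();
    assert_eq!(value, json!({ "rating": "positive" }));
}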
View File

@@ -562,7 +562,7 @@ async fn test_channel_buffers_and_server_restarts(
deterministic.run_until_parked();
// Client C can't reconnect.
client_c.override_establish_connection(|_, cx| cx.spawn(async |_| future::pending().await));
client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
// Server stops.
server.reset().await;

View File

@@ -103,6 +103,7 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
}),
)
.await;
client_a.fs().recalculate_git_status(Path::new("/a/.git"));
cx_b.run_until_parked();
project_b.update(cx_b, |project, cx| {

View File

@@ -14,6 +14,8 @@ use client::{User, RECEIVE_TIMEOUT};
use collections::{HashMap, HashSet};
use fs::{FakeFs, Fs as _, RemoveOptions};
use futures::{channel::mpsc, StreamExt as _};
use prompt_store::PromptBuilder;
use git::status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode};
use gpui::{
px, size, App, BackgroundExecutor, Entity, Modifiers, MouseButton, MouseDownEvent,
@@ -28,13 +30,11 @@ use language::{
};
use lsp::LanguageServerId;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use project::{
lsp_store::{FormatTrigger, LspFormatTarget},
search::{SearchQuery, SearchResult},
DiagnosticSummary, HoverBlockKind, Project, ProjectPath,
};
use prompt_store::PromptBuilder;
use rand::prelude::*;
use serde_json::json;
use settings::SettingsStore;
@@ -983,7 +983,7 @@ async fn test_server_restarts(
server.reset().await;
// Users A and B reconnect to the call. User C has trouble reconnecting, so it leaves the room.
client_c.override_establish_connection(|_, cx| cx.spawn(async |_| future::pending().await));
client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
executor.advance_clock(RECONNECT_TIMEOUT);
assert_eq!(
room_participants(&room_a, cx_a),
@@ -1156,9 +1156,9 @@ async fn test_server_restarts(
server.reset().await;
// Users A and B have trouble reconnecting, so they leave the room.
client_a.override_establish_connection(|_, cx| cx.spawn(async |_| future::pending().await));
client_b.override_establish_connection(|_, cx| cx.spawn(async |_| future::pending().await));
client_c.override_establish_connection(|_, cx| cx.spawn(async |_| future::pending().await));
client_a.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
client_b.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
executor.advance_clock(RECONNECT_TIMEOUT);
assert_eq!(
room_participants(&room_a, cx_a),
@@ -2623,13 +2623,13 @@ async fn test_git_diff_base_change(
});
// Create remote buffer
let remote_buffer_a = project_remote
let buffer_remote_a = project_remote
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
let remote_unstaged_diff_a = project_remote
.update(cx_b, |p, cx| {
p.open_unstaged_diff(remote_buffer_a.clone(), cx)
p.open_unstaged_diff(buffer_remote_a.clone(), cx)
})
.await
.unwrap();
@@ -2637,7 +2637,7 @@ async fn test_git_diff_base_change(
// Wait for the remote buffer to catch up to the new diff
executor.run_until_parked();
remote_unstaged_diff_a.read_with(cx_b, |diff, cx| {
let buffer = remote_buffer_a.read(cx);
let buffer = buffer_remote_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(staged_text.as_str())
@@ -2653,13 +2653,13 @@ async fn test_git_diff_base_change(
// Open uncommitted changes on the guest, without opening them on the host first
let remote_uncommitted_diff_a = project_remote
.update(cx_b, |p, cx| {
p.open_uncommitted_diff(remote_buffer_a.clone(), cx)
p.open_uncommitted_diff(buffer_remote_a.clone(), cx)
})
.await
.unwrap();
executor.run_until_parked();
remote_uncommitted_diff_a.read_with(cx_b, |diff, cx| {
let buffer = remote_buffer_a.read(cx);
let buffer = buffer_remote_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(committed_text.as_str())
@@ -2703,9 +2703,8 @@ async fn test_git_diff_base_change(
);
});
// Guest receives index text update
remote_unstaged_diff_a.read_with(cx_b, |diff, cx| {
let buffer = remote_buffer_a.read(cx);
let buffer = buffer_remote_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_staged_text.as_str())
@@ -2719,7 +2718,7 @@ async fn test_git_diff_base_change(
});
remote_uncommitted_diff_a.read_with(cx_b, |diff, cx| {
let buffer = remote_buffer_a.read(cx);
let buffer = buffer_remote_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_committed_text.as_str())
@@ -2784,20 +2783,20 @@ async fn test_git_diff_base_change(
});
// Create remote buffer
let remote_buffer_b = project_remote
let buffer_remote_b = project_remote
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
.await
.unwrap();
let remote_unstaged_diff_b = project_remote
.update(cx_b, |p, cx| {
p.open_unstaged_diff(remote_buffer_b.clone(), cx)
p.open_unstaged_diff(buffer_remote_b.clone(), cx)
})
.await
.unwrap();
executor.run_until_parked();
remote_unstaged_diff_b.read_with(cx_b, |diff, cx| {
let buffer = remote_buffer_b.read(cx);
let buffer = buffer_remote_b.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(staged_text.as_str())
@@ -2833,7 +2832,7 @@ async fn test_git_diff_base_change(
});
remote_unstaged_diff_b.read_with(cx_b, |diff, cx| {
let buffer = remote_buffer_b.read(cx);
let buffer = buffer_remote_b.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_staged_text.as_str())
@@ -2958,38 +2957,15 @@ async fn test_git_status_sync(
.insert_tree(
"/dir",
json!({
".git": {},
"a.txt": "a",
"b.txt": "b",
"c.txt": "c",
".git": {},
"a.txt": "a",
"b.txt": "b",
}),
)
.await;
// Initially, a.txt is uncommitted, but present in the index,
// and b.txt is unmerged.
client_a.fs().set_head_for_repo(
"/dir/.git".as_ref(),
&[("b.txt".into(), "B".into()), ("c.txt".into(), "c".into())],
);
client_a.fs().set_index_for_repo(
"/dir/.git".as_ref(),
&[
("a.txt".into(), "".into()),
("b.txt".into(), "B".into()),
("c.txt".into(), "c".into()),
],
);
client_a.fs().set_unmerged_paths_for_repo(
"/dir/.git".as_ref(),
&[(
"b.txt".into(),
UnmergedStatus {
first_head: UnmergedStatusCode::Updated,
second_head: UnmergedStatusCode::Deleted,
},
)],
);
const A_TXT: &str = "a.txt";
const B_TXT: &str = "b.txt";
const A_STATUS_START: FileStatus = FileStatus::Tracked(TrackedStatus {
index_status: StatusCode::Added,
@@ -3000,6 +2976,14 @@ async fn test_git_status_sync(
second_head: UnmergedStatusCode::Deleted,
});
client_a.fs().set_status_for_repo_via_git_operation(
Path::new("/dir/.git"),
&[
(Path::new(A_TXT), A_STATUS_START),
(Path::new(B_TXT), B_STATUS_START),
],
);
let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| {
@@ -3015,7 +2999,7 @@ async fn test_git_status_sync(
#[track_caller]
fn assert_status(
file: impl AsRef<Path>,
file: &impl AsRef<Path>,
status: Option<FileStatus>,
project: &Project,
cx: &App,
@@ -3029,15 +3013,13 @@ async fn test_git_status_sync(
}
project_local.read_with(cx_a, |project, cx| {
assert_status("a.txt", Some(A_STATUS_START), project, cx);
assert_status("b.txt", Some(B_STATUS_START), project, cx);
assert_status("c.txt", None, project, cx);
assert_status(&Path::new(A_TXT), Some(A_STATUS_START), project, cx);
assert_status(&Path::new(B_TXT), Some(B_STATUS_START), project, cx);
});
project_remote.read_with(cx_b, |project, cx| {
assert_status("a.txt", Some(A_STATUS_START), project, cx);
assert_status("b.txt", Some(B_STATUS_START), project, cx);
assert_status("c.txt", None, project, cx);
assert_status(&Path::new(A_TXT), Some(A_STATUS_START), project, cx);
assert_status(&Path::new(B_TXT), Some(B_STATUS_START), project, cx);
});
const A_STATUS_END: FileStatus = FileStatus::Tracked(TrackedStatus {
@@ -3046,42 +3028,30 @@ async fn test_git_status_sync(
});
const B_STATUS_END: FileStatus = FileStatus::Tracked(TrackedStatus {
index_status: StatusCode::Deleted,
worktree_status: StatusCode::Added,
});
const C_STATUS_END: FileStatus = FileStatus::Tracked(TrackedStatus {
index_status: StatusCode::Unmodified,
worktree_status: StatusCode::Modified,
worktree_status: StatusCode::Unmodified,
});
// Delete b.txt from the index, mark conflict as resolved,
// and modify c.txt in the working copy.
client_a.fs().set_index_for_repo(
"/dir/.git".as_ref(),
&[("a.txt".into(), "a".into()), ("c.txt".into(), "c".into())],
client_a.fs().set_status_for_repo_via_working_copy_change(
Path::new("/dir/.git"),
&[
(Path::new(A_TXT), A_STATUS_END),
(Path::new(B_TXT), B_STATUS_END),
],
);
client_a
.fs()
.set_unmerged_paths_for_repo("/dir/.git".as_ref(), &[]);
client_a
.fs()
.atomic_write("/dir/c.txt".into(), "CC".into())
.await
.unwrap();
// Wait for buffer_local_a to receive it
executor.run_until_parked();
// Smoke test status reading
project_local.read_with(cx_a, |project, cx| {
assert_status("a.txt", Some(A_STATUS_END), project, cx);
assert_status("b.txt", Some(B_STATUS_END), project, cx);
assert_status("c.txt", Some(C_STATUS_END), project, cx);
assert_status(&Path::new(A_TXT), Some(A_STATUS_END), project, cx);
assert_status(&Path::new(B_TXT), Some(B_STATUS_END), project, cx);
});
project_remote.read_with(cx_b, |project, cx| {
assert_status("a.txt", Some(A_STATUS_END), project, cx);
assert_status("b.txt", Some(B_STATUS_END), project, cx);
assert_status("c.txt", Some(C_STATUS_END), project, cx);
assert_status(&Path::new(A_TXT), Some(A_STATUS_END), project, cx);
assert_status(&Path::new(B_TXT), Some(B_STATUS_END), project, cx);
});
// And synchronization while joining
@@ -3089,9 +3059,8 @@ async fn test_git_status_sync(
executor.run_until_parked();
project_remote_c.read_with(cx_c, |project, cx| {
assert_status("a.txt", Some(A_STATUS_END), project, cx);
assert_status("b.txt", Some(B_STATUS_END), project, cx);
assert_status("c.txt", Some(C_STATUS_END), project, cx);
assert_status(&Path::new(A_TXT), Some(A_STATUS_END), project, cx);
assert_status(&Path::new(B_TXT), Some(B_STATUS_END), project, cx);
});
}

View File

@@ -128,6 +128,7 @@ enum GitOperation {
WriteGitStatuses {
repo_path: PathBuf,
statuses: Vec<(PathBuf, FileStatus)>,
git_operation: bool,
},
}
@@ -986,6 +987,7 @@ impl RandomizedTest for ProjectCollaborationTest {
GitOperation::WriteGitStatuses {
repo_path,
statuses,
git_operation,
} => {
if !client.fs().directories(false).contains(&repo_path) {
return Err(TestError::Inapplicable);
@@ -1014,9 +1016,17 @@ impl RandomizedTest for ProjectCollaborationTest {
client.fs().create_dir(&dot_git_dir).await?;
}
client
.fs()
.set_status_for_repo(&dot_git_dir, statuses.as_slice());
if git_operation {
client.fs().set_status_for_repo_via_git_operation(
&dot_git_dir,
statuses.as_slice(),
);
} else {
client.fs().set_status_for_repo_via_working_copy_change(
&dot_git_dir,
statuses.as_slice(),
);
}
}
},
}
@@ -1445,13 +1455,18 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation
}
64..=100 => {
let file_paths = generate_file_paths(&repo_path, rng, client);
let statuses = file_paths
.into_iter()
.map(|path| (path, gen_status(rng)))
.collect::<Vec<_>>();
let git_operation = rng.gen::<bool>();
GitOperation::WriteGitStatuses {
repo_path,
statuses,
git_operation,
}
}
_ => unreachable!(),
@@ -1590,24 +1605,15 @@ fn gen_file_name(rng: &mut StdRng) -> String {
}
fn gen_status(rng: &mut StdRng) -> FileStatus {
fn gen_tracked_status(rng: &mut StdRng) -> TrackedStatus {
match rng.gen_range(0..3) {
0 => TrackedStatus {
index_status: StatusCode::Unmodified,
worktree_status: StatusCode::Unmodified,
},
1 => TrackedStatus {
index_status: StatusCode::Modified,
worktree_status: StatusCode::Modified,
},
2 => TrackedStatus {
index_status: StatusCode::Added,
worktree_status: StatusCode::Modified,
},
3 => TrackedStatus {
index_status: StatusCode::Added,
worktree_status: StatusCode::Unmodified,
},
fn gen_status_code(rng: &mut StdRng) -> StatusCode {
match rng.gen_range(0..7) {
0 => StatusCode::Modified,
1 => StatusCode::TypeChanged,
2 => StatusCode::Added,
3 => StatusCode::Deleted,
4 => StatusCode::Renamed,
5 => StatusCode::Copied,
6 => StatusCode::Unmodified,
_ => unreachable!(),
}
}
@@ -1621,12 +1627,17 @@ fn gen_status(rng: &mut StdRng) -> FileStatus {
}
}
match rng.gen_range(0..2) {
0 => FileStatus::Unmerged(UnmergedStatus {
match rng.gen_range(0..4) {
0 => FileStatus::Untracked,
1 => FileStatus::Ignored,
2 => FileStatus::Unmerged(UnmergedStatus {
first_head: gen_unmerged_status_code(rng),
second_head: gen_unmerged_status_code(rng),
}),
1 => FileStatus::Tracked(gen_tracked_status(rng)),
3 => FileStatus::Tracked(TrackedStatus {
index_status: gen_status_code(rng),
worktree_status: gen_status_code(rng),
}),
_ => unreachable!(),
}
}

View File

@@ -208,8 +208,8 @@ impl TestServer {
.unwrap()
.set_id(user_id.to_proto())
.override_authenticate(move |cx| {
let access_token = "the-token".to_string();
cx.spawn(async move |_| {
cx.spawn(|_| async move {
let access_token = "the-token".to_string();
Ok(Credentials {
user_id: user_id.to_proto(),
access_token,
@@ -230,7 +230,7 @@ impl TestServer {
let connection_killers = connection_killers.clone();
let forbid_connections = forbid_connections.clone();
let client_name = client_name.clone();
cx.spawn(async move |cx| {
cx.spawn(move |cx| async move {
if forbid_connections.load(SeqCst) {
Err(EstablishConnectionError::other(anyhow!(
"server is forbidding connections"

View File

@@ -64,9 +64,9 @@ impl ChannelView {
window,
cx,
);
window.spawn(cx, async move |cx| {
window.spawn(cx, |mut cx| async move {
let channel_view = channel_view.await?;
pane.update_in(cx, |pane, window, cx| {
pane.update_in(&mut cx, |pane, window, cx| {
telemetry::event!(
"Channel Notes Opened",
channel_id,
@@ -90,10 +90,10 @@ impl ChannelView {
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
let channel_view = Self::load(channel_id, workspace, window, cx);
window.spawn(cx, async move |cx| {
window.spawn(cx, |mut cx| async move {
let channel_view = channel_view.await?;
pane.update_in(cx, |pane, window, cx| {
pane.update_in(&mut cx, |pane, window, cx| {
let buffer_id = channel_view.read(cx).channel_buffer.read(cx).remote_id(cx);
let existing_view = pane
@@ -166,11 +166,11 @@ impl ChannelView {
let channel_buffer =
channel_store.update(cx, |store, cx| store.open_channel_buffer(channel_id, cx));
window.spawn(cx, async move |cx| {
window.spawn(cx, |mut cx| async move {
let channel_buffer = channel_buffer.await?;
let markdown = markdown.await.log_err();
channel_buffer.update(cx, |channel_buffer, cx| {
channel_buffer.update(&mut cx, |channel_buffer, cx| {
channel_buffer.buffer().update(cx, |buffer, cx| {
buffer.set_language_registry(language_registry);
let Some(markdown) = markdown else {
@@ -583,10 +583,10 @@ impl FollowableItem for ChannelView {
let open = ChannelView::load(ChannelId(state.channel_id), workspace, window, cx);
Some(window.spawn(cx, async move |cx| {
Some(window.spawn(cx, |mut cx| async move {
let this = open.await?;
let task = this.update_in(cx, |this, window, cx| {
let task = this.update_in(&mut cx, |this, window, cx| {
this.remote_id = Some(remote_id);
if let Some(state) = state.editor {

View File

@@ -199,7 +199,7 @@ impl ChatPanel {
workspace: WeakEntity<Workspace>,
cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
let serialized_panel = if let Some(panel) = cx
.background_spawn(async move { KEY_VALUE_STORE.read_kvp(CHAT_PANEL_KEY) })
.await
@@ -211,7 +211,7 @@ impl ChatPanel {
None
};
workspace.update_in(cx, |workspace, window, cx| {
workspace.update_in(&mut cx, |workspace, window, cx| {
let panel = Self::new(workspace, window, cx);
if let Some(serialized_panel) = serialized_panel {
panel.update(cx, |panel, cx| {
@@ -867,10 +867,10 @@ impl ChatPanel {
})
});
cx.spawn(async move |this, cx| {
cx.spawn(|this, mut cx| async move {
let chat = open_chat.await?;
let highlight_message_id = scroll_to_message_id;
let scroll_to_message_id = this.update(cx, |this, cx| {
let scroll_to_message_id = this.update(&mut cx, |this, cx| {
this.set_active_chat(chat.clone(), cx);
scroll_to_message_id.or(this.last_acknowledged_message_id)
@@ -881,11 +881,11 @@ impl ChatPanel {
ChannelChat::load_history_since_message(chat.clone(), message_id, cx.clone())
.await
{
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if let Some(highlight_message_id) = highlight_message_id {
let task = cx.spawn(async move |this, cx| {
let task = cx.spawn(|this, mut cx| async move {
cx.background_executor().timer(Duration::from_secs(2)).await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.highlighted_message.take();
cx.notify();
})

View File

@@ -137,9 +137,11 @@ impl MessageEditor {
.detach();
let markdown = language_registry.language_for_name("Markdown");
cx.spawn_in(window, async move |_, cx| {
cx.spawn_in(window, |_, mut cx| async move {
let markdown = markdown.await.context("failed to load Markdown language")?;
buffer.update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx))
buffer.update(&mut cx, |buffer, cx| {
buffer.set_language(Some(markdown), cx)
})
})
.detach_and_log_err(cx);
@@ -230,7 +232,7 @@ impl MessageEditor {
) {
if let language::BufferEvent::Reparsed | language::BufferEvent::Edited = event {
let buffer = buffer.read(cx).snapshot();
self.mentions_task = Some(cx.spawn_in(window, async move |this, cx| {
self.mentions_task = Some(cx.spawn_in(window, |this, cx| async move {
cx.background_executor()
.timer(MENTIONS_DEBOUNCE_INTERVAL)
.await;
@@ -249,7 +251,7 @@ impl MessageEditor {
self.collect_mention_candidates(buffer, end_anchor, cx)
{
if !candidates.is_empty() {
return cx.spawn(async move |_, cx| {
return cx.spawn(|_, cx| async move {
Ok(Some(
Self::resolve_completions_for_candidates(
&cx,
@@ -268,7 +270,7 @@ impl MessageEditor {
self.collect_emoji_candidates(buffer, end_anchor, cx)
{
if !candidates.is_empty() {
return cx.spawn(async move |_, cx| {
return cx.spawn(|_, cx| async move {
Ok(Some(
Self::resolve_completions_for_candidates(
&cx,
@@ -451,7 +453,7 @@ impl MessageEditor {
async fn find_mentions(
this: WeakEntity<MessageEditor>,
buffer: BufferSnapshot,
cx: &mut AsyncWindowContext,
mut cx: AsyncWindowContext,
) {
let (buffer, ranges) = cx
.background_spawn(async move {
@@ -460,7 +462,7 @@ impl MessageEditor {
})
.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let mut anchor_ranges = Vec::new();
let mut mentioned_user_ids = Vec::new();
let mut text = String::new();

View File

@@ -1569,9 +1569,9 @@ impl CollabPanel {
channel_store.create_channel(&channel_name, *location, cx)
});
if location.is_none() {
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
let channel_id = create.await?;
this.update_in(cx, |this, window, cx| {
this.update_in(&mut cx, |this, window, cx| {
this.show_channel_modal(
channel_id,
channel_modal::Mode::InviteMembers,
@@ -1944,8 +1944,8 @@ impl CollabPanel {
let user_store = self.user_store.clone();
let channel_store = self.channel_store.clone();
cx.spawn_in(window, async move |_, cx| {
workspace.update_in(cx, |workspace, window, cx| {
cx.spawn_in(window, |_, mut cx| async move {
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.toggle_modal(window, cx, |window, cx| {
ChannelModal::new(
user_store.clone(),
@@ -1976,11 +1976,11 @@ impl CollabPanel {
&["Leave", "Cancel"],
cx,
);
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
if answer.await? != 0 {
return Ok(());
}
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.channel_store.update(cx, |channel_store, cx| {
channel_store.remove_member(channel_id, user_id, cx)
})
@@ -2009,13 +2009,13 @@ impl CollabPanel {
&["Remove", "Cancel"],
cx,
);
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
if answer.await? == 0 {
channel_store
.update(cx, |channels, _| channels.remove_channel(channel_id))?
.update(&mut cx, |channels, _| channels.remove_channel(channel_id))?
.await
.notify_async_err(cx);
this.update_in(cx, |_, window, cx| cx.focus_self(window))
.notify_async_err(&mut cx);
this.update_in(&mut cx, |_, window, cx| cx.focus_self(window))
.ok();
}
anyhow::Ok(())
@@ -2043,12 +2043,12 @@ impl CollabPanel {
&["Remove", "Cancel"],
cx,
);
cx.spawn_in(window, async move |_, cx| {
cx.spawn_in(window, |_, mut cx| async move {
if answer.await? == 0 {
user_store
.update(cx, |store, cx| store.remove_contact(user_id, cx))?
.update(&mut cx, |store, cx| store.remove_contact(user_id, cx))?
.await
.notify_async_err(cx);
.notify_async_err(&mut cx);
}
anyhow::Ok(())
})
@@ -2161,11 +2161,11 @@ impl CollabPanel {
.full_width()
.on_click(cx.listener(|this, _, window, cx| {
let client = this.client.clone();
cx.spawn_in(window, async move |_, cx| {
cx.spawn_in(window, |_, mut cx| async move {
client
.authenticate_and_connect(true, &cx)
.await
.notify_async_err(cx);
.notify_async_err(&mut cx);
})
.detach()
})),

View File

@@ -300,9 +300,9 @@ impl PickerDelegate for ChannelModalDelegate {
cx.background_executor().clone(),
));
cx.spawn_in(window, async move |picker, cx| {
cx.spawn_in(window, |picker, mut cx| async move {
picker
.update(cx, |picker, cx| {
.update(&mut cx, |picker, cx| {
let delegate = &mut picker.delegate;
delegate.matching_member_indices.clear();
delegate
@@ -316,10 +316,10 @@ impl PickerDelegate for ChannelModalDelegate {
let search_members = self.channel_store.update(cx, |store, cx| {
store.fuzzy_search_members(self.channel_id, query.clone(), 100, cx)
});
cx.spawn_in(window, async move |picker, cx| {
cx.spawn_in(window, |picker, mut cx| async move {
async {
let members = search_members.await?;
picker.update(cx, |picker, cx| {
picker.update(&mut cx, |picker, cx| {
picker.delegate.has_all_members =
query.is_empty() && members.len() < 100;
picker.delegate.matching_member_indices =
@@ -338,10 +338,10 @@ impl PickerDelegate for ChannelModalDelegate {
let search_users = self
.user_store
.update(cx, |store, cx| store.fuzzy_search_users(query, cx));
cx.spawn_in(window, async move |picker, cx| {
cx.spawn_in(window, |picker, mut cx| async move {
async {
let users = search_users.await?;
picker.update(cx, |picker, cx| {
picker.update(&mut cx, |picker, cx| {
picker.delegate.matching_users = users;
cx.notify();
})?;
@@ -489,9 +489,9 @@ impl ChannelModalDelegate {
let update = self.channel_store.update(cx, |store, cx| {
store.set_member_role(self.channel_id, user_id, new_role, cx)
});
cx.spawn_in(window, async move |picker, cx| {
cx.spawn_in(window, |picker, mut cx| async move {
update.await?;
picker.update_in(cx, |picker, window, cx| {
picker.update_in(&mut cx, |picker, window, cx| {
let this = &mut picker.delegate;
if let Some(member) = this.members.iter_mut().find(|m| m.user.id == user_id) {
member.role = new_role;
@@ -513,9 +513,9 @@ impl ChannelModalDelegate {
let update = self.channel_store.update(cx, |store, cx| {
store.remove_member(self.channel_id, user_id, cx)
});
cx.spawn_in(window, async move |picker, cx| {
cx.spawn_in(window, |picker, mut cx| async move {
update.await?;
picker.update_in(cx, |picker, window, cx| {
picker.update_in(&mut cx, |picker, window, cx| {
let this = &mut picker.delegate;
if let Some(ix) = this.members.iter_mut().position(|m| m.user.id == user_id) {
this.members.remove(ix);
@@ -551,10 +551,10 @@ impl ChannelModalDelegate {
store.invite_member(self.channel_id, user.id, ChannelRole::Member, cx)
});
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
invite_member.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
let new_member = ChannelMembership {
user,
kind: proto::channel_member::Kind::Invitee,

View File

@@ -102,10 +102,10 @@ impl PickerDelegate for ContactFinderDelegate {
.user_store
.update(cx, |store, cx| store.fuzzy_search_users(query, cx));
cx.spawn_in(window, async move |picker, cx| {
cx.spawn_in(window, |picker, mut cx| async move {
async {
let potential_contacts = search_users.await?;
picker.update(cx, |picker, cx| {
picker.update(&mut cx, |picker, cx| {
picker.delegate.potential_contacts = potential_contacts.into();
cx.notify();
})?;

View File

@@ -96,10 +96,10 @@ impl NotificationPanel {
cx.new(|cx| {
let mut status = client.status();
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
while (status.next().await).is_some() {
if this
.update(cx, |_: &mut Self, cx| {
.update(&mut cx, |_: &mut Self, cx| {
cx.notify();
})
.is_err()
@@ -181,7 +181,7 @@ impl NotificationPanel {
workspace: WeakEntity<Workspace>,
cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
let serialized_panel = if let Some(panel) = cx
.background_spawn(async move { KEY_VALUE_STORE.read_kvp(NOTIFICATION_PANEL_KEY) })
.await
@@ -193,7 +193,7 @@ impl NotificationPanel {
None
};
workspace.update_in(cx, |workspace, window, cx| {
workspace.update_in(&mut cx, |workspace, window, cx| {
let panel = Self::new(workspace, window, cx);
if let Some(serialized_panel) = serialized_panel {
panel.update(cx, |panel, cx| {
@@ -445,12 +445,12 @@ impl NotificationPanel {
.entry(notification_id)
.or_insert_with(|| {
let client = self.client.clone();
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
cx.background_executor().timer(MARK_AS_READ_DELAY).await;
client
.request(proto::MarkNotificationRead { notification_id })
.await?;
this.update(cx, |this, _| {
this.update(&mut cx, |this, _| {
this.mark_as_read_tasks.remove(&notification_id);
})?;
Ok(())
@@ -556,9 +556,9 @@ impl NotificationPanel {
let notification_id = entry.id;
self.current_notification_toast = Some((
notification_id,
cx.spawn_in(window, async move |this, cx| {
cx.spawn_in(window, |this, mut cx| async move {
cx.background_executor().timer(TOAST_DURATION).await;
this.update(cx, |this, cx| this.remove_toast(notification_id, cx))
this.update(&mut cx, |this, cx| this.remove_toast(notification_id, cx))
.ok();
}),
));
@@ -643,7 +643,7 @@ impl Render for NotificationPanel {
move |_, window, cx| {
let client = client.clone();
window
.spawn(cx, async move |cx| {
.spawn(cx, move |cx| async move {
client
.authenticate_and_connect(true, &cx)
.log_err()

View File

@@ -12,12 +12,12 @@ use workspace::AppState;
pub fn init(app_state: &Arc<AppState>, cx: &mut App) {
let app_state = Arc::downgrade(app_state);
let mut incoming_call = ActiveCall::global(cx).read(cx).incoming();
cx.spawn(async move |cx| {
cx.spawn(|mut cx| async move {
let mut notification_windows: Vec<WindowHandle<IncomingCallNotification>> = Vec::new();
while let Some(incoming_call) = incoming_call.next().await {
for window in notification_windows.drain(..) {
window
.update(cx, |_, window, _| {
.update(&mut cx, |_, window, _| {
window.remove_window();
})
.log_err();
@@ -75,7 +75,7 @@ impl IncomingCallNotificationState {
let initial_project_id = self.call.initial_project.as_ref().map(|project| project.id);
let app_state = self.app_state.clone();
let cx: &mut App = cx;
cx.spawn(async move |cx| {
cx.spawn(|cx| async move {
join.await?;
if let Some(project_id) = initial_project_id {
cx.update(|cx| {
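
The init function above spawns one long-lived task that drains the stream of incoming calls and rebuilds the notification windows on every event. Here is a small self-contained version of that drain-a-stream loop, with futures::stream::iter and made-up call events standing in for ActiveCall's incoming() stream, and block_on standing in for cx.spawn.

use futures::{executor::block_on, stream, StreamExt};

fn main() {
    block_on(async {
        // Stand-in for ActiveCall::incoming(); any Stream works the same way.
        let mut incoming_calls = stream::iter(vec!["call from alice", "call from bob"]);
        // The real code runs this loop inside cx.spawn and refreshes the
        // notification windows on each event.
        while let Some(call) = incoming_calls.next().await {
            println!("showing notification: {call}");
        }
    });
}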

View File

@@ -327,13 +327,13 @@ impl PickerDelegate for CommandPaletteDelegate {
});
self.updating_matches = Some((task, rx.clone()));
cx.spawn_in(window, async move |picker, cx| {
cx.spawn_in(window, move |picker, mut cx| async move {
let Some((commands, matches)) = rx.recv().await else {
return;
};
picker
.update(cx, |picker, cx| {
.update(&mut cx, |picker, cx| {
picker
.delegate
.matches_updated(query, commands, matches, cx)

View File

@@ -560,7 +560,7 @@ impl SerializableItem for ComponentPreview {
let user_store = project.read(cx).user_store().clone();
let language_registry = project.read(cx).languages().clone();
window.spawn(cx, async move |cx| {
window.spawn(cx, |mut cx| async move {
let user_store = user_store.clone();
let language_registry = language_registry.clone();
let weak_workspace = workspace.clone();

View File

@@ -171,17 +171,13 @@ impl Client {
let notification_handlers = notification_handlers.clone();
let response_handlers = response_handlers.clone();
let transport = transport.clone();
async move |cx| {
move |cx| {
Self::handle_input(transport, notification_handlers, response_handlers, cx)
.log_err()
.await
}
});
let stderr_input_task = cx.spawn({
let transport = transport.clone();
async move |_| Self::handle_stderr(transport).log_err().await
});
let input_task = cx.spawn(async move |_| {
let stderr_input_task = cx.spawn(|_| Self::handle_stderr(transport.clone()).log_err());
let input_task = cx.spawn(|_| async move {
let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task);
stdout.or(stderr)
});
@@ -221,7 +217,7 @@ impl Client {
transport: Arc<dyn Transport>,
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
response_handlers: Arc<Mutex<Option<HashMap<RequestId, ResponseHandler>>>>,
cx: &mut AsyncApp,
cx: AsyncApp,
) -> anyhow::Result<()> {
let mut receiver = transport.receive();

View File

@@ -41,15 +41,16 @@ impl ExtensionContextServerProxy for ContextServerFactoryRegistryProxy {
let id = id.clone();
let extension = extension.clone();
cx.spawn(async move |cx| {
let extension_project = project.update(cx, |project, cx| {
Arc::new(ExtensionProject {
worktree_ids: project
.visible_worktrees(cx)
.map(|worktree| worktree.read(cx).id().to_proto())
.collect(),
})
})?;
cx.spawn(|mut cx| async move {
let extension_project =
project.update(&mut cx, |project, cx| {
Arc::new(ExtensionProject {
worktree_ids: project
.visible_worktrees(cx)
.map(|worktree| worktree.read(cx).id().to_proto())
.collect(),
})
})?;
let command = extension
.context_server_command(id.clone(), extension_project)

View File

@@ -147,15 +147,15 @@ impl ContextServerManager {
if self.update_servers_task.is_some() {
self.needs_server_update = true;
} else {
self.update_servers_task = Some(cx.spawn(async move |this, cx| {
this.update(cx, |this, _| {
self.update_servers_task = Some(cx.spawn(|this, mut cx| async move {
this.update(&mut cx, |this, _| {
this.needs_server_update = false;
})?;
Self::maintain_servers(this.clone(), cx).await?;
Self::maintain_servers(this.clone(), cx.clone()).await?;
this.update(cx, |this, cx| {
let has_any_context_servers = !this.running_servers().is_empty();
this.update(&mut cx, |this, cx| {
let has_any_context_servers = !this.servers().is_empty();
if has_any_context_servers {
CommandPaletteFilter::update_global(cx, |filter, _cx| {
filter.show_namespace(CONTEXT_SERVERS_NAMESPACE);
@@ -180,44 +180,19 @@ impl ContextServerManager {
.cloned()
}
pub fn start_server(
&self,
server: Arc<ContextServer>,
cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> {
cx.spawn(async move |this, cx| {
let id = server.id.clone();
server.start(&cx).await?;
this.update(cx, |_, cx| cx.emit(Event::ServerStarted { server_id: id }))?;
Ok(())
})
}
pub fn stop_server(
&self,
server: Arc<ContextServer>,
cx: &mut Context<Self>,
) -> anyhow::Result<()> {
server.stop()?;
cx.emit(Event::ServerStopped {
server_id: server.id(),
});
Ok(())
}
pub fn restart_server(
&mut self,
id: &Arc<str>,
cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> {
let id = id.clone();
cx.spawn(async move |this, cx| {
if let Some(server) = this.update(cx, |this, _cx| this.servers.remove(&id))? {
cx.spawn(|this, mut cx| async move {
if let Some(server) = this.update(&mut cx, |this, _cx| this.servers.remove(&id))? {
server.stop()?;
let config = server.config();
let new_server = Arc::new(ContextServer::new(id.clone(), config));
new_server.clone().start(&cx).await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.servers.insert(id.clone(), new_server);
cx.emit(Event::ServerStopped {
server_id: id.clone(),
@@ -231,11 +206,7 @@ impl ContextServerManager {
})
}
pub fn all_servers(&self) -> Vec<Arc<ContextServer>> {
self.servers.values().cloned().collect()
}
pub fn running_servers(&self) -> Vec<Arc<ContextServer>> {
pub fn servers(&self) -> Vec<Arc<ContextServer>> {
self.servers
.values()
.filter(|server| server.client().is_some())
@@ -243,10 +214,10 @@ impl ContextServerManager {
.collect()
}
async fn maintain_servers(this: WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
async fn maintain_servers(this: WeakEntity<Self>, mut cx: AsyncApp) -> Result<()> {
let mut desired_servers = HashMap::default();
let (registry, project) = this.update(cx, |this, cx| {
let (registry, project) = this.update(&mut cx, |this, cx| {
let location = this.project.read(cx).worktrees(cx).next().map(|worktree| {
settings::SettingsLocation {
worktree_id: worktree.read(cx).id(),
@@ -260,7 +231,7 @@ impl ContextServerManager {
})?;
for (id, factory) in
registry.read_with(cx, |registry, _| registry.context_server_factories())?
registry.read_with(&cx, |registry, _| registry.context_server_factories())?
{
let config = desired_servers.entry(id).or_default();
if config.command.is_none() {
@@ -273,7 +244,7 @@ impl ContextServerManager {
let mut servers_to_start = HashMap::default();
let mut servers_to_stop = HashMap::default();
this.update(cx, |this, _cx| {
this.update(&mut cx, |this, _cx| {
this.servers.retain(|id, server| {
if desired_servers.contains_key(id) {
true
@@ -299,12 +270,16 @@ impl ContextServerManager {
for (id, server) in servers_to_stop {
server.stop().log_err();
this.update(cx, |_, cx| cx.emit(Event::ServerStopped { server_id: id }))?;
this.update(&mut cx, |_, cx| {
cx.emit(Event::ServerStopped { server_id: id })
})?;
}
for (id, server) in servers_to_start {
if server.start(&cx).await.log_err().is_some() {
this.update(cx, |_, cx| cx.emit(Event::ServerStarted { server_id: id }))?;
this.update(&mut cx, |_, cx| {
cx.emit(Event::ServerStarted { server_id: id })
})?;
}
}
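
maintain_servers reconciles the configured (desired) context servers against the ones currently running, deriving servers_to_stop and servers_to_start. The sketch below shows just that reconcile step, with plain HashMaps and made-up server ids standing in for the real config types.

use std::collections::HashMap;

fn main() {
    // Hypothetical ids and command strings, purely for illustration.
    let desired: HashMap<&str, &str> =
        [("postgres", "cmd-a"), ("github", "cmd-b")].into_iter().collect();
    let running: HashMap<&str, &str> =
        [("postgres", "cmd-a"), ("legacy", "cmd-c")].into_iter().collect();

    let mut to_stop = Vec::new();
    let mut to_start = Vec::new();

    // Running but no longer desired (or configured differently) -> stop.
    for (id, cfg) in &running {
        if desired.get(id) != Some(cfg) {
            to_stop.push(*id);
        }
    }
    // Desired but not already running with the same config -> start.
    for (id, cfg) in &desired {
        if running.get(id) != Some(cfg) {
            to_start.push(*id);
        }
    }

    println!("stop: {to_stop:?}, start: {to_start:?}");
}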

View File

@@ -47,13 +47,13 @@ impl StdioTransport {
let (stdout_sender, stdout_receiver) = channel::unbounded::<String>();
let (stderr_sender, stderr_receiver) = channel::unbounded::<String>();
cx.spawn(async move |_| Self::handle_output(stdin, stdout_receiver).log_err().await)
cx.spawn(|_| Self::handle_output(stdin, stdout_receiver).log_err())
.detach();
cx.spawn(async move |_| Self::handle_input(stdout, stdin_sender).await)
cx.spawn(|_| async move { Self::handle_input(stdout, stdin_sender).await })
.detach();
cx.spawn(async move |_| Self::handle_err(stderr, stderr_sender).await)
cx.spawn(|_| async move { Self::handle_err(stderr, stderr_sender).await })
.detach();
Ok(Self {
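
StdioTransport::new spawns three detached tasks that pump the child process's stdin, stdout, and stderr through unbounded channels. The stand-alone approximation below shows one such pump pair, using futures channels and join! in place of gpui's spawn and real process pipes.

use futures::{channel::mpsc, executor::block_on, StreamExt};

fn main() {
    block_on(async {
        let (stdout_tx, mut stdout_rx) = mpsc::unbounded::<String>();

        // Stand-in for the task reading the child's stdout into a channel.
        let producer = async move {
            stdout_tx.unbounded_send("line 1".to_string()).ok();
            stdout_tx.unbounded_send("line 2".to_string()).ok();
            // Dropping the sender closes the channel and ends the consumer.
        };
        // Stand-in for the task draining that channel on the other side.
        let consumer = async move {
            while let Some(line) = stdout_rx.next().await {
                println!("stdout: {line}");
            }
        };
        futures::join!(producer, consumer);
    });
}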

View File

@@ -226,17 +226,17 @@ impl RegisteredBuffer {
let id = buffer.entity_id();
let prev_pending_change =
mem::replace(&mut self.pending_buffer_change, Task::ready(None));
self.pending_buffer_change = cx.spawn(async move |copilot, cx| {
self.pending_buffer_change = cx.spawn(move |copilot, mut cx| async move {
prev_pending_change.await;
let old_version = copilot
.update(cx, |copilot, _| {
.update(&mut cx, |copilot, _| {
let server = copilot.server.as_authenticated().log_err()?;
let buffer = server.registered_buffers.get_mut(&id)?;
Some(buffer.snapshot.version.clone())
})
.ok()??;
let new_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()).ok()?;
let new_snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot()).ok()?;
let content_changes = cx
.background_spawn({
@@ -265,7 +265,7 @@ impl RegisteredBuffer {
.await;
copilot
.update(cx, |copilot, _| {
.update(&mut cx, |copilot, _| {
let server = copilot.server.as_authenticated().log_err()?;
let buffer = server.registered_buffers.get_mut(&id)?;
if !content_changes.is_empty() {
@@ -388,7 +388,7 @@ impl Copilot {
let node_runtime = self.node_runtime.clone();
let env = self.build_env(&language_settings.edit_predictions.copilot);
let start_task = cx
.spawn(async move |this, cx| {
.spawn(move |this, cx| {
Self::start_language_server(
server_id,
http,
@@ -398,7 +398,6 @@ impl Copilot {
awaiting_sign_in_after_start,
cx,
)
.await
})
.shared();
self.server = CopilotServer::Starting { task: start_task };
@@ -443,7 +442,7 @@ impl Copilot {
},
"copilot".into(),
Default::default(),
&mut cx.to_async(),
cx.to_async(),
);
let http = http_client::FakeHttpClient::create(|_| async { unreachable!() });
let node_runtime = NodeRuntime::unavailable();
@@ -469,7 +468,7 @@ impl Copilot {
env: Option<HashMap<String, String>>,
this: WeakEntity<Self>,
awaiting_sign_in_after_start: bool,
cx: &mut AsyncApp,
mut cx: AsyncApp,
) {
let start_language_server = async {
let server_path = get_copilot_lsp(http).await?;
@@ -496,7 +495,7 @@ impl Copilot {
root_path,
None,
Default::default(),
cx,
cx.clone(),
)?;
server
@@ -536,7 +535,7 @@ impl Copilot {
};
let server = start_language_server.await;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
cx.notify();
match server {
Ok((server, status)) => {
@@ -570,7 +569,7 @@ impl Copilot {
SignInStatus::SignedOut { .. } | SignInStatus::Unauthorized { .. } => {
let lsp = server.lsp.clone();
let task = cx
.spawn(async move |this, cx| {
.spawn(|this, mut cx| async move {
let sign_in = async {
let sign_in = lsp
.request::<request::SignInInitiate>(
@@ -582,7 +581,7 @@ impl Copilot {
Ok(request::SignInStatus::Ok { user: Some(user) })
}
request::SignInInitiateResult::PromptUserDeviceFlow(flow) => {
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if let CopilotServer::Running(RunningCopilotServer {
sign_in_status: status,
..
@@ -611,7 +610,7 @@ impl Copilot {
};
let sign_in = sign_in.await;
this.update(cx, |this, cx| match sign_in {
this.update(&mut cx, |this, cx| match sign_in {
Ok(status) => {
this.update_sign_in_status(status, cx);
Ok(())
@@ -671,7 +670,7 @@ impl Copilot {
let http = self.http.clone();
let node_runtime = self.node_runtime.clone();
let server_id = self.server_id;
async move |this, cx| {
move |this, cx| async move {
clear_copilot_dir().await;
Self::start_language_server(server_id, http, node_runtime, env, this, false, cx)
.await
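
The pending_buffer_change chaining near the top of this file's diff (mem::replace with Task::ready(None), then awaiting the previous task inside the new one) keeps buffer-change notifications strictly ordered. Below is a reduced sketch of that chaining, with a boxed future standing in for gpui's Task and the futures crate assumed for block_on.

use std::future::Future;
use std::mem;
use std::pin::Pin;

// Stand-in for the gpui Task stored in pending_buffer_change.
type PendingChange = Pin<Box<dyn Future<Output = ()>>>;

struct RegisteredBuffer {
    pending_change: PendingChange,
}

fn ready() -> PendingChange {
    Box::pin(async {})
}

impl RegisteredBuffer {
    fn report_change(&mut self, n: u32) {
        // Take the previous task and await it inside the new one, so change
        // notifications are processed strictly in the order they arrived.
        let prev = mem::replace(&mut self.pending_change, ready());
        self.pending_change = Box::pin(async move {
            prev.await;
            println!("notified server about change {n}");
        });
    }
}

fn main() {
    let mut buffer = RegisteredBuffer { pending_change: ready() };
    buffer.report_change(1);
    buffer.report_change(2);
    // Driving the latest task runs its predecessors first: 1, then 2.
    futures::executor::block_on(buffer.pending_change);
}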

View File

@@ -241,7 +241,7 @@ impl CopilotChat {
let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
let dir_path = copilot_chat_config_dir();
cx.spawn(async move |cx| {
cx.spawn(|cx| async move {
let mut parent_watch_rx = watch_config_dir(
cx.background_executor(),
fs.clone(),

View File

@@ -83,7 +83,7 @@ impl EditPredictionProvider for CopilotCompletionProvider {
cx: &mut Context<Self>,
) {
let copilot = self.copilot.clone();
self.pending_refresh = Some(cx.spawn(async move |this, cx| {
self.pending_refresh = Some(cx.spawn(|this, mut cx| async move {
if debounce {
cx.background_executor()
.timer(COPILOT_DEBOUNCE_TIMEOUT)
@@ -91,12 +91,12 @@ impl EditPredictionProvider for CopilotCompletionProvider {
}
let completions = copilot
.update(cx, |copilot, cx| {
.update(&mut cx, |copilot, cx| {
copilot.completions(&buffer, cursor_position, cx)
})?
.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
if !completions.is_empty() {
this.cycled = false;
this.pending_refresh = None;
@@ -153,14 +153,14 @@ impl EditPredictionProvider for CopilotCompletionProvider {
cx.notify();
} else {
let copilot = self.copilot.clone();
self.pending_cycling_refresh = Some(cx.spawn(async move |this, cx| {
self.pending_cycling_refresh = Some(cx.spawn(|this, mut cx| async move {
let completions = copilot
.update(cx, |copilot, cx| {
.update(&mut cx, |copilot, cx| {
copilot.completions_cycling(&buffer, cursor_position, cx)
})?
.await?;
this.update(cx, |this, cx| {
this.update(&mut cx, |this, cx| {
this.cycled = true;
this.file_extension = buffer.read(cx).file().and_then(|file| {
Some(

Some files were not shown because too many files have changed in this diff.