Compare commits
31 Commits
| Author | SHA1 | Date |
|---|---|---|
| | f5f7eb0c70 | |
| | b15c7be652 | |
| | 6cde3b2502 | |
| | c12c220d3e | |
| | 744de9cefa | |
| | a75230c63f | |
| | 9ecc2445e2 | |
| | d3c1a5fe20 | |
| | 8dfe7ebdb6 | |
| | a1a4e8ebe5 | |
| | fc448d7d44 | |
| | 56c70e9d1c | |
| | 52cdc838f5 | |
| | 6f3c81ab8c | |
| | 6ab75d3d4b | |
| | 34d2063456 | |
| | bb56a17798 | |
| | 8c008cb9ae | |
| | b2a8c08cef | |
| | e080b3a75a | |
| | f5557a1867 | |
| | 11a356b6e6 | |
| | 3614b623b8 | |
| | 4e1b674e39 | |
| | 532dce97f1 | |
| | 97ea848ac0 | |
| | 49ebf59d05 | |
| | 20b457de2a | |
| | a63fada5f5 | |
| | 23adeab8d2 | |
| | 6e0f65e05a | |
.github/ISSUE_TEMPLATE/01_bug_ai.yml (vendored, 18 changed lines)

@@ -39,21 +39,3 @@ body:
        Output of "zed: copy system specs into clipboard"
    validations:
      required: true

  - type: textarea
    attributes:
      label: If applicable, attach your `Zed.log` file to this issue.
      description: |
        From the command palette, run `zed: open log` to see the last 1000 lines.
        Or run `zed: reveal log in file manager` to reveal the log file itself.
      value: |
        <details><summary>Zed.log</summary>

        <!-- Paste your log inside the code block. -->
        ```log

        ```

        </details>
    validations:
      required: false
.github/ISSUE_TEMPLATE/04_bug_debugger.yml (vendored, 18 changed lines)

@@ -33,21 +33,3 @@ body:
        Output of "zed: copy system specs into clipboard"
    validations:
      required: true

  - type: textarea
    attributes:
      label: If applicable, attach your `Zed.log` file to this issue.
      description: |
        From the command palette, run `zed: open log` to see the last 1000 lines.
        Or run `zed: reveal log in file manager` to reveal the log file itself.
      value: |
        <details><summary>Zed.log</summary>

        <!-- Paste your log inside the code block. -->
        ```log

        ```

        </details>
    validations:
      required: false
.github/ISSUE_TEMPLATE/06_bug_git.yml (vendored, 18 changed lines)

@@ -33,21 +33,3 @@ body:
        Output of "zed: copy system specs into clipboard"
    validations:
      required: true

  - type: textarea
    attributes:
      label: If applicable, attach your `Zed.log` file to this issue.
      description: |
        From the command palette, run `zed: open log` to see the last 1000 lines.
        Or run `zed: reveal log in file manager` to reveal the log file itself.
      value: |
        <details><summary>Zed.log</summary>

        <!-- Paste your log inside the code block. -->
        ```log

        ```

        </details>
    validations:
      required: false
.github/ISSUE_TEMPLATE/07_bug_windows.yml (vendored, 18 changed lines)

@@ -33,21 +33,3 @@ body:
        Output of "zed: copy system specs into clipboard"
    validations:
      required: true

  - type: textarea
    attributes:
      label: If applicable, attach your `Zed.log` file to this issue.
      description: |
        From the command palette, run `zed: open log` to see the last 1000 lines.
        Or run `zed: reveal log in file manager` to reveal the log file itself.
      value: |
        <details><summary>Zed.log</summary>

        <!-- Paste your log inside the code block. -->
        ```log

        ```

        </details>
    validations:
      required: false
.github/ISSUE_TEMPLATE/10_bug_report.yml (vendored, 17 changed lines)

@@ -56,20 +56,3 @@ body:
        Output of "zed: copy system specs into clipboard"
    validations:
      required: true
  - type: textarea
    attributes:
      label: If applicable, attach your `Zed.log` file to this issue.
      description: |
        From the command palette, run `zed: open log` to see the last 1000 lines.
        Or run `zed: reveal log in file manager` to reveal the log file itself.
      value: |
        <details><summary>Zed.log</summary>

        <!-- Paste your log inside the code block. -->
        ```log

        ```

        </details>
    validations:
      required: false
.github/ISSUE_TEMPLATE/11_crash_report.yml (vendored, 6 changed lines)

@@ -35,8 +35,10 @@ body:
    attributes:
      label: If applicable, attach your `Zed.log` file to this issue.
      description: |
        From the command palette, run `zed: open log` to see the last 1000 lines.
        Or run `zed: reveal log in file manager` to reveal the log file itself.
        macOS: `~/Library/Logs/Zed/Zed.log`
        Windows: `C:\Users\YOU\AppData\Local\Zed\logs\Zed.log`
        Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
        If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
      value: |
        <details><summary>Zed.log</summary>
.github/workflows/after_release.yml (vendored, 88 lines removed)

@@ -1,88 +0,0 @@
# Generated from xtask::workflows::after_release
# Rebuild with `cargo xtask workflows`.
name: after_release
on:
  release:
    types:
    - published
jobs:
  rebuild_releases_page:
    if: github.repository_owner == 'zed-industries'
    runs-on: namespace-profile-2x4-ubuntu-2404
    steps:
    - name: after_release::rebuild_releases_page::refresh_cloud_releases
      run: curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name }}
      shell: bash -euxo pipefail {0}
    - name: after_release::rebuild_releases_page::redeploy_zed_dev
      run: npm exec --yes -- vercel@37 --token="$VERCEL_TOKEN" --scope zed-industries redeploy https://zed.dev
      shell: bash -euxo pipefail {0}
      env:
        VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }}
  post_to_discord:
    needs:
    - rebuild_releases_page
    if: github.repository_owner == 'zed-industries'
    runs-on: namespace-profile-2x4-ubuntu-2404
    steps:
    - id: get-release-url
      name: after_release::post_to_discord::get_release_url
      run: |
        if [ "${{ github.event.release.prerelease }}" == "true" ]; then
          URL="https://zed.dev/releases/preview"
        else
          URL="https://zed.dev/releases/stable"
        fi

        echo "URL=$URL" >> "$GITHUB_OUTPUT"
      shell: bash -euxo pipefail {0}
    - id: get-content
      name: after_release::post_to_discord::get_content
      uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757
      with:
        stringToTruncate: |
          📣 Zed [${{ github.event.release.tag_name }}](<${{ steps.get-release-url.outputs.URL }}>) was just released!

          ${{ github.event.release.body }}
        maxLength: 2000
        truncationSymbol: '...'
    - name: after_release::post_to_discord::discord_webhook_action
      uses: tsickert/discord-webhook@c840d45a03a323fbc3f7507ac7769dbd91bfb164
      with:
        webhook-url: ${{ secrets.DISCORD_WEBHOOK_RELEASE_NOTES }}
        content: ${{ steps.get-content.outputs.string }}
  publish_winget:
    runs-on: self-32vcpu-windows-2022
    steps:
    - id: set-package-name
      name: after_release::publish_winget::set_package_name
      run: |
        if [ "${{ github.event.release.prerelease }}" == "true" ]; then
          PACKAGE_NAME=ZedIndustries.Zed.Preview
        else
          PACKAGE_NAME=ZedIndustries.Zed
        fi

        echo "PACKAGE_NAME=$PACKAGE_NAME" >> "$GITHUB_OUTPUT"
      shell: bash -euxo pipefail {0}
    - name: after_release::publish_winget::winget_releaser
      uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f
      with:
        identifier: ${{ steps.set-package-name.outputs.PACKAGE_NAME }}
        max-versions-to-keep: 5
        token: ${{ secrets.WINGET_TOKEN }}
  create_sentry_release:
    if: github.repository_owner == 'zed-industries'
    runs-on: namespace-profile-2x4-ubuntu-2404
    steps:
    - name: steps::checkout_repo
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        clean: false
    - name: release::create_sentry_release
      uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
      with:
        environment: production
      env:
        SENTRY_ORG: zed-dev
        SENTRY_PROJECT: zed
        SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
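The deleted workflows in this diff (`after_release.yml` above, `cherry_pick.yml` below, and the others carrying the same header) are generated files whose comments point back to Rust definitions under `xtask::workflows`. A minimal sketch of the regeneration step those headers name, run from the repository root:

```bash
# Regeneration command quoted from the "Rebuild with ..." header comment of the
# generated workflows; re-run it after editing the definitions in xtask::workflows.
cargo xtask workflows
```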
.github/workflows/cherry_pick.yml (vendored, 39 lines removed)

@@ -1,39 +0,0 @@
# Generated from xtask::workflows::cherry_pick
# Rebuild with `cargo xtask workflows`.
name: cherry_pick
on:
  workflow_dispatch:
    inputs:
      commit:
        description: commit
        required: true
        type: string
      branch:
        description: branch
        required: true
        type: string
      channel:
        description: channel
        required: true
        type: string
jobs:
  run_cherry_pick:
    runs-on: namespace-profile-2x4-ubuntu-2404
    steps:
    - name: steps::checkout_repo
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        clean: false
    - id: get-app-token
      name: cherry_pick::run_cherry_pick::authenticate_as_zippy
      uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1
      with:
        app-id: ${{ secrets.ZED_ZIPPY_APP_ID }}
        private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
    - name: cherry_pick::run_cherry_pick::cherry_pick
      run: ./script/cherry-pick ${{ inputs.branch }} ${{ inputs.commit }} ${{ inputs.channel }}
      shell: bash -euxo pipefail {0}
      env:
        GIT_COMMITTER_NAME: Zed Zippy
        GIT_COMMITTER_EMAIL: hi@zed.dev
        GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }}
.github/workflows/ci.yml (vendored, new file, 869 lines)

@@ -0,0 +1,869 @@
name: CI

on:
  push:
    branches:
      - main
      - "v[0-9]+.[0-9]+.x"
    tags:
      - "v*"

  pull_request:
    branches:
      - "**"

concurrency:
  # Allow only one workflow per any non-`main` branch.
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
  cancel-in-progress: true

env:
  CARGO_TERM_COLOR: always
  CARGO_INCREMENTAL: 0
  RUST_BACKTRACE: 1
  DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
  DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
  ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}

jobs:
  job_spec:
    name: Decide which jobs to run
    if: github.repository_owner == 'zed-industries'
    outputs:
      run_tests: ${{ steps.filter.outputs.run_tests }}
      run_license: ${{ steps.filter.outputs.run_license }}
      run_docs: ${{ steps.filter.outputs.run_docs }}
      run_nix: ${{ steps.filter.outputs.run_nix }}
      run_actionlint: ${{ steps.filter.outputs.run_actionlint }}
    runs-on:
      - namespace-profile-2x4-ubuntu-2404
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          # 350 is arbitrary; ~10days of history on main (5secs); full history is ~25secs
          fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
      - name: Fetch git history and generate output filters
        id: filter
        run: |
          if [ -z "$GITHUB_BASE_REF" ]; then
            echo "Not in a PR context (i.e., push to main/stable/preview)"
            COMPARE_REV="$(git rev-parse HEAD~1)"
          else
            echo "In a PR context comparing to pull_request.base.ref"
            git fetch origin "$GITHUB_BASE_REF" --depth=350
            COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
          fi
          CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

          # Specify anything which should potentially skip full test suite in this regex:
          # - docs/
          # - script/update_top_ranking_issues/
          # - .github/ISSUE_TEMPLATE/
          # - .github/workflows/ (except .github/workflows/ci.yml)
          SKIP_REGEX='^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'

          echo "$CHANGED_FILES" | grep -qvP "$SKIP_REGEX" && \
          echo "run_tests=true" >> "$GITHUB_OUTPUT" || \
          echo "run_tests=false" >> "$GITHUB_OUTPUT"

          echo "$CHANGED_FILES" | grep -qP '^docs/' && \
          echo "run_docs=true" >> "$GITHUB_OUTPUT" || \
          echo "run_docs=false" >> "$GITHUB_OUTPUT"

          echo "$CHANGED_FILES" | grep -qP '^\.github/(workflows/|actions/|actionlint.yml)' && \
          echo "run_actionlint=true" >> "$GITHUB_OUTPUT" || \
          echo "run_actionlint=false" >> "$GITHUB_OUTPUT"

          echo "$CHANGED_FILES" | grep -qP '^(Cargo.lock|script/.*licenses)' && \
          echo "run_license=true" >> "$GITHUB_OUTPUT" || \
          echo "run_license=false" >> "$GITHUB_OUTPUT"

          echo "$CHANGED_FILES" | grep -qP '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' && \
          echo "$GITHUB_REF_NAME" | grep -qvP '^v[0-9]+\.[0-9]+\.[0-9x](-pre)?$' && \
          echo "run_nix=true" >> "$GITHUB_OUTPUT" || \
          echo "run_nix=false" >> "$GITHUB_OUTPUT"

  migration_checks:
    name: Check Postgres and Protobuf migrations, mergability
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    timeout-minutes: 60
    runs-on:
      - self-mini-macos
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false
          fetch-depth: 0 # fetch full history

      - name: Remove untracked files
        run: git clean -df

      - name: Find modified migrations
        shell: bash -euxo pipefail {0}
        run: |
          export SQUAWK_GITHUB_TOKEN=${{ github.token }}
          . ./script/squawk

      - name: Ensure fresh merge
        shell: bash -euxo pipefail {0}
        run: |
          if [ -z "$GITHUB_BASE_REF" ];
          then
            echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
          else
            git checkout -B temp
            git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
            echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
          fi

      - uses: bufbuild/buf-setup-action@v1
        with:
          version: v1.29.0
      - uses: bufbuild/buf-breaking-action@v1
        with:
          input: "crates/proto/proto/"
          against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"

  style:
    timeout-minutes: 60
    name: Check formatting and spelling
    needs: [job_spec]
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - namespace-profile-4x8-ubuntu-2204
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

      - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
        with:
          version: 9

      - name: Prettier Check on /docs
        working-directory: ./docs
        run: |
          pnpm dlx "prettier@${PRETTIER_VERSION}" . --check || {
            echo "To fix, run from the root of the Zed repo:"
            echo "  cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
            false
          }
        env:
          PRETTIER_VERSION: 3.5.0

      - name: Prettier Check on default.json
        run: |
          pnpm dlx "prettier@${PRETTIER_VERSION}" assets/settings/default.json --check || {
            echo "To fix, run from the root of the Zed repo:"
            echo "  pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --write"
            false
          }
        env:
          PRETTIER_VERSION: 3.5.0

      # To support writing comments that they will certainly be revisited.
      - name: Check for todo! and FIXME comments
        run: script/check-todos

      - name: Check modifier use in keymaps
        run: script/check-keymaps

      - name: Run style checks
        uses: ./.github/actions/check_style

      - name: Check for typos
        uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1 # v1.38.1
        with:
          config: ./typos.toml

  check_docs:
    timeout-minutes: 60
    name: Check docs
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      (needs.job_spec.outputs.run_tests == 'true' || needs.job_spec.outputs.run_docs == 'true')
    runs-on:
      - namespace-profile-8x16-ubuntu-2204
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Build docs
        uses: ./.github/actions/build_docs

  actionlint:
    runs-on: namespace-profile-2x4-ubuntu-2404
    if: github.repository_owner == 'zed-industries' && needs.job_spec.outputs.run_actionlint == 'true'
    needs: [job_spec]
    steps:
      - uses: actions/checkout@v4
      - name: Download actionlint
        id: get_actionlint
        run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
        shell: bash
      - name: Check workflow files
        run: ${{ steps.get_actionlint.outputs.executable }} -color
        shell: bash

  macos_tests:
    timeout-minutes: 60
    name: (macOS) Run Clippy and tests
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - self-mini-macos
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Check that Cargo.lock is up to date
        run: |
          cargo update --locked --workspace

      - name: cargo clippy
        run: ./script/clippy

      - name: Install cargo-machete
        uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 # v2
        with:
          command: install
          args: cargo-machete@0.7.0

      - name: Check unused dependencies
        uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 # v2
        with:
          command: machete

      - name: Check licenses
        run: |
          script/check-licenses
          if [[ "${{ needs.job_spec.outputs.run_license }}" == "true" ]]; then
            script/generate-licenses /tmp/zed_licenses_output
          fi

      - name: Check for new vulnerable dependencies
        if: github.event_name == 'pull_request'
        uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8 # v4
        with:
          license-check: false

      - name: Run tests
        uses: ./.github/actions/run_tests

      - name: Build collab
        run: cargo build -p collab

      - name: Build other binaries and features
        run: |
          cargo build --workspace --bins --all-features
          cargo check -p gpui --features "macos-blade"
          cargo check -p workspace
          cargo build -p remote_server
          cargo check -p gpui --examples

      # Since the macOS runners are stateful, so we need to remove the config file to prevent potential bug.
      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo

  linux_tests:
    timeout-minutes: 60
    name: (Linux) Run Clippy and tests
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - namespace-profile-16x32-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          # cache-provider: "buildjet"

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: cargo clippy
        run: ./script/clippy

      - name: Run tests
        uses: ./.github/actions/run_tests

      - name: Build other binaries and features
        run: |
          cargo build -p zed
          cargo check -p workspace
          cargo check -p gpui --examples

      # Even the Linux runner is not stateful, in theory there is no need to do this cleanup.
      # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
      # to clean up the config file, I’ve included the cleanup code here as a precaution.
      # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution.
      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo

  doctests:
    # Nextest currently doesn't support doctests, so run them separately and in parallel.
    timeout-minutes: 60
    name: (Linux) Run doctests
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - namespace-profile-16x32-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          # cache-provider: "buildjet"

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Run doctests
        run: cargo test --workspace --doc --no-fail-fast

      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo

  build_remote_server:
    timeout-minutes: 60
    name: (Linux) Build Remote Server
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - namespace-profile-16x32-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          # cache-provider: "buildjet"

      - name: Install Clang & Mold
        run: ./script/remote-server && ./script/install-mold 2.34.0

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Build Remote Server
        run: cargo build -p remote_server

      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo

  windows_tests:
    timeout-minutes: 60
    name: (Windows) Run Clippy and tests
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on: [self-32vcpu-windows-2022]
    steps:
      - name: Environment Setup
        run: |
          $RunnerDir = Split-Path -Parent $env:RUNNER_WORKSPACE
          Write-Output `
            "RUSTUP_HOME=$RunnerDir\.rustup" `
            "CARGO_HOME=$RunnerDir\.cargo" `
            "PATH=$RunnerDir\.cargo\bin;$env:PATH" `
            >> $env:GITHUB_ENV

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Configure CI
        run: |
          New-Item -ItemType Directory -Path "./../.cargo" -Force
          Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"

      - name: cargo clippy
        run: |
          .\script\clippy.ps1

      - name: Run tests
        uses: ./.github/actions/run_tests_windows

      - name: Build Zed
        run: cargo build

      - name: Limit target directory size
        run: ./script/clear-target-dir-if-larger-than.ps1 250

      - name: Clean CI config file
        if: always()
        run: Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue

  tests_pass:
    name: Tests Pass
    runs-on: namespace-profile-2x4-ubuntu-2404
    needs:
      - job_spec
      - style
      - check_docs
      - actionlint
      - migration_checks
      # run_tests: If adding required tests, add them here and to script below.
      - linux_tests
      - build_remote_server
      - macos_tests
      - windows_tests
    if: |
      github.repository_owner == 'zed-industries' &&
      always()
    steps:
      - name: Check all tests passed
        run: |
          # Check dependent jobs...
          RET_CODE=0
          # Always check style
          [[ "${{ needs.style.result }}" != 'success' ]] && { RET_CODE=1; echo "style tests failed"; }

          if [[ "${{ needs.job_spec.outputs.run_docs }}" == "true" ]]; then
            [[ "${{ needs.check_docs.result }}" != 'success' ]] && { RET_CODE=1; echo "docs checks failed"; }
          fi

          if [[ "${{ needs.job_spec.outputs.run_actionlint }}" == "true" ]]; then
            [[ "${{ needs.actionlint.result }}" != 'success' ]] && { RET_CODE=1; echo "actionlint checks failed"; }
          fi

          # Only check test jobs if they were supposed to run
          if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then
            [[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
            [[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
            [[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
            [[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
            # This check is intentionally disabled. See: https://github.com/zed-industries/zed/pull/28431
            # [[ "${{ needs.migration_checks.result }}" != 'success' ]] && { RET_CODE=1; echo "Migration Checks failed"; }
          fi
          if [[ "$RET_CODE" -eq 0 ]]; then
            echo "All tests passed successfully!"
          fi
          exit $RET_CODE

  bundle-mac:
    timeout-minutes: 120
    name: Create a macOS bundle
    runs-on:
      - self-mini-macos
    if: |
      ( startsWith(github.ref, 'refs/tags/v')
        || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
    needs: [macos_tests]
    env:
      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
    steps:
      - name: Install Node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
        with:
          node-version: "18"

      - name: Setup Sentry CLI
        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
        with:
          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          # We need to fetch more than one commit so that `script/draft-release-notes`
          # is able to diff between the current and previous tag.
          #
          # 25 was chosen arbitrarily.
          fetch-depth: 25
          clean: false
          ref: ${{ github.ref }}

      - name: Limit target directory size
        run: script/clear-target-dir-if-larger-than 100

      - name: Determine version and release channel
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel

      - name: Draft release notes
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        run: |
          mkdir -p target/
          # Ignore any errors that occur while drafting release notes to not fail the build.
          script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true
          script/create-draft-release target/release-notes.md
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Create macOS app bundle
        run: script/bundle-mac

      - name: Rename binaries
        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        run: |
          mv target/aarch64-apple-darwin/release/Zed.dmg target/aarch64-apple-darwin/release/Zed-aarch64.dmg
          mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg

      - name: Upload app bundle (aarch64) to workflow run if main branch or specific label
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        with:
          name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
          path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg

      - name: Upload app bundle (x86_64) to workflow run if main branch or specific label
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        with:
          name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
          path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg

      - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        name: Upload app bundle to release
        if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            target/zed-remote-server-macos-x86_64.gz
            target/zed-remote-server-macos-aarch64.gz
            target/aarch64-apple-darwin/release/Zed-aarch64.dmg
            target/x86_64-apple-darwin/release/Zed-x86_64.dmg
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  bundle-linux-x86_x64:
    timeout-minutes: 60
    name: Linux x86_x64 release bundle
    runs-on:
      - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc
    if: |
      ( startsWith(github.ref, 'refs/tags/v')
        || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
    needs: [linux_tests]
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Install Linux dependencies
        run: ./script/linux && ./script/install-mold 2.34.0

      - name: Setup Sentry CLI
        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
        with:
          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}

      - name: Determine version and release channel
        if: startsWith(github.ref, 'refs/tags/v')
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel

      - name: Create Linux .tar.gz bundle
        run: script/bundle-linux

      - name: Upload Artifact to Workflow - zed (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
          path: target/release/zed-*.tar.gz

      - name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.gz
          path: target/zed-remote-server-linux-x86_64.gz

      - name: Upload Artifacts to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            target/zed-remote-server-linux-x86_64.gz
            target/release/zed-linux-x86_64.tar.gz
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  bundle-linux-aarch64: # this runs on ubuntu22.04
    timeout-minutes: 60
    name: Linux arm64 release bundle
    runs-on:
      - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc
    if: |
      startsWith(github.ref, 'refs/tags/v')
      || contains(github.event.pull_request.labels.*.name, 'run-bundling')
    needs: [linux_tests]
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Setup Sentry CLI
        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
        with:
          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}

      - name: Determine version and release channel
        if: startsWith(github.ref, 'refs/tags/v')
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel

      - name: Create and upload Linux .tar.gz bundles
        run: script/bundle-linux

      - name: Upload Artifact to Workflow - zed (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
          path: target/release/zed-*.tar.gz

      - name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.gz
          path: target/zed-remote-server-linux-aarch64.gz

      - name: Upload Artifacts to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            target/zed-remote-server-linux-aarch64.gz
            target/release/zed-linux-aarch64.tar.gz
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  freebsd:
    timeout-minutes: 60
    runs-on: github-8vcpu-ubuntu-2404
    if: |
      false && ( startsWith(github.ref, 'refs/tags/v')
        || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
    needs: [linux_tests]
    name: Build Zed on FreeBSD
    steps:
      - uses: actions/checkout@v4
      - name: Build FreeBSD remote-server
        id: freebsd-build
        uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
        with:
          usesh: true
          release: 13.5
          copyback: true
          prepare: |
            pkg install -y \
              bash curl jq git \
              rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
          run: |
            freebsd-version
            sysctl hw.model
            sysctl hw.ncpu
            sysctl hw.physmem
            sysctl hw.usermem
            git config --global --add safe.directory /home/runner/work/zed/zed
            rustup-init --profile minimal --default-toolchain none -y
            . "$HOME/.cargo/env"
            ./script/bundle-freebsd
            mkdir -p out/
            mv "target/zed-remote-server-freebsd-x86_64.gz" out/
            rm -rf target/
            cargo clean

      - name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-freebsd.gz
          path: out/zed-remote-server-freebsd-x86_64.gz

      - name: Upload Artifacts to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            out/zed-remote-server-freebsd-x86_64.gz
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  nix-build:
    name: Build with Nix
    uses: ./.github/workflows/nix.yml
    needs: [job_spec]
    if: github.repository_owner == 'zed-industries' &&
      (contains(github.event.pull_request.labels.*.name, 'run-nix') ||
      needs.job_spec.outputs.run_nix == 'true')
    secrets: inherit
    with:
      flake-output: debug
      # excludes the final package to only cache dependencies
      cachix-filter: "-zed-editor-[0-9.]*-nightly"

  bundle-windows-x64:
    timeout-minutes: 120
    name: Create a Windows installer
    runs-on: [self-32vcpu-windows-2022]
    if: |
      ( startsWith(github.ref, 'refs/tags/v')
        || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
    needs: [windows_tests]
    env:
      AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
      AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
      AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
      ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
      CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
      ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
      FILE_DIGEST: SHA256
      TIMESTAMP_DIGEST: SHA256
      TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Setup Sentry CLI
        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
        with:
          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}

      - name: Determine version and release channel
        working-directory: ${{ env.ZED_WORKSPACE }}
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel.ps1

      - name: Build Zed installer
        working-directory: ${{ env.ZED_WORKSPACE }}
        run: script/bundle-windows.ps1

      - name: Upload installer (x86_64) to Workflow - zed (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
          path: ${{ env.SETUP_PATH }}

      - name: Upload Artifacts to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: ${{ env.SETUP_PATH }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  auto-release-preview:
    name: Auto release preview
    if: |
      false
      && startsWith(github.ref, 'refs/tags/v')
      && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
    needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64]
    runs-on:
      - self-mini-macos
    steps:
      - name: gh release
        run: gh release edit "$GITHUB_REF_NAME" --draft=false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Create Sentry release
        uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3
        env:
          SENTRY_ORG: zed-dev
          SENTRY_PROJECT: zed
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
        with:
          environment: production
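The `job_spec` job at the top of ci.yml gates the rest of CI by grepping the changed-file list against a skip pattern. A minimal local sketch of that filter, reusing the same regex with a hypothetical file list (GNU grep with PCRE support is assumed):

```bash
#!/usr/bin/env bash
# Same SKIP_REGEX as ci.yml's job_spec step; the CHANGED_FILES list below is hypothetical.
SKIP_REGEX='^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'
CHANGED_FILES=$'docs/configuring-zed.md\n.github/workflows/ci.yml'

# run_tests flips to true as soon as any changed file falls outside the skip pattern;
# here .github/workflows/ci.yml is not skippable, so the full test suite would run.
if echo "$CHANGED_FILES" | grep -qvP "$SKIP_REGEX"; then
  echo "run_tests=true"
else
  echo "run_tests=false"
fi
```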
.github/workflows/community_release_actions.yml (vendored, new file, 93 lines)

@@ -0,0 +1,93 @@
# IF YOU UPDATE THE NAME OF ANY GITHUB SECRET, YOU MUST CHERRY PICK THE COMMIT
# TO BOTH STABLE AND PREVIEW CHANNELS

name: Release Actions

on:
  release:
    types: [published]

jobs:
  discord_release:
    if: github.repository_owner == 'zed-industries'
    runs-on: ubuntu-latest
    steps:
      - name: Get release URL
        id: get-release-url
        run: |
          if [ "${{ github.event.release.prerelease }}" == "true" ]; then
            URL="https://zed.dev/releases/preview"
          else
            URL="https://zed.dev/releases/stable"
          fi

          echo "URL=$URL" >> "$GITHUB_OUTPUT"
      - name: Get content
        uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757 # v1.4.1
        id: get-content
        with:
          stringToTruncate: |
            📣 Zed [${{ github.event.release.tag_name }}](<${{ steps.get-release-url.outputs.URL }}>) was just released!

            ${{ github.event.release.body }}
          maxLength: 2000
          truncationSymbol: "..."
      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@c840d45a03a323fbc3f7507ac7769dbd91bfb164 # v5.3.0
        with:
          webhook-url: ${{ secrets.DISCORD_WEBHOOK_RELEASE_NOTES }}
          content: ${{ steps.get-content.outputs.string }}

  publish-winget:
    runs-on:
      - ubuntu-latest
    steps:
      - name: Set Package Name
        id: set-package-name
        run: |
          if [ "${{ github.event.release.prerelease }}" == "true" ]; then
            PACKAGE_NAME=ZedIndustries.Zed.Preview
          else
            PACKAGE_NAME=ZedIndustries.Zed
          fi

          echo "PACKAGE_NAME=$PACKAGE_NAME" >> "$GITHUB_OUTPUT"
      - uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f # v2
        with:
          identifier: ${{ steps.set-package-name.outputs.PACKAGE_NAME }}
          max-versions-to-keep: 5
          token: ${{ secrets.WINGET_TOKEN }}

  send_release_notes_email:
    if: false && github.repository_owner == 'zed-industries' && !github.event.release.prerelease
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          fetch-depth: 0

      - name: Check if release was promoted from preview
        id: check-promotion-from-preview
        run: |
          VERSION="${{ github.event.release.tag_name }}"
          PREVIEW_TAG="${VERSION}-pre"

          if git rev-parse "$PREVIEW_TAG" > /dev/null 2>&1; then
            echo "was_promoted_from_preview=true" >> "$GITHUB_OUTPUT"
          else
            echo "was_promoted_from_preview=false" >> "$GITHUB_OUTPUT"
          fi

      - name: Send release notes email
        if: steps.check-promotion-from-preview.outputs.was_promoted_from_preview == 'true'
        run: |
          TAG="${{ github.event.release.tag_name }}"
          cat << 'EOF' > release_body.txt
          ${{ github.event.release.body }}
          EOF
          jq -n --arg tag "$TAG" --rawfile body release_body.txt '{version: $tag, markdown_body: $body}' \
            > release_data.json
          curl -X POST "https://zed.dev/api/send_release_notes_email" \
            -H "Authorization: Bearer ${{ secrets.RELEASE_NOTES_API_TOKEN }}" \
            -H "Content-Type: application/json" \
            -d @release_data.json
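The `send_release_notes_email` job above assembles its POST body with `jq` before calling the zed.dev API. A minimal local sketch of just the payload construction, with a hypothetical tag and body standing in for the release event data:

```bash
# Hypothetical inputs; in the workflow these come from github.event.release.
TAG="v0.0.0"
printf 'Example release notes body.\n' > release_body.txt

# Same jq invocation as the workflow step: --rawfile keeps the markdown body intact.
jq -n --arg tag "$TAG" --rawfile body release_body.txt \
  '{version: $tag, markdown_body: $body}' > release_data.json
cat release_data.json
```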
.github/workflows/compare_perf.yml (vendored, 81 lines removed)

@@ -1,81 +0,0 @@
# Generated from xtask::workflows::compare_perf
# Rebuild with `cargo xtask workflows`.
name: compare_perf
on:
  workflow_dispatch:
    inputs:
      head:
        description: head
        required: true
        type: string
      base:
        description: base
        required: true
        type: string
      crate_name:
        description: crate_name
        type: string
        default: ''
jobs:
  run_perf:
    runs-on: namespace-profile-16x32-ubuntu-2204
    steps:
    - name: steps::checkout_repo
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        clean: false
    - name: steps::setup_cargo_config
      run: |
        mkdir -p ./../.cargo
        cp ./.cargo/ci-config.toml ./../.cargo/config.toml
      shell: bash -euxo pipefail {0}
    - name: steps::setup_linux
      run: ./script/linux
      shell: bash -euxo pipefail {0}
    - name: steps::install_mold
      run: ./script/install-mold
      shell: bash -euxo pipefail {0}
    - name: steps::download_wasi_sdk
      run: ./script/download-wasi-sdk
      shell: bash -euxo pipefail {0}
    - name: compare_perf::run_perf::install_hyperfine
      run: cargo install hyperfine
      shell: bash -euxo pipefail {0}
    - name: steps::git_checkout
      run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }}
      shell: bash -euxo pipefail {0}
    - name: compare_perf::run_perf::cargo_perf_test
      run: |2-

        if [ -n "${{ inputs.crate_name }}" ]; then
          cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.base }};
        else
          cargo perf-test -p vim -- --json=${{ inputs.base }};
        fi
      shell: bash -euxo pipefail {0}
    - name: steps::git_checkout
      run: git fetch origin ${{ inputs.head }} && git checkout ${{ inputs.head }}
      shell: bash -euxo pipefail {0}
    - name: compare_perf::run_perf::cargo_perf_test
      run: |2-

        if [ -n "${{ inputs.crate_name }}" ]; then
          cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.head }};
        else
          cargo perf-test -p vim -- --json=${{ inputs.head }};
        fi
      shell: bash -euxo pipefail {0}
    - name: compare_perf::run_perf::compare_runs
      run: cargo perf-compare --save=results.md ${{ inputs.base }} ${{ inputs.head }}
      shell: bash -euxo pipefail {0}
    - name: '@actions/upload-artifact results.md'
      uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
      with:
        name: results.md
        path: results.md
        if-no-files-found: error
    - name: steps::cleanup_cargo_config
      if: always()
      run: |
        rm -rf ./../.cargo
      shell: bash -euxo pipefail {0}
.github/workflows/danger.yml (vendored, 66 changed lines)

@@ -1,40 +1,42 @@
# Generated from xtask::workflows::danger
# Rebuild with `cargo xtask workflows`.
name: danger
name: Danger

on:
  pull_request:
    branches: [main]
    types:
      - opened
      - synchronize
      - reopened
      - edited
    branches:
      - main
      - opened
      - synchronize
      - reopened
      - edited

jobs:
  danger:
    if: github.repository_owner == 'zed-industries'
    runs-on: namespace-profile-2x4-ubuntu-2404

    steps:
    - name: steps::checkout_repo
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        clean: false
    - name: steps::setup_pnpm
      uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
      with:
        version: '9'
    - name: steps::setup_node
      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
      with:
        node-version: '20'
        cache: pnpm
        cache-dependency-path: script/danger/pnpm-lock.yaml
    - name: danger::danger_job::install_deps
      run: pnpm install --dir script/danger
      shell: bash -euxo pipefail {0}
    - name: danger::danger_job::run
      run: pnpm run --dir script/danger danger ci
      shell: bash -euxo pipefail {0}
      env:
        GITHUB_TOKEN: not_a_real_token
        DANGER_GITHUB_API_BASE_URL: https://danger-proxy.fly.dev/github
    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

    - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
      with:
        version: 9

    - name: Setup Node
      uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
      with:
        node-version: "20"
        cache: "pnpm"
        cache-dependency-path: "script/danger/pnpm-lock.yaml"

    - run: pnpm install --dir script/danger

    - name: Run Danger
      run: pnpm run --dir script/danger danger ci
      env:
        # This GitHub token is not used, but the value needs to be here to prevent
        # Danger from throwing an error.
        GITHUB_TOKEN: "not_a_real_token"
        # All requests are instead proxied through an instance of
        # https://github.com/maxdeviant/danger-proxy that allows Danger to securely
        # authenticate with GitHub while still being able to run on PRs from forks.
        DANGER_GITHUB_API_BASE_URL: "https://danger-proxy.fly.dev/github"
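For reference, the Danger steps shown in both versions of the workflow reduce to two pnpm commands. A rough local sketch under the same proxy setup described in the workflow's comments (the token value is the same placeholder the workflow uses):

```bash
# Install the Danger config's dependencies, then run it against the proxy,
# mirroring the install_deps and run steps above.
pnpm install --dir script/danger
GITHUB_TOKEN="not_a_real_token" \
DANGER_GITHUB_API_BASE_URL="https://danger-proxy.fly.dev/github" \
  pnpm run --dir script/danger danger ci
```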
.github/workflows/deploy_collab.yml (vendored, 2 changed lines)

@@ -49,7 +49,7 @@ jobs:
      - name: Limit target directory size
        shell: bash -euxo pipefail {0}
        run: script/clear-target-dir-if-larger-than 300
        run: script/clear-target-dir-if-larger-than 100

      - name: Run tests
        shell: bash -euxo pipefail {0}
.github/workflows/eval.yml (vendored, new file, 71 lines)

@@ -0,0 +1,71 @@
name: Run Agent Eval

on:
  schedule:
    - cron: "0 0 * * *"

  pull_request:
    branches:
      - "**"
    types: [synchronize, reopened, labeled]

  workflow_dispatch:

concurrency:
  # Allow only one workflow per any non-`main` branch.
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
  cancel-in-progress: true

env:
  CARGO_TERM_COLOR: always
  CARGO_INCREMENTAL: 0
  RUST_BACKTRACE: 1
  ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
  ZED_EVAL_TELEMETRY: 1

jobs:
  run_eval:
    timeout-minutes: 60
    name: Run Agent Eval
    if: >
      github.repository_owner == 'zed-industries' &&
      (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
    runs-on:
      - namespace-profile-16x32-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          # cache-provider: "buildjet"

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Compile eval
        run: cargo build --package=eval

      - name: Run eval
        run: cargo run --package=eval -- --repetitions=8 --concurrency=1

      # Even the Linux runner is not stateful, in theory there is no need to do this cleanup.
      # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
      # to clean up the config file, I’ve included the cleanup code here as a precaution.
      # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution.
      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo
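Outside CI, the two eval steps above boil down to a pair of cargo invocations; a minimal sketch, assuming `ANTHROPIC_API_KEY` and the other secrets the workflow injects are already exported:

```bash
# Same commands as the "Compile eval" and "Run eval" steps in eval.yml above.
cargo build --package=eval
cargo run --package=eval -- --repetitions=8 --concurrency=1
```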
69
.github/workflows/nix.yml
vendored
Normal file
69
.github/workflows/nix.yml
vendored
Normal file
@@ -0,0 +1,69 @@
|
||||
name: "Nix build"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
flake-output:
|
||||
type: string
|
||||
default: "default"
|
||||
cachix-filter:
|
||||
type: string
|
||||
default: ""
|
||||
|
||||
jobs:
|
||||
nix-build:
|
||||
timeout-minutes: 60
|
||||
name: (${{ matrix.system.os }}) Nix Build
|
||||
continue-on-error: true # TODO: remove when we want this to start blocking CI
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
system:
|
||||
- os: x86 Linux
|
||||
runner: namespace-profile-16x32-ubuntu-2204
|
||||
install_nix: true
|
||||
- os: arm Mac
|
||||
runner: [macOS, ARM64, test]
|
||||
install_nix: false
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: ${{ matrix.system.runner }}
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
GIT_LFS_SKIP_SMUDGE: 1 # breaks the livekit rust sdk examples which we don't actually depend on
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
# on our macs we manually install nix. for some reason the cachix action is running
|
||||
# under a non-login /bin/bash shell which doesn't source the proper script to add the
|
||||
# nix profile to PATH, so we manually add them here
|
||||
- name: Set path
|
||||
if: ${{ ! matrix.system.install_nix }}
|
||||
run: |
|
||||
echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
|
||||
echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
|
||||
|
||||
- uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
|
||||
if: ${{ matrix.system.install_nix }}
|
||||
with:
|
||||
github_access_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
|
||||
with:
|
||||
name: zed
|
||||
authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
|
||||
pushFilter: "${{ inputs.cachix-filter }}"
|
||||
cachixArgs: "-v"
|
||||
|
||||
- run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config
|
||||
|
||||
- name: Limit /nix/store to 50GB on macs
|
||||
if: ${{ ! matrix.system.install_nix }}
|
||||
run: |
|
||||
if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
|
||||
nix-collect-garbage -d || true
|
||||
fi
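Because this workflow is declared with `on: workflow_call`, other workflows invoke it instead of repeating the Nix build steps; the `bundle-nix` job in the old `release_nightly.yml` further down this diff does exactly that. A minimal caller would look like the sketch below (the `uses:` path, the two inputs, and `secrets: inherit` come from the file above; the explicit input values shown are just the defaults):

```yaml
jobs:
  bundle-nix:
    name: Build and cache Nix package
    uses: ./.github/workflows/nix.yml
    # Forward repository secrets (CACHIX_AUTH_TOKEN, checksum seed, ...) to the called workflow.
    secrets: inherit
    with:
      flake-output: default   # which flake output to build; defaults to "default"
      cachix-filter: ""       # optional push filter handed to cachix; defaults to empty
```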
|
||||
489 .github/workflows/release.yml vendored
@@ -1,489 +0,0 @@
|
||||
# Generated from xtask::workflows::release
|
||||
# Rebuild with `cargo xtask workflows`.
|
||||
name: release
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUST_BACKTRACE: '1'
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
jobs:
|
||||
run_tests_mac:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: self-mini-macos
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
run_tests_linux:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-16x32-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 250
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
run_tests_windows:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
New-Item -ItemType Directory -Path "./../.cargo" -Force
|
||||
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
|
||||
shell: pwsh
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy.ps1
|
||||
shell: pwsh
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: pwsh
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 250
|
||||
shell: pwsh
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: pwsh
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
|
||||
shell: pwsh
|
||||
timeout-minutes: 60
|
||||
check_scripts:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_tests::check_scripts::run_shellcheck
|
||||
run: ./script/shellcheck-scripts error
|
||||
shell: bash -euxo pipefail {0}
|
||||
- id: get_actionlint
|
||||
name: run_tests::check_scripts::download_actionlint
|
||||
run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_scripts::run_actionlint
|
||||
run: |
|
||||
${{ steps.get_actionlint.outputs.executable }} -color
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_scripts::check_xtask_workflows
|
||||
run: |
|
||||
cargo xtask workflows
|
||||
if ! git diff --exit-code .github; then
|
||||
echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
|
||||
echo "Please run 'cargo xtask workflows' locally and commit the changes"
|
||||
exit 1
|
||||
fi
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
create_draft_release:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 25
|
||||
ref: ${{ github.ref }}
|
||||
- name: script/determine-release-channel
|
||||
run: script/determine-release-channel
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: mkdir -p target/
|
||||
run: mkdir -p target/
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: release::create_draft_release::generate_release_notes
|
||||
run: node --redirect-warnings=/dev/null ./script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: release::create_draft_release::create_release
|
||||
run: script/create-draft-release target/release-notes.md
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
timeout-minutes: 60
|
||||
bundle_linux_aarch64:
|
||||
needs:
|
||||
- run_tests_linux
|
||||
- check_scripts
|
||||
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-aarch64.tar.gz
|
||||
path: target/release/zed-linux-aarch64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-aarch64.gz
|
||||
path: target/zed-remote-server-linux-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_linux_x86_64:
|
||||
needs:
|
||||
- run_tests_linux
|
||||
- check_scripts
|
||||
runs-on: namespace-profile-32x64-ubuntu-2004
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-x86_64.tar.gz
|
||||
path: target/release/zed-linux-x86_64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-x86_64.gz
|
||||
path: target/zed-remote-server-linux-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_mac_aarch64:
|
||||
needs:
|
||||
- run_tests_mac
|
||||
- check_scripts
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_bundling::bundle_mac::bundle_mac
|
||||
run: ./script/bundle-mac aarch64-apple-darwin
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact Zed-aarch64.dmg'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-aarch64.dmg
|
||||
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-macos-aarch64.gz
|
||||
path: target/zed-remote-server-macos-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_mac_x86_64:
|
||||
needs:
|
||||
- run_tests_mac
|
||||
- check_scripts
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_bundling::bundle_mac::bundle_mac
|
||||
run: ./script/bundle-mac x86_64-apple-darwin
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact Zed-x86_64.dmg'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-x86_64.dmg
|
||||
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-macos-x86_64.gz
|
||||
path: target/zed-remote-server-macos-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_windows_aarch64:
|
||||
needs:
|
||||
- run_tests_windows
|
||||
- check_scripts
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: run_bundling::bundle_windows::bundle_windows
|
||||
run: script/bundle-windows.ps1 -Architecture aarch64
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: '@actions/upload-artifact Zed-aarch64.exe'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-aarch64.exe
|
||||
path: target/Zed-aarch64.exe
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_windows_x86_64:
|
||||
needs:
|
||||
- run_tests_windows
|
||||
- check_scripts
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: run_bundling::bundle_windows::bundle_windows
|
||||
run: script/bundle-windows.ps1 -Architecture x86_64
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: '@actions/upload-artifact Zed-x86_64.exe'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-x86_64.exe
|
||||
path: target/Zed-x86_64.exe
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
upload_release_assets:
|
||||
needs:
|
||||
- create_draft_release
|
||||
- bundle_linux_aarch64
|
||||
- bundle_linux_x86_64
|
||||
- bundle_mac_aarch64
|
||||
- bundle_mac_x86_64
|
||||
- bundle_windows_aarch64
|
||||
- bundle_windows_x86_64
|
||||
runs-on: namespace-profile-4x8-ubuntu-2204
|
||||
steps:
|
||||
- name: release::download_workflow_artifacts
|
||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
|
||||
with:
|
||||
path: ./artifacts/
|
||||
- name: ls -lR ./artifacts
|
||||
run: ls -lR ./artifacts
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: release::prep_release_artifacts
|
||||
run: |-
|
||||
mkdir -p release-artifacts/
|
||||
|
||||
mv ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg release-artifacts/Zed-aarch64.dmg
|
||||
mv ./artifacts/Zed-x86_64.dmg/Zed-x86_64.dmg release-artifacts/Zed-x86_64.dmg
|
||||
mv ./artifacts/zed-linux-aarch64.tar.gz/zed-linux-aarch64.tar.gz release-artifacts/zed-linux-aarch64.tar.gz
|
||||
mv ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz release-artifacts/zed-linux-x86_64.tar.gz
|
||||
mv ./artifacts/Zed-x86_64.exe/Zed-x86_64.exe release-artifacts/Zed-x86_64.exe
|
||||
mv ./artifacts/Zed-aarch64.exe/Zed-aarch64.exe release-artifacts/Zed-aarch64.exe
|
||||
mv ./artifacts/zed-remote-server-macos-aarch64.gz/zed-remote-server-macos-aarch64.gz release-artifacts/zed-remote-server-macos-aarch64.gz
|
||||
mv ./artifacts/zed-remote-server-macos-x86_64.gz/zed-remote-server-macos-x86_64.gz release-artifacts/zed-remote-server-macos-x86_64.gz
|
||||
mv ./artifacts/zed-remote-server-linux-aarch64.gz/zed-remote-server-linux-aarch64.gz release-artifacts/zed-remote-server-linux-aarch64.gz
|
||||
mv ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz release-artifacts/zed-remote-server-linux-x86_64.gz
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/*
|
||||
run: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/*
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
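The nested `./artifacts/<name>/<name>` paths that `prep_release_artifacts` moves around come from `actions/download-artifact`: when no `name` is given, it downloads every artifact of the run and places each one in its own sub-directory (named after the artifact) under `path`. A sketch of the step and the layout it produces, using the artifact names uploaded by the bundle jobs above:

```yaml
- name: release::download_workflow_artifacts
  uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
  with:
    path: ./artifacts/
# Resulting layout, one directory per uploaded artifact:
#   ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg
#   ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz
#   ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz
#   ...
```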
|
||||
auto_release_preview:
|
||||
needs:
|
||||
- upload_release_assets
|
||||
if: startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false
|
||||
run: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
|
||||
cancel-in-progress: true
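Two expressions in this file are dense enough to be worth unpacking. The `auto_release_preview` gate auto-publishes only preview patch releases: the tag must start with `v`, end with `-pre`, and not be the first preview of a minor version (`*.0-pre`), which stays a draft. The shared `concurrency` group keys runs by workflow and ref and only mixes in the commit SHA on `main`, so duplicate runs for the same tag or branch cancel each other while pushes to `main` never do. A commented sketch of how the tag filter evaluates (the tag values are illustrative):

```yaml
auto_release_preview:
  needs:
    - upload_release_assets
  # refs/tags/v0.200.1-pre  -> published automatically (preview patch release)
  # refs/tags/v0.200.0-pre  -> stays a draft (first preview of a minor version)
  # refs/tags/v0.200.1      -> stays a draft (stable tag, no -pre suffix)
  if: startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
```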
|
||||
808 .github/workflows/release_nightly.yml vendored
@@ -1,495 +1,323 @@
|
||||
# Generated from xtask::workflows::release_nightly
|
||||
# Rebuild with `cargo xtask workflows`.
|
||||
name: release_nightly
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUST_BACKTRACE: '1'
|
||||
name: Release Nightly
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
|
||||
- cron: "0 7 * * *"
|
||||
push:
|
||||
tags:
|
||||
- nightly
|
||||
schedule:
|
||||
- cron: 0 7 * * *
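Read together, the triggers above mean the nightly pipeline runs in two situations: on the daily cron, and whenever the `nightly` tag itself is pushed, so manually re-pushing the tag can kick off an out-of-band build. A condensed sketch of the trigger block with the cron fields spelled out:

```yaml
on:
  push:
    tags:
      - nightly            # pushing/re-pushing the `nightly` tag starts a run
  schedule:
    - cron: "0 7 * * *"    # minute 0, hour 7 UTC, every day of month, month, and weekday
```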
|
||||
jobs:
|
||||
check_style:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: self-mini-macos
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
- name: steps::cargo_fmt
|
||||
run: cargo fmt --all -- --check
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
run_tests_windows:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
New-Item -ItemType Directory -Path "./../.cargo" -Force
|
||||
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
|
||||
shell: pwsh
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy.ps1
|
||||
shell: pwsh
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: pwsh
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 250
|
||||
shell: pwsh
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: pwsh
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
|
||||
shell: pwsh
|
||||
timeout-minutes: 60
|
||||
bundle_linux_aarch64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
set -eu
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-aarch64.tar.gz
|
||||
path: target/release/zed-linux-aarch64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-aarch64.gz
|
||||
path: target/zed-remote-server-linux-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_linux_x86_64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: namespace-profile-32x64-ubuntu-2004
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
set -eu
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-x86_64.tar.gz
|
||||
path: target/release/zed-linux-x86_64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-x86_64.gz
|
||||
path: target/zed-remote-server-linux-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_mac_aarch64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
set -eu
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_bundling::bundle_mac::bundle_mac
|
||||
run: ./script/bundle-mac aarch64-apple-darwin
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact Zed-aarch64.dmg'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-aarch64.dmg
|
||||
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-macos-aarch64.gz
|
||||
path: target/zed-remote-server-macos-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_mac_x86_64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
set -eu
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_bundling::bundle_mac::bundle_mac
|
||||
run: ./script/bundle-mac x86_64-apple-darwin
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact Zed-x86_64.dmg'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-x86_64.dmg
|
||||
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-macos-x86_64.gz
|
||||
path: target/zed-remote-server-macos-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_windows_aarch64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$version = git rev-parse --short HEAD
|
||||
Write-Host "Publishing version: $version on release channel nightly"
|
||||
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: run_bundling::bundle_windows::bundle_windows
|
||||
run: script/bundle-windows.ps1 -Architecture aarch64
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: '@actions/upload-artifact Zed-aarch64.exe'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-aarch64.exe
|
||||
path: target/Zed-aarch64.exe
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_windows_x86_64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$version = git rev-parse --short HEAD
|
||||
Write-Host "Publishing version: $version on release channel nightly"
|
||||
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: run_bundling::bundle_windows::bundle_windows
|
||||
run: script/bundle-windows.ps1 -Architecture x86_64
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: '@actions/upload-artifact Zed-x86_64.exe'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-x86_64.exe
|
||||
path: target/Zed-x86_64.exe
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
build_nix_linux_x86_64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-32x64-ubuntu-2004
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
GIT_LFS_SKIP_SMUDGE: '1'
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: nix_build::build_nix::install_nix
|
||||
uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
|
||||
with:
|
||||
github_access_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: nix_build::build_nix::cachix_action
|
||||
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
|
||||
with:
|
||||
name: zed
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
cachixArgs: -v
|
||||
- name: nix_build::build_nix::build
|
||||
run: nix build .#default -L --accept-flake-config
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
continue-on-error: true
|
||||
build_nix_mac_aarch64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
GIT_LFS_SKIP_SMUDGE: '1'
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: nix_build::build_nix::set_path
|
||||
run: |
|
||||
echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
|
||||
echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: nix_build::build_nix::cachix_action
|
||||
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
|
||||
with:
|
||||
name: zed
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
cachixArgs: -v
|
||||
- name: nix_build::build_nix::build
|
||||
run: nix build .#default -L --accept-flake-config
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: nix_build::build_nix::limit_store
|
||||
run: |-
|
||||
if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
|
||||
nix-collect-garbage -d || true
|
||||
fi
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
continue-on-error: true
|
||||
update_nightly_tag:
|
||||
needs:
|
||||
- bundle_linux_aarch64
|
||||
- bundle_linux_x86_64
|
||||
- bundle_mac_aarch64
|
||||
- bundle_mac_x86_64
|
||||
- bundle_windows_aarch64
|
||||
- bundle_windows_x86_64
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-4x8-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
- name: release::download_workflow_artifacts
|
||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
|
||||
with:
|
||||
path: ./artifacts/
|
||||
- name: ls -lR ./artifacts
|
||||
run: ls -lR ./artifacts
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: release::prep_release_artifacts
|
||||
run: |-
|
||||
mkdir -p release-artifacts/
|
||||
- "nightly"
|
||||
|
||||
mv ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg release-artifacts/Zed-aarch64.dmg
|
||||
mv ./artifacts/Zed-x86_64.dmg/Zed-x86_64.dmg release-artifacts/Zed-x86_64.dmg
|
||||
mv ./artifacts/zed-linux-aarch64.tar.gz/zed-linux-aarch64.tar.gz release-artifacts/zed-linux-aarch64.tar.gz
|
||||
mv ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz release-artifacts/zed-linux-x86_64.tar.gz
|
||||
mv ./artifacts/Zed-x86_64.exe/Zed-x86_64.exe release-artifacts/Zed-x86_64.exe
|
||||
mv ./artifacts/Zed-aarch64.exe/Zed-aarch64.exe release-artifacts/Zed-aarch64.exe
|
||||
mv ./artifacts/zed-remote-server-macos-aarch64.gz/zed-remote-server-macos-aarch64.gz release-artifacts/zed-remote-server-macos-aarch64.gz
|
||||
mv ./artifacts/zed-remote-server-macos-x86_64.gz/zed-remote-server-macos-x86_64.gz release-artifacts/zed-remote-server-macos-x86_64.gz
|
||||
mv ./artifacts/zed-remote-server-linux-aarch64.gz/zed-remote-server-linux-aarch64.gz release-artifacts/zed-remote-server-linux-aarch64.gz
|
||||
mv ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz release-artifacts/zed-remote-server-linux-x86_64.gz
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/upload-nightly
|
||||
run: ./script/upload-nightly
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
- name: release_nightly::update_nightly_tag_job::update_nightly_tag
|
||||
run: |
|
||||
if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
|
||||
echo "Nightly tag already points to current commit. Skipping tagging."
|
||||
exit 0
|
||||
fi
|
||||
git config user.name github-actions
|
||||
git config user.email github-actions@github.com
|
||||
git tag -f nightly
|
||||
git push origin nightly --force
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: release::create_sentry_release
|
||||
uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
|
||||
with:
|
||||
environment: production
|
||||
env:
|
||||
SENTRY_ORG: zed-dev
|
||||
SENTRY_PROJECT: zed
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUST_BACKTRACE: 1
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
|
||||
jobs:
|
||||
style:
|
||||
timeout-minutes: 60
|
||||
name: Check formatting and Clippy lints
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- self-hosted
|
||||
- macOS
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Run style checks
|
||||
uses: ./.github/actions/check_style
|
||||
|
||||
- name: Run clippy
|
||||
run: ./script/clippy
|
||||
|
||||
tests:
|
||||
timeout-minutes: 60
|
||||
name: Run tests
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- self-hosted
|
||||
- macOS
|
||||
needs: style
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Run tests
|
||||
uses: ./.github/actions/run_tests
|
||||
|
||||
windows-tests:
|
||||
timeout-minutes: 60
|
||||
name: Run tests on Windows
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: [self-32vcpu-windows-2022]
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Configure CI
|
||||
run: |
|
||||
New-Item -ItemType Directory -Path "./../.cargo" -Force
|
||||
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
|
||||
|
||||
- name: Run tests
|
||||
uses: ./.github/actions/run_tests_windows
|
||||
|
||||
- name: Limit target directory size
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 1024
|
||||
|
||||
- name: Clean CI config file
|
||||
if: always()
|
||||
run: Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
|
||||
|
||||
bundle-mac:
|
||||
timeout-minutes: 60
|
||||
name: Create a macOS bundle
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- self-mini-macos
|
||||
needs: tests
|
||||
env:
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
with:
|
||||
node-version: "18"
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Set release channel to nightly
|
||||
run: |
|
||||
set -eu
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
|
||||
- name: Setup Sentry CLI
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
|
||||
with:
|
||||
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
- name: Create macOS app bundle
|
||||
run: script/bundle-mac
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
run: script/upload-nightly macos
|
||||
|
||||
bundle-linux-x86:
|
||||
timeout-minutes: 60
|
||||
name: Create a Linux *.tar.gz bundle for x86
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc
|
||||
needs: tests
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Add Rust to the PATH
|
||||
run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Install Linux dependencies
|
||||
run: ./script/linux && ./script/install-mold 2.34.0
|
||||
|
||||
- name: Setup Sentry CLI
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
|
||||
with:
|
||||
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
- name: Limit target directory size
|
||||
run: script/clear-target-dir-if-larger-than 100
|
||||
|
||||
- name: Set release channel to nightly
|
||||
run: |
|
||||
set -euo pipefail
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
|
||||
- name: Create Linux .tar.gz bundle
|
||||
run: script/bundle-linux
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
run: script/upload-nightly linux-targz
|
||||
|
||||
bundle-linux-arm:
|
||||
timeout-minutes: 60
|
||||
name: Create a Linux *.tar.gz bundle for ARM
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc
|
||||
needs: tests
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Install Linux dependencies
|
||||
run: ./script/linux
|
||||
|
||||
- name: Setup Sentry CLI
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
|
||||
with:
|
||||
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
- name: Limit target directory size
|
||||
run: script/clear-target-dir-if-larger-than 100
|
||||
|
||||
- name: Set release channel to nightly
|
||||
run: |
|
||||
set -euo pipefail
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
|
||||
- name: Create Linux .tar.gz bundle
|
||||
run: script/bundle-linux
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
run: script/upload-nightly linux-targz
|
||||
|
||||
freebsd:
|
||||
timeout-minutes: 60
|
||||
if: false && github.repository_owner == 'zed-industries'
|
||||
runs-on: github-8vcpu-ubuntu-2404
|
||||
needs: tests
|
||||
name: Build Zed on FreeBSD
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build FreeBSD remote-server
|
||||
id: freebsd-build
|
||||
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
|
||||
with:
|
||||
# envs: "MYTOKEN MYTOKEN2"
|
||||
usesh: true
|
||||
release: 13.5
|
||||
copyback: true
|
||||
prepare: |
|
||||
pkg install -y \
|
||||
bash curl jq git \
|
||||
rustup-init cmake-core llvm-devel-lite pkgconf protobuf # libx11 alsa-lib rust-bindgen-cli
|
||||
run: |
|
||||
freebsd-version
|
||||
sysctl hw.model
|
||||
sysctl hw.ncpu
|
||||
sysctl hw.physmem
|
||||
sysctl hw.usermem
|
||||
git config --global --add safe.directory /home/runner/work/zed/zed
|
||||
rustup-init --profile minimal --default-toolchain none -y
|
||||
. "$HOME/.cargo/env"
|
||||
./script/bundle-freebsd
|
||||
mkdir -p out/
|
||||
mv "target/zed-remote-server-freebsd-x86_64.gz" out/
|
||||
rm -rf target/
|
||||
cargo clean
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
run: script/upload-nightly freebsd
|
||||
|
||||
bundle-nix:
|
||||
name: Build and cache Nix package
|
||||
needs: tests
|
||||
secrets: inherit
|
||||
uses: ./.github/workflows/nix.yml
|
||||
|
||||
bundle-windows-x64:
|
||||
timeout-minutes: 60
|
||||
name: Create a Windows installer
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: [self-32vcpu-windows-2022]
|
||||
needs: windows-tests
|
||||
env:
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Set release channel to nightly
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$version = git rev-parse --short HEAD
|
||||
Write-Host "Publishing version: $version on release channel nightly"
|
||||
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
|
||||
|
||||
- name: Setup Sentry CLI
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
|
||||
with:
|
||||
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
- name: Build Zed installer
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
run: script/bundle-windows.ps1
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
run: script/upload-nightly.ps1 windows
|
||||
|
||||
update-nightly-tag:
|
||||
name: Update nightly tag
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
needs:
|
||||
- bundle-mac
|
||||
- bundle-linux-x86
|
||||
- bundle-linux-arm
|
||||
- bundle-windows-x64
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Update nightly tag
|
||||
run: |
|
||||
if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
|
||||
echo "Nightly tag already points to current commit. Skipping tagging."
|
||||
exit 0
|
||||
fi
|
||||
git config user.name github-actions
|
||||
git config user.email github-actions@github.com
|
||||
git tag -f nightly
|
||||
git push origin nightly --force
|
||||
|
||||
- name: Create Sentry release
|
||||
uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3
|
||||
env:
|
||||
SENTRY_ORG: zed-dev
|
||||
SENTRY_PROJECT: zed
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
with:
|
||||
environment: production
|
||||
|
||||
59 .github/workflows/run_agent_evals.yml vendored
@@ -1,59 +0,0 @@
|
||||
# Generated from xtask::workflows::run_agent_evals
|
||||
# Rebuild with `cargo xtask workflows`.
|
||||
name: run_agent_evals
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
CARGO_INCREMENTAL: '0'
|
||||
RUST_BACKTRACE: '1'
|
||||
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_EVAL_TELEMETRY: '1'
|
||||
MODEL_NAME: ${{ inputs.model_name }}
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
model_name:
|
||||
description: model_name
|
||||
required: true
|
||||
type: string
|
||||
jobs:
|
||||
agent_evals:
|
||||
runs-on: namespace-profile-16x32-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: cargo build --package=eval
|
||||
run: cargo build --package=eval
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_agent_evals::agent_evals::run_eval
|
||||
run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}"
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 600
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
|
||||
cancel-in-progress: true
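Pulling the scattered pieces of this (now removed) workflow together: the `model_name` value supplied when the workflow is dispatched is surfaced as the `MODEL_NAME` environment variable and handed to the eval binary's `--model` flag. A condensed sketch of that flow, using only names that appear in the file above:

```yaml
on:
  workflow_dispatch:
    inputs:
      model_name:
        description: model_name
        required: true
        type: string

env:
  MODEL_NAME: ${{ inputs.model_name }}   # dispatch input -> env var visible to every step

jobs:
  agent_evals:
    runs-on: namespace-profile-16x32-ubuntu-2204
    steps:
      # ... checkout, toolchain, and cargo config setup as above ...
      - name: run_agent_evals::agent_evals::run_eval
        run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}"
```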
|
||||
269 .github/workflows/run_bundling.yml vendored
@@ -1,269 +0,0 @@
|
||||
# Generated from xtask::workflows::run_bundling
|
||||
# Rebuild with `cargo xtask workflows`.
|
||||
name: run_bundling
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUST_BACKTRACE: '1'
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- labeled
|
||||
- synchronize
|
||||
jobs:
|
||||
bundle_linux_aarch64:
|
||||
if: |-
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
|
||||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
|
||||
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-aarch64.tar.gz
|
||||
path: target/release/zed-linux-aarch64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-aarch64.gz
|
||||
path: target/zed-remote-server-linux-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_linux_x86_64:
|
||||
if: |-
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
|
||||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
|
||||
runs-on: namespace-profile-32x64-ubuntu-2004
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-x86_64.tar.gz
|
||||
path: target/release/zed-linux-x86_64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-x86_64.gz
|
||||
path: target/zed-remote-server-linux-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_mac_aarch64:
|
||||
if: |-
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
|
||||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_bundling::bundle_mac::bundle_mac
|
||||
run: ./script/bundle-mac aarch64-apple-darwin
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact Zed-aarch64.dmg'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-aarch64.dmg
|
||||
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-macos-aarch64.gz
|
||||
path: target/zed-remote-server-macos-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_mac_x86_64:
|
||||
if: |-
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
|
||||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_bundling::bundle_mac::bundle_mac
|
||||
run: ./script/bundle-mac x86_64-apple-darwin
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact Zed-x86_64.dmg'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-x86_64.dmg
|
||||
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-macos-x86_64.gz
|
||||
path: target/zed-remote-server-macos-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_windows_aarch64:
|
||||
if: |-
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
|
||||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: run_bundling::bundle_windows::bundle_windows
|
||||
run: script/bundle-windows.ps1 -Architecture aarch64
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: '@actions/upload-artifact Zed-aarch64.exe'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-aarch64.exe
|
||||
path: target/Zed-aarch64.exe
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_windows_x86_64:
|
||||
if: |-
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
|
||||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: run_bundling::bundle_windows::bundle_windows
|
||||
run: script/bundle-windows.ps1 -Architecture x86_64
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: '@actions/upload-artifact Zed-x86_64.exe'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-x86_64.exe
|
||||
path: target/Zed-x86_64.exe
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||
cancel-in-progress: true
|
||||
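These bundling jobs only run when a pull request carries the `run-bundling` label. A minimal sketch of triggering them and fetching a resulting artifact with the GitHub CLI; the PR number and run id are placeholders:

```bash
# Add the label that gates the bundling jobs (PR number is hypothetical).
gh pr edit 12345 --add-label run-bundling

# Once the workflow finishes, download one of the uploaded artifacts by name
# (replace RUN_ID with an id from `gh run list --workflow=run_bundling.yml`).
gh run download RUN_ID -n zed-linux-x86_64.tar.gz
```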
.github/workflows/run_tests.yml (vendored) — 579 lines removed
@@ -1,579 +0,0 @@
|
||||
# Generated from xtask::workflows::run_tests
|
||||
# Rebuild with `cargo xtask workflows`.
|
||||
name: run_tests
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUST_BACKTRACE: '1'
|
||||
CARGO_INCREMENTAL: '0'
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- '**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- v[0-9]+.[0-9]+.x
|
||||
jobs:
|
||||
orchestrate:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
|
||||
- id: filter
|
||||
name: filter
|
||||
run: |
|
||||
if [ -z "$GITHUB_BASE_REF" ]; then
|
||||
echo "Not in a PR context (i.e., push to main/stable/preview)"
|
||||
COMPARE_REV="$(git rev-parse HEAD~1)"
|
||||
else
|
||||
echo "In a PR context comparing to pull_request.base.ref"
|
||||
git fetch origin "$GITHUB_BASE_REF" --depth=350
|
||||
COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
|
||||
fi
|
||||
CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
|
||||
|
||||
check_pattern() {
|
||||
local output_name="$1"
|
||||
local pattern="$2"
|
||||
local grep_arg="$3"
|
||||
|
||||
echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
|
||||
echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
|
||||
echo "${output_name}=false" >> "$GITHUB_OUTPUT"
|
||||
}
|
||||
|
||||
check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/' -qP
|
||||
check_pattern "run_docs" '^docs/' -qP
|
||||
check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP
|
||||
check_pattern "run_nix" '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' -qP
|
||||
check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP
|
||||
shell: bash -euxo pipefail {0}
|
||||
outputs:
|
||||
run_action_checks: ${{ steps.filter.outputs.run_action_checks }}
|
||||
run_docs: ${{ steps.filter.outputs.run_docs }}
|
||||
run_licenses: ${{ steps.filter.outputs.run_licenses }}
|
||||
run_nix: ${{ steps.filter.outputs.run_nix }}
|
||||
run_tests: ${{ steps.filter.outputs.run_tests }}
|
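The `filter` step above decides which downstream jobs run by grepping the changed-file list. A minimal local sketch of the same logic, assuming you are on a feature branch and comparing against origin/main; the patterns mirror the workflow, but results are printed to stdout instead of $GITHUB_OUTPUT:

```bash
#!/usr/bin/env bash
# Preview which run_tests jobs the orchestrate filter would enable for your branch.
set -euo pipefail

COMPARE_REV="$(git merge-base origin/main HEAD)"
CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" HEAD)"

check_pattern() {
  local output_name="$1" pattern="$2" grep_arg="$3"
  if echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern"; then
    echo "${output_name}=true"
  else
    echo "${output_name}=false"
  fi
}

check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/' -qP
check_pattern "run_docs" '^docs/' -qP
check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP
check_pattern "run_nix" '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' -qP
check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP
```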
||||
check_style:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-4x8-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_pnpm
|
||||
uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
|
||||
with:
|
||||
version: '9'
|
||||
- name: ./script/prettier
|
||||
run: ./script/prettier
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/check-todos
|
||||
run: ./script/check-todos
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/check-keymaps
|
||||
run: ./script/check-keymaps
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_style::check_for_typos
|
||||
uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1
|
||||
with:
|
||||
config: ./typos.toml
|
||||
- name: steps::cargo_fmt
|
||||
run: cargo fmt --all -- --check
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
run_tests_windows:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_tests == 'true'
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
New-Item -ItemType Directory -Path "./../.cargo" -Force
|
||||
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
|
||||
shell: pwsh
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy.ps1
|
||||
shell: pwsh
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: pwsh
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 250
|
||||
shell: pwsh
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: pwsh
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
|
||||
shell: pwsh
|
||||
timeout-minutes: 60
|
||||
run_tests_linux:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_tests == 'true'
|
||||
runs-on: namespace-profile-16x32-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 250
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
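To reproduce the Linux test job outside CI, the same steps can be run locally; a rough sketch, skipping the CI-only cargo config swap and the Namespace cache:

```bash
# Install system dependencies, the mold linker, and the WASI SDK the build expects.
./script/linux
./script/install-mold
./script/download-wasi-sdk

# Lint and test the whole workspace the way the run_tests_linux job does.
./script/clippy
cargo install cargo-nextest --locked
cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
```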
||||
run_tests_mac:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_tests == 'true'
|
||||
runs-on: self-mini-macos
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
doctests:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_tests == 'true'
|
||||
runs-on: namespace-profile-16x32-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- id: run_doctests
|
||||
name: run_tests::doctests::run_doctests
|
||||
run: |
|
||||
cargo test --workspace --doc --no-fail-fast
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
check_workspace_binaries:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_tests == 'true'
|
||||
runs-on: namespace-profile-8x16-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: cargo build -p collab
|
||||
run: cargo build -p collab
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: cargo build --workspace --bins --examples
|
||||
run: cargo build --workspace --bins --examples
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
check_dependencies:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_tests == 'true'
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: run_tests::check_dependencies::install_cargo_machete
|
||||
uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
|
||||
with:
|
||||
command: install
|
||||
args: cargo-machete@0.7.0
|
||||
- name: run_tests::check_dependencies::run_cargo_machete
|
||||
uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
|
||||
with:
|
||||
command: machete
|
||||
- name: run_tests::check_dependencies::check_cargo_lock
|
||||
run: cargo update --locked --workspace
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_dependencies::check_vulnerable_dependencies
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8
|
||||
with:
|
||||
license-check: false
|
||||
timeout-minutes: 60
|
||||
check_docs:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_docs == 'true'
|
||||
runs-on: namespace-profile-8x16-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: run_tests::check_docs::lychee_link_check
|
||||
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
|
||||
with:
|
||||
args: --no-progress --exclude '^http' './docs/src/**/*'
|
||||
fail: true
|
||||
jobSummary: false
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_docs::install_mdbook
|
||||
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08
|
||||
with:
|
||||
mdbook-version: 0.4.37
|
||||
- name: run_tests::check_docs::build_docs
|
||||
run: |
|
||||
mkdir -p target/deploy
|
||||
mdbook build ./docs --dest-dir=../target/deploy/docs/
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_docs::lychee_link_check
|
||||
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
|
||||
with:
|
||||
args: --no-progress --exclude '^http' 'target/deploy/docs'
|
||||
fail: true
|
||||
jobSummary: false
|
||||
timeout-minutes: 60
|
||||
check_licenses:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_licenses == 'true'
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: ./script/check-licenses
|
||||
run: ./script/check-licenses
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/generate-licenses
|
||||
run: ./script/generate-licenses
|
||||
shell: bash -euxo pipefail {0}
|
||||
check_scripts:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_action_checks == 'true'
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_tests::check_scripts::run_shellcheck
|
||||
run: ./script/shellcheck-scripts error
|
||||
shell: bash -euxo pipefail {0}
|
||||
- id: get_actionlint
|
||||
name: run_tests::check_scripts::download_actionlint
|
||||
run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_scripts::run_actionlint
|
||||
run: |
|
||||
${{ steps.get_actionlint.outputs.executable }} -color
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_scripts::check_xtask_workflows
|
||||
run: |
|
||||
cargo xtask workflows
|
||||
if ! git diff --exit-code .github; then
|
||||
echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
|
||||
echo "Please run 'cargo xtask workflows' locally and commit the changes"
|
||||
exit 1
|
||||
fi
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
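The `check_xtask_workflows` step enforces that the YAML under .github/workflows matches what the Rust xtask generates. Before pushing workflow changes, the same check can be run locally:

```bash
# Regenerate the workflow files from tooling/xtask and fail if anything is stale.
cargo xtask workflows
git diff --exit-code .github || {
  echo "Workflows are stale; commit the regenerated files under .github/" >&2
  exit 1
}
```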
||||
build_nix_linux_x86_64:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_nix == 'true'
|
||||
runs-on: namespace-profile-32x64-ubuntu-2004
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
GIT_LFS_SKIP_SMUDGE: '1'
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: nix_build::build_nix::install_nix
|
||||
uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
|
||||
with:
|
||||
github_access_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: nix_build::build_nix::cachix_action
|
||||
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
|
||||
with:
|
||||
name: zed
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
cachixArgs: -v
|
||||
pushFilter: -zed-editor-[0-9.]*-nightly
|
||||
- name: nix_build::build_nix::build
|
||||
run: nix build .#debug -L --accept-flake-config
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
continue-on-error: true
|
||||
build_nix_mac_aarch64:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_nix == 'true'
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
GIT_LFS_SKIP_SMUDGE: '1'
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: nix_build::build_nix::set_path
|
||||
run: |
|
||||
echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
|
||||
echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: nix_build::build_nix::cachix_action
|
||||
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
|
||||
with:
|
||||
name: zed
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
cachixArgs: -v
|
||||
pushFilter: -zed-editor-[0-9.]*-nightly
|
||||
- name: nix_build::build_nix::build
|
||||
run: nix build .#debug -L --accept-flake-config
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: nix_build::build_nix::limit_store
|
||||
run: |-
|
||||
if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
|
||||
nix-collect-garbage -d || true
|
||||
fi
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
continue-on-error: true
|
||||
check_postgres_and_protobuf_migrations:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_tests == 'true'
|
||||
runs-on: self-mini-macos
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files
|
||||
run: git clean -df
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge
|
||||
run: |
|
||||
if [ -z "$GITHUB_BASE_REF" ];
|
||||
then
|
||||
echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
|
||||
else
|
||||
git checkout -B temp
|
||||
git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
|
||||
echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
|
||||
fi
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action
|
||||
uses: bufbuild/buf-setup-action@v1
|
||||
with:
|
||||
version: v1.29.0
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
|
||||
uses: bufbuild/buf-breaking-action@v1
|
||||
with:
|
||||
input: crates/proto/proto/
|
||||
against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
|
||||
timeout-minutes: 60
|
||||
tests_pass:
|
||||
needs:
|
||||
- orchestrate
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
- run_tests_linux
|
||||
- run_tests_mac
|
||||
- doctests
|
||||
- check_workspace_binaries
|
||||
- check_dependencies
|
||||
- check_docs
|
||||
- check_licenses
|
||||
- check_scripts
|
||||
- build_nix_linux_x86_64
|
||||
- build_nix_mac_aarch64
|
||||
if: github.repository_owner == 'zed-industries' && always()
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: run_tests::tests_pass
|
||||
run: |
|
||||
set +x
|
||||
EXIT_CODE=0
|
||||
|
||||
check_result() {
|
||||
echo "* $1: $2"
|
||||
if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
|
||||
}
|
||||
|
||||
check_result "orchestrate" "${{ needs.orchestrate.result }}"
|
||||
check_result "check_style" "${{ needs.check_style.result }}"
|
||||
check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}"
|
||||
check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}"
|
||||
check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
|
||||
check_result "doctests" "${{ needs.doctests.result }}"
|
||||
check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
|
||||
check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
|
||||
check_result "check_docs" "${{ needs.check_docs.result }}"
|
||||
check_result "check_licenses" "${{ needs.check_licenses.result }}"
|
||||
check_result "check_scripts" "${{ needs.check_scripts.result }}"
|
||||
check_result "build_nix_linux_x86_64" "${{ needs.build_nix_linux_x86_64.result }}"
|
||||
check_result "build_nix_mac_aarch64" "${{ needs.build_nix_mac_aarch64.result }}"
|
||||
|
||||
exit $EXIT_CODE
|
||||
shell: bash -euxo pipefail {0}
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
|
||||
cancel-in-progress: true
|
||||
.github/workflows/run_unit_evals.yml (vendored) — 66 lines removed
@@ -1,66 +0,0 @@
# Generated from xtask::workflows::run_agent_evals
# Rebuild with `cargo xtask workflows`.
name: run_agent_evals
env:
  CARGO_TERM_COLOR: always
  CARGO_INCREMENTAL: '0'
  RUST_BACKTRACE: '1'
  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
on:
  schedule:
    - cron: 47 1 * * 2
  workflow_dispatch: {}
jobs:
  unit_evals:
    runs-on: namespace-profile-16x32-ubuntu-2204
    steps:
      - name: steps::checkout_repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          clean: false
      - name: steps::setup_cargo_config
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
        shell: bash -euxo pipefail {0}
      - name: steps::cache_rust_dependencies_namespace
        uses: namespacelabs/nscloud-cache-action@v1
        with:
          cache: rust
      - name: steps::setup_linux
        run: ./script/linux
        shell: bash -euxo pipefail {0}
      - name: steps::install_mold
        run: ./script/install-mold
        shell: bash -euxo pipefail {0}
      - name: steps::download_wasi_sdk
        run: ./script/download-wasi-sdk
        shell: bash -euxo pipefail {0}
      - name: steps::cargo_install_nextest
        run: cargo install cargo-nextest --locked
        shell: bash -euxo pipefail {0}
      - name: steps::clear_target_dir_if_large
        run: ./script/clear-target-dir-if-larger-than 250
        shell: bash -euxo pipefail {0}
      - name: ./script/run-unit-evals
        run: ./script/run-unit-evals
        shell: bash -euxo pipefail {0}
        env:
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
      - name: run_agent_evals::unit_evals::send_failure_to_slack
        if: ${{ failure() }}
        uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
        with:
          method: chat.postMessage
          token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
          payload: |
            channel: C04UDRNNJFQ
            text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
      - name: steps::cleanup_cargo_config
        if: always()
        run: |
          rm -rf ./../.cargo
        shell: bash -euxo pipefail {0}
concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
  cancel-in-progress: true
.github/workflows/script_checks.yml (vendored, new file) — 21 lines added
@@ -0,0 +1,21 @@
name: Script

on:
  pull_request:
    paths:
      - "script/**"
  push:
    branches:
      - main

jobs:
  shellcheck:
    name: "ShellCheck Scripts"
    if: github.repository_owner == 'zed-industries'
    runs-on: namespace-profile-2x4-ubuntu-2404

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - name: Shellcheck ./scripts
        run: |
          ./script/shellcheck-scripts error
.github/workflows/unit_evals.yml (vendored, new file) — 86 lines added
@@ -0,0 +1,86 @@
name: Run Unit Evals

on:
  schedule:
    # GitHub might drop jobs at busy times, so we choose a random time in the middle of the night.
    - cron: "47 1 * * 2"
  workflow_dispatch:

concurrency:
  # Allow only one workflow per any non-`main` branch.
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
  cancel-in-progress: true

env:
  CARGO_TERM_COLOR: always
  CARGO_INCREMENTAL: 0
  RUST_BACKTRACE: 1
  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}

jobs:
  unit_evals:
    if: github.repository_owner == 'zed-industries'
    timeout-minutes: 60
    name: Run unit evals
    runs-on:
      - namespace-profile-16x32-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          # cache-provider: "buildjet"

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Install Rust
        shell: bash -euxo pipefail {0}
        run: |
          cargo install cargo-nextest --locked

      - name: Install Node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
        with:
          node-version: "18"

      - name: Limit target directory size
        shell: bash -euxo pipefail {0}
        run: script/clear-target-dir-if-larger-than 100

      - name: Run unit evals
        shell: bash -euxo pipefail {0}
        run: cargo nextest run --workspace --no-fail-fast --features eval --no-capture -E 'test(::eval_)'
        env:
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}

      - name: Send failure message to Slack channel if needed
        if: ${{ failure() }}
        uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
        with:
          method: chat.postMessage
          token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
          payload: |
            channel: C04UDRNNJFQ
            text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"

      # Even though the Linux runner is not stateful, and in theory this cleanup is unnecessary,
      # we might later switch to a stateful runner and forget to clean up the config file.
      # The cleanup is therefore included here as a precaution: it is not strictly necessary
      # at the moment, but it is better to err on the side of caution.
      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo
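The eval suite above is just a nextest filter over tests whose names contain `::eval_`, gated behind the `eval` feature. A minimal sketch of running it locally, assuming an Anthropic API key is available; the key value shown is a placeholder:

```bash
# The evals call a real model, so an API key is required (placeholder value shown).
export ANTHROPIC_API_KEY=sk-ant-...

cargo install cargo-nextest --locked
cargo nextest run --workspace --no-fail-fast --features eval --no-capture -E 'test(::eval_)'
```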
Cargo.lock (generated) — 187 lines changed
@@ -32,7 +32,6 @@ dependencies = [
|
||||
"settings",
|
||||
"smol",
|
||||
"task",
|
||||
"telemetry",
|
||||
"tempfile",
|
||||
"terminal",
|
||||
"ui",
|
||||
@@ -40,7 +39,6 @@ dependencies = [
|
||||
"util",
|
||||
"uuid",
|
||||
"watch",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -81,7 +79,6 @@ dependencies = [
|
||||
"rand 0.9.2",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"telemetry",
|
||||
"text",
|
||||
"util",
|
||||
"watch",
|
||||
@@ -96,7 +93,6 @@ dependencies = [
|
||||
"auto_update",
|
||||
"editor",
|
||||
"extension_host",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"language",
|
||||
@@ -251,6 +247,7 @@ dependencies = [
|
||||
"acp_tools",
|
||||
"action_log",
|
||||
"agent-client-protocol",
|
||||
"agent_settings",
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"client",
|
||||
@@ -1331,14 +1328,10 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"clock",
|
||||
"ctor",
|
||||
"db",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"log",
|
||||
"parking_lot",
|
||||
"paths",
|
||||
"release_channel",
|
||||
"serde",
|
||||
@@ -1346,10 +1339,8 @@ dependencies = [
|
||||
"settings",
|
||||
"smol",
|
||||
"tempfile",
|
||||
"util",
|
||||
"which 6.0.3",
|
||||
"workspace",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1359,7 +1350,6 @@ dependencies = [
|
||||
"anyhow",
|
||||
"log",
|
||||
"simplelog",
|
||||
"tempfile",
|
||||
"windows 0.61.3",
|
||||
"winresource",
|
||||
]
|
||||
@@ -3206,7 +3196,6 @@ dependencies = [
|
||||
"indoc",
|
||||
"ordered-float 2.10.1",
|
||||
"rustc-hash 2.1.1",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
"strum 0.27.2",
|
||||
]
|
||||
@@ -4539,15 +4528,12 @@ dependencies = [
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"json_dotpath",
|
||||
"language",
|
||||
"log",
|
||||
"node_runtime",
|
||||
"paths",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smol",
|
||||
"task",
|
||||
"util",
|
||||
@@ -4916,18 +4902,6 @@ dependencies = [
|
||||
"syn 2.0.106",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derive_setters"
|
||||
version = "0.1.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae5c625eda104c228c06ecaf988d1c60e542176bd7a490e60eeda3493244c0c9"
|
||||
dependencies = [
|
||||
"darling 0.20.11",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.106",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deunicode"
|
||||
version = "1.6.2"
|
||||
@@ -4946,7 +4920,6 @@ dependencies = [
|
||||
"editor",
|
||||
"gpui",
|
||||
"indoc",
|
||||
"itertools 0.14.0",
|
||||
"language",
|
||||
"log",
|
||||
"lsp",
|
||||
@@ -5847,6 +5820,8 @@ name = "extension"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-compression",
|
||||
"async-tar",
|
||||
"async-trait",
|
||||
"collections",
|
||||
"dap",
|
||||
@@ -6413,7 +6388,7 @@ dependencies = [
|
||||
"ignore",
|
||||
"libc",
|
||||
"log",
|
||||
"notify 8.2.0",
|
||||
"notify 8.0.0",
|
||||
"objc",
|
||||
"parking_lot",
|
||||
"paths",
|
||||
@@ -6967,33 +6942,6 @@ dependencies = [
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gh-workflow"
|
||||
version = "0.8.0"
|
||||
source = "git+https://github.com/zed-industries/gh-workflow?rev=3eaa84abca0778eb54272f45a312cb24f9a0b435#3eaa84abca0778eb54272f45a312cb24f9a0b435"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"derive_more 2.0.1",
|
||||
"derive_setters",
|
||||
"gh-workflow-macros",
|
||||
"indexmap 2.11.4",
|
||||
"merge",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
"strum_macros 0.27.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gh-workflow-macros"
|
||||
version = "0.8.0"
|
||||
source = "git+https://github.com/zed-industries/gh-workflow?rev=3eaa84abca0778eb54272f45a312cb24f9a0b435#3eaa84abca0778eb54272f45a312cb24f9a0b435"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"quote",
|
||||
"syn 2.0.106",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gif"
|
||||
version = "0.13.3"
|
||||
@@ -7087,7 +7035,6 @@ dependencies = [
|
||||
"serde_json",
|
||||
"settings",
|
||||
"url",
|
||||
"urlencoding",
|
||||
"util",
|
||||
]
|
||||
|
||||
@@ -7100,6 +7047,7 @@ dependencies = [
|
||||
"askpass",
|
||||
"buffer_diff",
|
||||
"call",
|
||||
"chrono",
|
||||
"cloud_llm_client",
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
@@ -7125,8 +7073,6 @@ dependencies = [
|
||||
"picker",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"recent_projects",
|
||||
"remote",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -7278,7 +7224,6 @@ dependencies = [
|
||||
"async-task",
|
||||
"backtrace",
|
||||
"bindgen 0.71.1",
|
||||
"bitflags 2.9.4",
|
||||
"blade-graphics",
|
||||
"blade-macros",
|
||||
"blade-util",
|
||||
@@ -7358,7 +7303,6 @@ dependencies = [
|
||||
"wayland-cursor",
|
||||
"wayland-protocols 0.31.2",
|
||||
"wayland-protocols-plasma",
|
||||
"wayland-protocols-wlr",
|
||||
"windows 0.61.3",
|
||||
"windows-core 0.61.2",
|
||||
"windows-numerics",
|
||||
@@ -7805,7 +7749,6 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_urlencoded",
|
||||
"sha2",
|
||||
"tempfile",
|
||||
"url",
|
||||
@@ -8873,7 +8816,6 @@ dependencies = [
|
||||
"open_router",
|
||||
"parking_lot",
|
||||
"proto",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
@@ -9038,7 +8980,6 @@ dependencies = [
|
||||
"settings",
|
||||
"smol",
|
||||
"task",
|
||||
"terminal",
|
||||
"text",
|
||||
"theme",
|
||||
"toml 0.8.23",
|
||||
@@ -9686,7 +9627,6 @@ dependencies = [
|
||||
"settings",
|
||||
"theme",
|
||||
"ui",
|
||||
"urlencoding",
|
||||
"util",
|
||||
"workspace",
|
||||
]
|
||||
@@ -9871,28 +9811,6 @@ dependencies = [
|
||||
"gpui",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "merge"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "10bbef93abb1da61525bbc45eeaff6473a41907d19f8f9aa5168d214e10693e9"
|
||||
dependencies = [
|
||||
"merge_derive",
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "merge_derive"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "209d075476da2e63b4b29e72a2ef627b840589588e71400a25e3565c4f849d07"
|
||||
dependencies = [
|
||||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "metal"
|
||||
version = "0.29.0"
|
||||
@@ -10421,10 +10339,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "notify"
|
||||
version = "8.2.0"
|
||||
source = "git+https://github.com/zed-industries/notify.git?rev=b4588b2e5aee68f4c0e100f140e808cbce7b1419#b4588b2e5aee68f4c0e100f140e808cbce7b1419"
|
||||
version = "8.0.0"
|
||||
source = "git+https://github.com/zed-industries/notify.git?rev=bbb9ea5ae52b253e095737847e367c30653a2e96#bbb9ea5ae52b253e095737847e367c30653a2e96"
|
||||
dependencies = [
|
||||
"bitflags 2.9.4",
|
||||
"filetime",
|
||||
"fsevent-sys 4.1.0",
|
||||
"inotify 0.11.0",
|
||||
"kqueue",
|
||||
@@ -10433,7 +10352,7 @@ dependencies = [
|
||||
"mio 1.1.0",
|
||||
"notify-types",
|
||||
"walkdir",
|
||||
"windows-sys 0.60.2",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -10450,7 +10369,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "notify-types"
|
||||
version = "2.0.0"
|
||||
source = "git+https://github.com/zed-industries/notify.git?rev=b4588b2e5aee68f4c0e100f140e808cbce7b1419#b4588b2e5aee68f4c0e100f140e808cbce7b1419"
|
||||
source = "git+https://github.com/zed-industries/notify.git?rev=bbb9ea5ae52b253e095737847e367c30653a2e96#bbb9ea5ae52b253e095737847e367c30653a2e96"
|
||||
|
||||
[[package]]
|
||||
name = "now"
|
||||
@@ -12881,30 +12800,6 @@ dependencies = [
|
||||
"toml_edit 0.23.7",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
|
||||
dependencies = [
|
||||
"proc-macro-error-attr",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error-attr"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error-attr2"
|
||||
version = "2.0.0"
|
||||
@@ -13143,6 +13038,7 @@ dependencies = [
|
||||
"paths",
|
||||
"rope",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"text",
|
||||
"util",
|
||||
"uuid",
|
||||
@@ -13982,7 +13878,6 @@ dependencies = [
|
||||
"gpui",
|
||||
"gpui_tokio",
|
||||
"http_client",
|
||||
"image",
|
||||
"json_schema_store",
|
||||
"language",
|
||||
"language_extension",
|
||||
@@ -14331,6 +14226,7 @@ dependencies = [
|
||||
"log",
|
||||
"rand 0.9.2",
|
||||
"rayon",
|
||||
"regex",
|
||||
"sum_tree",
|
||||
"unicode-segmentation",
|
||||
"util",
|
||||
@@ -15328,19 +15224,6 @@ dependencies = [
|
||||
"syn 2.0.106",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_yaml"
|
||||
version = "0.9.34+deprecated"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
|
||||
dependencies = [
|
||||
"indexmap 2.11.4",
|
||||
"itoa",
|
||||
"ryu",
|
||||
"serde",
|
||||
"unsafe-libyaml",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serial2"
|
||||
version = "0.2.33"
|
||||
@@ -16224,6 +16107,7 @@ dependencies = [
|
||||
"log",
|
||||
"menu",
|
||||
"picker",
|
||||
"project",
|
||||
"reqwest_client",
|
||||
"rust-embed",
|
||||
"settings",
|
||||
@@ -16233,6 +16117,7 @@ dependencies = [
|
||||
"theme",
|
||||
"title_bar",
|
||||
"ui",
|
||||
"workspace",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -16512,7 +16397,7 @@ dependencies = [
|
||||
"editor",
|
||||
"file_icons",
|
||||
"gpui",
|
||||
"language",
|
||||
"multi_buffer",
|
||||
"ui",
|
||||
"workspace",
|
||||
]
|
||||
@@ -17164,7 +17049,6 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"postage",
|
||||
"rand 0.9.2",
|
||||
"regex",
|
||||
"rope",
|
||||
"smallvec",
|
||||
"sum_tree",
|
||||
@@ -17457,7 +17341,6 @@ dependencies = [
|
||||
"anyhow",
|
||||
"auto_update",
|
||||
"call",
|
||||
"channel",
|
||||
"chrono",
|
||||
"client",
|
||||
"cloud_llm_client",
|
||||
@@ -18059,7 +17942,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "tree-sitter-gomod"
|
||||
version = "1.1.1"
|
||||
source = "git+https://github.com/camdencheek/tree-sitter-go-mod?rev=2e886870578eeba1927a2dc4bd2e2b3f598c5f9a#2e886870578eeba1927a2dc4bd2e2b3f598c5f9a"
|
||||
source = "git+https://github.com/camdencheek/tree-sitter-go-mod?rev=6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c#6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter-language",
|
||||
@@ -18528,12 +18411,6 @@ version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3"
|
||||
|
||||
[[package]]
|
||||
name = "unsafe-libyaml"
|
||||
version = "0.2.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
|
||||
|
||||
[[package]]
|
||||
name = "untrusted"
|
||||
version = "0.9.0"
|
||||
@@ -18628,7 +18505,6 @@ dependencies = [
|
||||
"itertools 0.14.0",
|
||||
"libc",
|
||||
"log",
|
||||
"mach2 0.5.0",
|
||||
"nix 0.29.0",
|
||||
"pretty_assertions",
|
||||
"rand 0.9.2",
|
||||
@@ -18818,6 +18694,7 @@ dependencies = [
|
||||
name = "vim_mode_setting"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"gpui",
|
||||
"settings",
|
||||
]
|
||||
|
||||
@@ -19508,19 +19385,6 @@ dependencies = [
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-protocols-wlr"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "efd94963ed43cf9938a090ca4f7da58eb55325ec8200c3848963e98dc25b78ec"
|
||||
dependencies = [
|
||||
"bitflags 2.9.4",
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-protocols 0.32.9",
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-scanner"
|
||||
version = "0.31.7"
|
||||
@@ -20954,14 +20818,10 @@ name = "xtask"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"backtrace",
|
||||
"cargo_metadata",
|
||||
"cargo_toml",
|
||||
"clap",
|
||||
"gh-workflow",
|
||||
"indexmap 2.11.4",
|
||||
"indoc",
|
||||
"serde",
|
||||
"toml 0.8.23",
|
||||
"toml_edit 0.22.27",
|
||||
]
|
||||
@@ -21146,7 +21006,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.213.0"
|
||||
version = "0.211.0"
|
||||
dependencies = [
|
||||
"acp_tools",
|
||||
"activity_indicator",
|
||||
@@ -21238,7 +21098,6 @@ dependencies = [
|
||||
"project_symbols",
|
||||
"prompt_store",
|
||||
"proto",
|
||||
"rayon",
|
||||
"recent_projects",
|
||||
"release_channel",
|
||||
"remote",
|
||||
@@ -21674,7 +21533,6 @@ dependencies = [
|
||||
"clock",
|
||||
"cloud_llm_client",
|
||||
"cloud_zeta2_prompt",
|
||||
"collections",
|
||||
"edit_prediction",
|
||||
"edit_prediction_context",
|
||||
"feature_flags",
|
||||
@@ -21685,32 +21543,27 @@ dependencies = [
|
||||
"language_model",
|
||||
"log",
|
||||
"lsp",
|
||||
"open_ai",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"release_channel",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smol",
|
||||
"thiserror 2.0.17",
|
||||
"util",
|
||||
"uuid",
|
||||
"workspace",
|
||||
"worktree",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zeta2_tools"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
"clap",
|
||||
"client",
|
||||
"cloud_llm_client",
|
||||
"cloud_zeta2_prompt",
|
||||
"collections",
|
||||
"edit_prediction_context",
|
||||
"editor",
|
||||
@@ -21755,7 +21608,6 @@ dependencies = [
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"gpui_tokio",
|
||||
"indoc",
|
||||
"language",
|
||||
"language_extension",
|
||||
"language_model",
|
||||
@@ -21766,10 +21618,8 @@ dependencies = [
|
||||
"ordered-float 2.10.1",
|
||||
"paths",
|
||||
"polars",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"prompt_store",
|
||||
"pulldown-cmark 0.12.2",
|
||||
"release_channel",
|
||||
"reqwest_client",
|
||||
"serde",
|
||||
@@ -21779,7 +21629,6 @@ dependencies = [
|
||||
"smol",
|
||||
"soa-rs",
|
||||
"terminal_view",
|
||||
"toml 0.8.23",
|
||||
"util",
|
||||
"watch",
|
||||
"zeta",
|
||||
|
||||
@@ -508,7 +508,6 @@ fork = "0.2.0"
futures = "0.3"
futures-batch = "0.6.1"
futures-lite = "1.13"
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "3eaa84abca0778eb54272f45a312cb24f9a0b435" }
git2 = { version = "0.20.1", default-features = false }
globset = "0.4"
handlebars = "4.3"
@@ -663,7 +662,6 @@ time = { version = "0.3", features = [
    "serde",
    "serde-well-known",
    "formatting",
    "local-offset",
] }
tiny_http = "0.8"
tokio = { version = "1" }
@@ -681,7 +679,7 @@ tree-sitter-elixir = "0.3"
tree-sitter-embedded-template = "0.23.0"
tree-sitter-gitcommit = { git = "https://github.com/zed-industries/tree-sitter-git-commit", rev = "88309716a69dd13ab83443721ba6e0b491d37ee9" }
tree-sitter-go = "0.23"
tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "2e886870578eeba1927a2dc4bd2e2b3f598c5f9a", package = "tree-sitter-gomod" }
tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c", package = "tree-sitter-gomod" }
tree-sitter-gowork = { git = "https://github.com/zed-industries/tree-sitter-go-work", rev = "acb0617bf7f4fda02c6217676cc64acb89536dc7" }
tree-sitter-heex = { git = "https://github.com/zed-industries/tree-sitter-heex", rev = "1dd45142fbb05562e35b2040c6129c9bca346592" }
tree-sitter-html = "0.23"
@@ -773,8 +771,8 @@ features = [
]

[patch.crates-io]
notify = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
notify = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" }

[profile.dev]
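After changing a pinned git revision like the `notify` patch above, Cargo.lock must be regenerated to match; the check_dependencies job verifies this with the command below, which can be run locally first:

```bash
# Fails if Cargo.lock is out of date with the workspace manifests.
cargo update --locked --workspace
```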
Procfile.postgrest (new file) — 2 lines added
@@ -0,0 +1,2 @@
app: postgrest crates/collab/postgrest_app.conf
llm: postgrest crates/collab/postgrest_llm.conf
@@ -1 +1,2 @@
postgrest_llm: postgrest crates/collab/postgrest_llm.conf
website: cd ../zed.dev; npm run dev -- --port=3000

@@ -1,7 +1,7 @@
# Zed

[](https://zed.dev)
[](https://github.com/zed-industries/zed/actions/workflows/run_tests.yml)
[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)

Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).
166
REVIEWERS.conl
@@ -8,110 +8,100 @@
; to other areas too.

<all>
= @cole-miller
= @ConradIrwin
= @danilo-leal
= @dinocosta
= @HactarCE
= @kubkon
= @maxdeviant
= @p1n3appl3
= @probably-neb
= @smitbarmase
= @SomeoneToIgnore
= @Veykril

ai
= @benbrandt
= @bennetbo
= @danilo-leal
= @rtfeldman

audio
= @dvdsk

crashes
= @p1n3appl3
= @Veykril

debugger
= @Anthony-Eid
= @kubkon
= @osiewicz

design
= @danilo-leal

docs
= @probably-neb

extension
= @danilo-leal
= @Veykril
= @kubkon
= @p1n3appl3
= @dinocosta
= @smitbarmase
= @cole-miller

vim
= @ConradIrwin
= @probably-neb
= @p1n3appl3
= @dinocosta

gpui
= @mikayla-maki

git
= @cole-miller
= @danilo-leal

gpui
= @Anthony-Eid
= @cameron1024
= @mikayla-maki
= @probably-neb
linux
= @dvdsk
= @smitbarmase
= @p1n3appl3
= @cole-miller

windows
= @reflectronic
= @localcc

pickers
= @p1n3appl3
= @dvdsk
= @SomeoneToIgnore

audio
= @dvdsk

helix
= @kubkon

languages
= @osiewicz
= @probably-neb
= @smitbarmase
= @SomeoneToIgnore
= @Veykril

linux
= @cole-miller
= @dvdsk
= @p1n3appl3
= @probably-neb
= @smitbarmase

lsp
= @osiewicz
= @smitbarmase
= @SomeoneToIgnore
= @Veykril

multi_buffer
= @Veykril
= @SomeoneToIgnore

pickers
= @dvdsk
= @p1n3appl3
= @SomeoneToIgnore

project_panel
= @smitbarmase

settings_ui
= @Anthony-Eid
= @danilo-leal
= @probably-neb

tasks
= @SomeoneToIgnore
= @Veykril

terminal
= @kubkon
= @Veykril

vim
= @ConradIrwin
= @dinocosta
= @p1n3appl3
= @probably-neb
debugger
= @kubkon
= @osiewicz
= @Anthony-Eid

windows
= @localcc
= @reflectronic
extension
= @kubkon

settings_ui
= @probably-neb
= @danilo-leal
= @Anthony-Eid

crashes
= @p1n3appl3
= @Veykril

ai
= @rtfeldman
= @danilo-leal
= @benbrandt

design
= @danilo-leal

multi_buffer
= @Veykril
= @SomeoneToIgnore

lsp
= @osiewicz
= @Veykril
= @smitbarmase
= @SomeoneToIgnore

languages
= @osiewicz
= @Veykril
= @smitbarmase
= @SomeoneToIgnore

project_panel
= @smitbarmase

tasks
= @SomeoneToIgnore
= @Veykril

@@ -1,4 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M11.3335 13.3333L8.00017 10L4.66685 13.3333" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11.3335 2.66669L8.00017 6.00002L4.66685 2.66669" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Before Width: | Height: | Size: 382 B
@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6.2 11H5C4.20435 11 3.44129 10.6839 2.87868 10.1213C2.31607 9.55871 2 8.79565 2 8C2 7.20435 2.31607 6.44129 2.87868 5.87868C3.44129 5.31607 4.20435 5 5 5H6.2" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M9.80005 5H11C11.7957 5 12.5588 5.31607 13.1214 5.87868C13.684 6.44129 14 7.20435 14 8C14 8.79565 13.684 9.55871 13.1214 10.1213C12.5588 10.6839 11.7957 11 11 11H9.80005" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5.6001 8H10.4001" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Before Width: | Height: | Size: 735 B
Before Width: | Height: | Size: 5.5 KiB After Width: | Height: | Size: 5.5 KiB
Before Width: | Height: | Size: 3.9 KiB After Width: | Height: | Size: 3.9 KiB
@@ -43,8 +43,7 @@
|
||||
"f11": "zed::ToggleFullScreen",
|
||||
"ctrl-alt-z": "edit_prediction::RateCompletions",
|
||||
"ctrl-alt-shift-i": "edit_prediction::ToggleMenu",
|
||||
"ctrl-alt-l": "lsp_tool::ToggleMenu",
|
||||
"ctrl-alt-.": "project_panel::ToggleHideHidden"
|
||||
"ctrl-alt-l": "lsp_tool::ToggleMenu"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -236,13 +235,12 @@
|
||||
"ctrl-alt-n": "agent::NewTextThread",
|
||||
"ctrl-shift-h": "agent::OpenHistory",
|
||||
"ctrl-alt-c": "agent::OpenSettings",
|
||||
"ctrl-alt-p": "agent::ManageProfiles",
|
||||
"ctrl-alt-l": "agent::OpenRulesLibrary",
|
||||
"ctrl-alt-p": "agent::OpenRulesLibrary",
|
||||
"ctrl-i": "agent::ToggleProfileSelector",
|
||||
"ctrl-alt-/": "agent::ToggleModelSelector",
|
||||
"ctrl-shift-a": "agent::ToggleContextPicker",
|
||||
"ctrl-shift-j": "agent::ToggleNavigationMenu",
|
||||
"ctrl-alt-i": "agent::ToggleOptionsMenu",
|
||||
"ctrl-shift-i": "agent::ToggleOptionsMenu",
|
||||
"ctrl-alt-shift-n": "agent::ToggleNewThreadMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"ctrl->": "agent::AddSelectionToThread",
|
||||
@@ -409,7 +407,6 @@
|
||||
"bindings": {
|
||||
"escape": "project_search::ToggleFocus",
|
||||
"shift-find": "search::FocusSearch",
|
||||
"shift-enter": "project_search::ToggleAllSearchResults",
|
||||
"ctrl-shift-f": "search::FocusSearch",
|
||||
"ctrl-shift-h": "search::ToggleReplace",
|
||||
"alt-ctrl-g": "search::ToggleRegex",
|
||||
@@ -482,7 +479,6 @@
|
||||
"alt-w": "search::ToggleWholeWord",
|
||||
"alt-find": "project_search::ToggleFilters",
|
||||
"alt-ctrl-f": "project_search::ToggleFilters",
|
||||
"shift-enter": "project_search::ToggleAllSearchResults",
|
||||
"ctrl-alt-shift-r": "search::ToggleRegex",
|
||||
"ctrl-alt-shift-x": "search::ToggleRegex",
|
||||
"alt-r": "search::ToggleRegex",
|
||||
@@ -613,7 +609,7 @@
|
||||
"ctrl-alt-b": "workspace::ToggleRightDock",
|
||||
"ctrl-b": "workspace::ToggleLeftDock",
|
||||
"ctrl-j": "workspace::ToggleBottomDock",
|
||||
"ctrl-alt-y": "workspace::ToggleAllDocks",
|
||||
"ctrl-alt-y": "workspace::CloseAllDocks",
|
||||
"ctrl-alt-0": "workspace::ResetActiveDockSize",
|
||||
// For 0px parameter, uses UI font size value.
|
||||
"ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
|
||||
@@ -735,20 +731,6 @@
|
||||
"tab": "editor::ComposeCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"tab": "editor::NextSnippetTabstop"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"shift-tab": "editor::PreviousSnippetTabstop"
|
||||
}
|
||||
},
|
||||
// Bindings for accepting edit predictions
|
||||
//
|
||||
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
|
||||
@@ -951,7 +933,6 @@
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-down": "git::PullRebase",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
@@ -1031,8 +1012,7 @@
|
||||
"context": "CollabPanel",
|
||||
"bindings": {
|
||||
"alt-up": "collab_panel::MoveChannelUp",
|
||||
"alt-down": "collab_panel::MoveChannelDown",
|
||||
"alt-enter": "collab_panel::OpenSelectedChannelNotes"
|
||||
"alt-down": "collab_panel::MoveChannelDown"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1146,8 +1126,7 @@
|
||||
"ctrl-shift-space": "terminal::ToggleViMode",
|
||||
"ctrl-shift-r": "terminal::RerunTask",
|
||||
"ctrl-alt-r": "terminal::RerunTask",
|
||||
"alt-t": "terminal::RerunTask",
|
||||
"ctrl-shift-5": "pane::SplitRight"
|
||||
"alt-t": "terminal::RerunTask"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1263,14 +1242,6 @@
|
||||
"ctrl-shift-enter": "workspace::OpenWithSystem"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
|
||||
"ctrl-space": "git::WorktreeFromDefault"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
@@ -1327,12 +1298,5 @@
|
||||
"ctrl-enter up": "dev::Zeta2RatePredictionPositive",
|
||||
"ctrl-enter down": "dev::Zeta2RatePredictionNegative"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Zeta2Context > Editor",
|
||||
"bindings": {
|
||||
"alt-left": "dev::Zeta2ContextGoBack",
|
||||
"alt-right": "dev::Zeta2ContextGoForward"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -49,8 +49,7 @@
|
||||
"ctrl-cmd-f": "zed::ToggleFullScreen",
|
||||
"ctrl-cmd-z": "edit_prediction::RateCompletions",
|
||||
"ctrl-cmd-i": "edit_prediction::ToggleMenu",
|
||||
"ctrl-cmd-l": "lsp_tool::ToggleMenu",
|
||||
"cmd-alt-.": "project_panel::ToggleHideHidden"
|
||||
"ctrl-cmd-l": "lsp_tool::ToggleMenu"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -275,13 +274,12 @@
|
||||
"cmd-alt-n": "agent::NewTextThread",
|
||||
"cmd-shift-h": "agent::OpenHistory",
|
||||
"cmd-alt-c": "agent::OpenSettings",
|
||||
"cmd-alt-l": "agent::OpenRulesLibrary",
|
||||
"cmd-alt-p": "agent::ManageProfiles",
|
||||
"cmd-alt-p": "agent::OpenRulesLibrary",
|
||||
"cmd-i": "agent::ToggleProfileSelector",
|
||||
"cmd-alt-/": "agent::ToggleModelSelector",
|
||||
"cmd-shift-a": "agent::ToggleContextPicker",
|
||||
"cmd-shift-j": "agent::ToggleNavigationMenu",
|
||||
"cmd-alt-m": "agent::ToggleOptionsMenu",
|
||||
"cmd-shift-i": "agent::ToggleOptionsMenu",
|
||||
"cmd-alt-shift-n": "agent::ToggleNewThreadMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"cmd->": "agent::AddSelectionToThread",
|
||||
@@ -470,7 +468,6 @@
|
||||
"bindings": {
|
||||
"escape": "project_search::ToggleFocus",
|
||||
"cmd-shift-j": "project_search::ToggleFilters",
|
||||
"shift-enter": "project_search::ToggleAllSearchResults",
|
||||
"cmd-shift-f": "search::FocusSearch",
|
||||
"cmd-shift-h": "search::ToggleReplace",
|
||||
"alt-cmd-g": "search::ToggleRegex",
|
||||
@@ -499,7 +496,6 @@
|
||||
"bindings": {
|
||||
"escape": "project_search::ToggleFocus",
|
||||
"cmd-shift-j": "project_search::ToggleFilters",
|
||||
"shift-enter": "project_search::ToggleAllSearchResults",
|
||||
"cmd-shift-h": "search::ToggleReplace",
|
||||
"alt-cmd-g": "search::ToggleRegex",
|
||||
"alt-cmd-x": "search::ToggleRegex"
|
||||
@@ -683,7 +679,7 @@
|
||||
"cmd-alt-b": "workspace::ToggleRightDock",
|
||||
"cmd-r": "workspace::ToggleRightDock",
|
||||
"cmd-j": "workspace::ToggleBottomDock",
|
||||
"alt-cmd-y": "workspace::ToggleAllDocks",
|
||||
"alt-cmd-y": "workspace::CloseAllDocks",
|
||||
// For 0px parameter, uses UI font size value.
|
||||
"ctrl-alt-0": "workspace::ResetActiveDockSize",
|
||||
"ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
|
||||
@@ -805,20 +801,6 @@
|
||||
"tab": "editor::ComposeCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"tab": "editor::NextSnippetTabstop"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"shift-tab": "editor::PreviousSnippetTabstop"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && edit_prediction",
|
||||
"bindings": {
|
||||
@@ -1044,7 +1026,6 @@
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-down": "git::PullRebase",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
@@ -1096,8 +1077,7 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"alt-up": "collab_panel::MoveChannelUp",
|
||||
"alt-down": "collab_panel::MoveChannelDown",
|
||||
"alt-enter": "collab_panel::OpenSelectedChannelNotes"
|
||||
"alt-down": "collab_panel::MoveChannelDown"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1229,7 +1209,6 @@
|
||||
"ctrl-alt-down": "pane::SplitDown",
|
||||
"ctrl-alt-left": "pane::SplitLeft",
|
||||
"ctrl-alt-right": "pane::SplitRight",
|
||||
"cmd-d": "pane::SplitRight",
|
||||
"cmd-alt-r": "terminal::RerunTask"
|
||||
}
|
||||
},
|
||||
@@ -1368,14 +1347,6 @@
|
||||
"ctrl-shift-enter": "workspace::OpenWithSystem"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
|
||||
"ctrl-space": "git::WorktreeFromDefault"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
@@ -1433,12 +1404,5 @@
|
||||
"cmd-enter up": "dev::Zeta2RatePredictionPositive",
|
||||
"cmd-enter down": "dev::Zeta2RatePredictionNegative"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Zeta2Context > Editor",
|
||||
"bindings": {
|
||||
"alt-left": "dev::Zeta2ContextGoBack",
|
||||
"alt-right": "dev::Zeta2ContextGoForward"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -41,8 +41,7 @@
|
||||
"shift-f11": "debugger::StepOut",
|
||||
"f11": "zed::ToggleFullScreen",
|
||||
"ctrl-shift-i": "edit_prediction::ToggleMenu",
|
||||
"shift-alt-l": "lsp_tool::ToggleMenu",
|
||||
"ctrl-alt-.": "project_panel::ToggleHideHidden"
|
||||
"shift-alt-l": "lsp_tool::ToggleMenu"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -237,13 +236,12 @@
|
||||
"shift-alt-n": "agent::NewTextThread",
|
||||
"ctrl-shift-h": "agent::OpenHistory",
|
||||
"shift-alt-c": "agent::OpenSettings",
|
||||
"shift-alt-l": "agent::OpenRulesLibrary",
|
||||
"shift-alt-p": "agent::ManageProfiles",
|
||||
"shift-alt-p": "agent::OpenRulesLibrary",
|
||||
"ctrl-i": "agent::ToggleProfileSelector",
|
||||
"shift-alt-/": "agent::ToggleModelSelector",
|
||||
"ctrl-shift-a": "agent::ToggleContextPicker",
|
||||
"ctrl-shift-j": "agent::ToggleNavigationMenu",
|
||||
"ctrl-alt-i": "agent::ToggleOptionsMenu",
|
||||
"ctrl-shift-i": "agent::ToggleOptionsMenu",
|
||||
// "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"ctrl-shift-.": "agent::AddSelectionToThread",
|
||||
@@ -490,7 +488,6 @@
|
||||
"alt-c": "search::ToggleCaseSensitive",
|
||||
"alt-w": "search::ToggleWholeWord",
|
||||
"alt-f": "project_search::ToggleFilters",
|
||||
"shift-enter": "project_search::ToggleAllSearchResults",
|
||||
"alt-r": "search::ToggleRegex",
|
||||
// "ctrl-shift-alt-x": "search::ToggleRegex",
|
||||
"ctrl-k shift-enter": "pane::TogglePinTab"
|
||||
@@ -617,7 +614,7 @@
|
||||
"ctrl-alt-b": "workspace::ToggleRightDock",
|
||||
"ctrl-b": "workspace::ToggleLeftDock",
|
||||
"ctrl-j": "workspace::ToggleBottomDock",
|
||||
"ctrl-shift-y": "workspace::ToggleAllDocks",
|
||||
"ctrl-shift-y": "workspace::CloseAllDocks",
|
||||
"alt-r": "workspace::ResetActiveDockSize",
|
||||
// For 0px parameter, uses UI font size value.
|
||||
"shift-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
|
||||
@@ -739,20 +736,6 @@
|
||||
"tab": "editor::ComposeCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"tab": "editor::NextSnippetTabstop"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"shift-tab": "editor::PreviousSnippetTabstop"
|
||||
}
|
||||
},
|
||||
// Bindings for accepting edit predictions
|
||||
//
|
||||
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
|
||||
@@ -960,7 +943,6 @@
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-down": "git::PullRebase",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
@@ -1048,8 +1030,7 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"alt-up": "collab_panel::MoveChannelUp",
|
||||
"alt-down": "collab_panel::MoveChannelDown",
|
||||
"alt-enter": "collab_panel::OpenSelectedChannelNotes"
|
||||
"alt-down": "collab_panel::MoveChannelDown"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1171,8 +1152,7 @@
|
||||
"ctrl-shift-space": "terminal::ToggleViMode",
|
||||
"ctrl-shift-r": "terminal::RerunTask",
|
||||
"ctrl-alt-r": "terminal::RerunTask",
|
||||
"alt-t": "terminal::RerunTask",
|
||||
"ctrl-shift-5": "pane::SplitRight"
|
||||
"alt-t": "terminal::RerunTask"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1290,14 +1270,6 @@
|
||||
"shift-alt-a": "onboarding::OpenAccount"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
|
||||
"ctrl-space": "git::WorktreeFromDefault"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
@@ -1355,12 +1327,5 @@
|
||||
"ctrl-enter up": "dev::Zeta2RatePredictionPositive",
|
||||
"ctrl-enter down": "dev::Zeta2RatePredictionNegative"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Zeta2Context > Editor",
|
||||
"bindings": {
|
||||
"alt-left": "dev::Zeta2ContextGoBack",
|
||||
"alt-right": "dev::Zeta2ContextGoForward"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -91,7 +91,7 @@
{
"context": "Workspace",
"bindings": {
"ctrl-shift-f12": "workspace::ToggleAllDocks",
"ctrl-shift-f12": "workspace::CloseAllDocks",
"ctrl-shift-r": ["pane::DeploySearch", { "replace_enabled": true }],
"alt-shift-f10": "task::Spawn",
"ctrl-e": "file_finder::Toggle",

@@ -93,7 +93,7 @@
{
"context": "Workspace",
"bindings": {
"cmd-shift-f12": "workspace::ToggleAllDocks",
"cmd-shift-f12": "workspace::CloseAllDocks",
"cmd-shift-r": ["pane::DeploySearch", { "replace_enabled": true }],
"ctrl-alt-r": "task::Spawn",
"cmd-e": "file_finder::Toggle",

@@ -220,8 +220,6 @@
"[ {": ["vim::UnmatchedBackward", { "char": "{" }],
"] )": ["vim::UnmatchedForward", { "char": ")" }],
"[ (": ["vim::UnmatchedBackward", { "char": "(" }],
"[ r": "vim::GoToPreviousReference",
"] r": "vim::GoToNextReference",
// tree-sitter related commands
"[ x": "vim::SelectLargerSyntaxNode",
"] x": "vim::SelectSmallerSyntaxNode"
@@ -421,12 +419,6 @@
"ctrl-[": "editor::Cancel"
}
},
{
"context": "vim_mode == helix_select && !menu",
"bindings": {
"escape": "vim::SwitchToHelixNormalMode"
}
},
{
"context": "(vim_mode == helix_normal || vim_mode == helix_select) && !menu",
"bindings": {
@@ -455,7 +447,6 @@
"<": "vim::Outdent",
"=": "vim::AutoIndent",
"d": "vim::HelixDelete",
"alt-d": "editor::Delete", // Delete selection, without yanking
"c": "vim::HelixSubstitute",
"alt-c": "vim::HelixSubstituteNoYank",

179
assets/prompts/assistant_system_prompt.hbs
Normal file
179
assets/prompts/assistant_system_prompt.hbs
Normal file
@@ -0,0 +1,179 @@
|
||||
You are a highly skilled software engineer with extensive knowledge in many programming languages, frameworks, design patterns, and best practices.
|
||||
|
||||
## Communication
|
||||
|
||||
1. Be conversational but professional.
|
||||
2. Refer to the user in the second person and yourself in the first person.
|
||||
3. Format your responses in markdown. Use backticks to format file, directory, function, and class names.
|
||||
4. NEVER lie or make things up.
|
||||
5. Refrain from apologizing all the time when results are unexpected. Instead, just try your best to proceed or explain the circumstances to the user without apologizing.
|
||||
|
||||
{{#if has_tools}}
|
||||
## Tool Use
|
||||
|
||||
1. Make sure to adhere to the tools schema.
|
||||
2. Provide every required argument.
|
||||
3. DO NOT use tools to access items that are already available in the context section.
|
||||
4. Use only the tools that are currently available.
|
||||
5. DO NOT use a tool that is not available just because it appears in the conversation. This means the user turned it off.
|
||||
6. NEVER run commands that don't terminate on their own such as web servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers.
|
||||
7. Avoid HTML entity escaping - use plain characters instead.
|
||||
|
||||
## Searching and Reading
|
||||
|
||||
If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions.
|
||||
|
||||
{{! TODO: If there are files, we should mention it but otherwise omit that fact }}
|
||||
If appropriate, use tool calls to explore the current project, which contains the following root directories:
|
||||
|
||||
{{#each worktrees}}
|
||||
- `{{abs_path}}`
|
||||
{{/each}}
|
||||
|
||||
- Bias towards not asking the user for help if you can find the answer yourself.
|
||||
- When providing paths to tools, the path should always start with the name of a project root directory listed above.
|
||||
- Before you read or edit a file, you must first find the full path. DO NOT ever guess a file path!
|
||||
{{# if (has_tool 'grep') }}
|
||||
- When looking for symbols in the project, prefer the `grep` tool.
|
||||
- As you learn about the structure of the project, use that information to scope `grep` searches to targeted subtrees of the project.
|
||||
- The user might specify a partial file path. If you don't know the full path, use `find_path` (not `grep`) before you read the file.
|
||||
{{/if}}
|
||||
{{else}}
|
||||
You are being tasked with providing a response, but you have no ability to use tools or to read or write any aspect of the user's system (other than any context the user might have provided to you).
|
||||
|
||||
As such, if you need the user to perform any actions for you, you must request them explicitly. Bias towards giving a response to the best of your ability, and then making requests for the user to take action (e.g. to give you more context) only optionally.
|
||||
|
||||
The one exception to this is if the user references something you don't know about - for example, the name of a source code file, function, type, or other piece of code that you have no awareness of. In this case, you MUST NOT MAKE SOMETHING UP, or assume you know what that thing is or how it works. Instead, you must ask the user for clarification rather than giving a response.
|
||||
{{/if}}
|
||||
|
||||
## Code Block Formatting
|
||||
|
||||
Whenever you mention a code block, you MUST use ONLY use the following format:
|
||||
```path/to/Something.blah#L123-456
|
||||
(code goes here)
|
||||
```
|
||||
The `#L123-456` means the line number range 123 through 456, and the path/to/Something.blah
|
||||
is a path in the project. (If there is no valid path in the project, then you can use
|
||||
/dev/null/path.extension for its path.) This is the ONLY valid way to format code blocks, because the Markdown parser
|
||||
does not understand the more common ```language syntax, or bare ``` blocks. It only
|
||||
understands this path-based syntax, and if the path is missing, then it will error and you will have to do it over again.
|
||||
Just to be really clear about this, if you ever find yourself writing three backticks followed by a language name, STOP!
|
||||
You have made a mistake. You can only ever put paths after triple backticks!
|
||||
<example>
|
||||
Based on all the information I've gathered, here's a summary of how this system works:
|
||||
1. The README file is loaded into the system.
|
||||
2. The system finds the first two headers, including everything in between. In this case, that would be:
|
||||
```path/to/README.md#L8-12
|
||||
# First Header
|
||||
This is the info under the first header.
|
||||
## Sub-header
|
||||
```
|
||||
3. Then the system finds the last header in the README:
|
||||
```path/to/README.md#L27-29
|
||||
## Last Header
|
||||
This is the last header in the README.
|
||||
```
|
||||
4. Finally, it passes this information on to the next process.
|
||||
</example>
|
||||
<example>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
```/dev/null/example.md#L1-3
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</example>
|
||||
Here are examples of ways you must never render code blocks:
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
```
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
This example is unacceptable because it does not include the path.
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
```markdown
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
This example is unacceptable because it has the language instead of the path.
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
</bad_example_do_not_do_this>
|
||||
This example is unacceptable because it uses indentation to mark the code block
|
||||
instead of backticks with a path.
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
```markdown
|
||||
/dev/null/example.md#L1-3
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
This example is unacceptable because the path is in the wrong place. The path must be directly after the opening backticks.
|
||||
|
||||
{{#if has_tools}}
|
||||
## Fixing Diagnostics
|
||||
|
||||
1. Make 1-2 attempts at fixing diagnostics, then defer to the user.
|
||||
2. Never simplify code you've written just to solve diagnostics. Complete, mostly correct code is more valuable than perfect code that doesn't solve the problem.
|
||||
|
||||
## Debugging
|
||||
|
||||
When debugging, only make code changes if you are certain that you can solve the problem.
|
||||
Otherwise, follow debugging best practices:
|
||||
1. Address the root cause instead of the symptoms.
|
||||
2. Add descriptive logging statements and error messages to track variable and code state.
|
||||
3. Add test functions and statements to isolate the problem.
|
||||
|
||||
{{/if}}
|
||||
## Calling External APIs
|
||||
|
||||
1. Unless explicitly requested by the user, use the best suited external APIs and packages to solve the task. There is no need to ask the user for permission.
|
||||
2. When selecting which version of an API or package to use, choose one that is compatible with the user's dependency management file(s). If no such file exists or if the package is not present, use the latest version that is in your training data.
|
||||
3. If an external API requires an API Key, be sure to point this out to the user. Adhere to best security practices (e.g. DO NOT hardcode an API key in a place where it can be exposed)
|
||||
|
||||
## System Information
|
||||
|
||||
Operating System: {{os}}
|
||||
Default Shell: {{shell}}
|
||||
|
||||
{{#if (or has_rules has_user_rules)}}
|
||||
## User's Custom Instructions
|
||||
|
||||
The following additional instructions are provided by the user, and should be followed to the best of your ability{{#if has_tools}} without interfering with the tool use guidelines{{/if}}.
|
||||
|
||||
{{#if has_rules}}
|
||||
There are project rules that apply to these root directories:
|
||||
{{#each worktrees}}
|
||||
{{#if rules_file}}
|
||||
`{{root_name}}/{{rules_file.path_in_worktree}}`:
|
||||
``````
|
||||
{{{rules_file.text}}}
|
||||
``````
|
||||
{{/if}}
|
||||
{{/each}}
|
||||
{{/if}}
|
||||
|
||||
{{#if has_user_rules}}
|
||||
The user has specified the following rules that should be applied:
|
||||
{{#each user_rules}}
|
||||
|
||||
{{#if title}}
|
||||
Rules title: {{title}}
|
||||
{{/if}}
|
||||
``````
|
||||
{{contents}}
|
||||
``````
|
||||
{{/each}}
|
||||
{{/if}}
|
||||
{{/if}}
|
||||
@@ -255,19 +255,6 @@
|
||||
// Whether to display inline and alongside documentation for items in the
|
||||
// completions menu
|
||||
"show_completion_documentation": true,
|
||||
// When to show the scrollbar in the completion menu.
|
||||
// This setting can take four values:
|
||||
//
|
||||
// 1. Show the scrollbar if there's important information or
|
||||
// follow the system's configured behavior
|
||||
// "auto"
|
||||
// 2. Match the system's configured behavior:
|
||||
// "system"
|
||||
// 3. Always show the scrollbar:
|
||||
// "always"
|
||||
// 4. Never show the scrollbar:
|
||||
// "never" (default)
|
||||
"completion_menu_scrollbar": "never",
|
||||
// Show method signatures in the editor, when inside parentheses.
|
||||
"auto_signature_help": false,
|
||||
// Whether to show the signature help after completion or a bracket pair inserted.
|
||||
@@ -605,11 +592,7 @@
|
||||
// to both the horizontal and vertical delta values while scrolling. Fast scrolling
|
||||
// happens when a user holds the alt or option key while scrolling.
|
||||
"fast_scroll_sensitivity": 4.0,
|
||||
"sticky_scroll": {
|
||||
// Whether to stick scopes to the top of the editor.
|
||||
"enabled": false
|
||||
},
|
||||
"relative_line_numbers": "disabled",
|
||||
"relative_line_numbers": false,
|
||||
// If 'search_wrap' is disabled, search result do not wrap around the end of the file.
|
||||
"search_wrap": true,
|
||||
// Search options to enable by default when opening new project and buffer searches.
|
||||
@@ -619,9 +602,7 @@
|
||||
"whole_word": false,
|
||||
"case_sensitive": false,
|
||||
"include_ignored": false,
|
||||
"regex": false,
|
||||
// Whether to center the cursor on each search match when navigating.
|
||||
"center_on_match": false
|
||||
"regex": false
|
||||
},
|
||||
// When to populate a new search's query based on the text under the cursor.
|
||||
// This setting can take the following three values:
|
||||
@@ -1251,9 +1232,6 @@
|
||||
// that are overly broad can slow down Zed's file scanning. `file_scan_exclusions` takes
|
||||
// precedence over these inclusions.
|
||||
"file_scan_inclusions": [".env*"],
|
||||
// Globs to match files that will be considered "hidden". These files can be hidden from the
|
||||
// project panel by toggling the "hide_hidden" setting.
|
||||
"hidden_files": ["**/.*"],
|
||||
// Git gutter behavior configuration.
|
||||
"git": {
|
||||
// Control whether the git gutter is shown. May take 2 values:
|
||||
@@ -1351,7 +1329,7 @@
|
||||
"model": null,
|
||||
"max_tokens": null
|
||||
},
|
||||
// Whether edit predictions are enabled when editing text threads in the agent panel.
|
||||
// Whether edit predictions are enabled when editing text threads.
|
||||
// This setting has no effect if globally disabled.
|
||||
"enabled_in_text_threads": true
|
||||
},
|
||||
@@ -1491,11 +1469,7 @@
|
||||
// in your project's settings, rather than globally.
|
||||
"directories": [".env", "env", ".venv", "venv"],
|
||||
// Can also be `csh`, `fish`, `nushell` and `power_shell`
|
||||
"activate_script": "default",
|
||||
// Preferred Conda manager to use when activating Conda environments.
|
||||
// Values: "auto", "conda", "mamba", "micromamba"
|
||||
// Default: "auto"
|
||||
"conda_manager": "auto"
|
||||
"activate_script": "default"
|
||||
}
|
||||
},
|
||||
"toolbar": {
|
||||
@@ -1726,7 +1700,6 @@
|
||||
"preferred_line_length": 72
|
||||
},
|
||||
"Go": {
|
||||
"hard_tabs": true,
|
||||
"code_actions_on_format": {
|
||||
"source.organizeImports": true
|
||||
},
|
||||
@@ -1745,9 +1718,6 @@
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"HTML+ERB": {
|
||||
"language_servers": ["herb", "!ruby-lsp", "..."]
|
||||
},
|
||||
"Java": {
|
||||
"prettier": {
|
||||
"allowed": true,
|
||||
@@ -1770,9 +1740,6 @@
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"JS+ERB": {
|
||||
"language_servers": ["!ruby-lsp", "..."]
|
||||
},
|
||||
"Kotlin": {
|
||||
"language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."]
|
||||
},
|
||||
@@ -1787,7 +1754,6 @@
|
||||
"Markdown": {
|
||||
"format_on_save": "off",
|
||||
"use_on_type_format": false,
|
||||
"remove_trailing_whitespace_on_save": false,
|
||||
"allow_rewrap": "anywhere",
|
||||
"soft_wrap": "editor_width",
|
||||
"prettier": {
|
||||
@@ -1803,8 +1769,7 @@
|
||||
}
|
||||
},
|
||||
"Plain Text": {
|
||||
"allow_rewrap": "anywhere",
|
||||
"soft_wrap": "editor_width"
|
||||
"allow_rewrap": "anywhere"
|
||||
},
|
||||
"Python": {
|
||||
"code_actions_on_format": {
|
||||
@@ -1878,9 +1843,6 @@
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"YAML+ERB": {
|
||||
"language_servers": ["!ruby-lsp", "..."]
|
||||
},
|
||||
"Zig": {
|
||||
"language_servers": ["zls", "..."]
|
||||
}
|
||||
|
||||
@@ -6,8 +6,8 @@
{
"name": "Gruvbox Dark",
"appearance": "dark",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#5b534dff",
"border.variant": "#494340ff",
"border.focused": "#303a36ff",
@@ -412,8 +412,8 @@
{
"name": "Gruvbox Dark Hard",
"appearance": "dark",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#5b534dff",
"border.variant": "#494340ff",
"border.focused": "#303a36ff",
@@ -818,8 +818,8 @@
{
"name": "Gruvbox Dark Soft",
"appearance": "dark",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#5b534dff",
"border.variant": "#494340ff",
"border.focused": "#303a36ff",
@@ -1224,8 +1224,8 @@
{
"name": "Gruvbox Light",
"appearance": "light",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#c8b899ff",
"border.variant": "#ddcca7ff",
"border.focused": "#adc5ccff",
@@ -1630,8 +1630,8 @@
{
"name": "Gruvbox Light Hard",
"appearance": "light",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#c8b899ff",
"border.variant": "#ddcca7ff",
"border.focused": "#adc5ccff",
@@ -2036,8 +2036,8 @@
{
"name": "Gruvbox Light Soft",
"appearance": "light",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#c8b899ff",
"border.variant": "#ddcca7ff",
"border.focused": "#adc5ccff",

@@ -1,21 +0,0 @@
ARG NAMESPACE_BASE_IMAGE_REF=""

# Your image must build FROM NAMESPACE_BASE_IMAGE_REF
FROM ${NAMESPACE_BASE_IMAGE_REF} AS base

# Remove problematic git-lfs packagecloud source
RUN sudo rm -f /etc/apt/sources.list.d/*git-lfs*.list
# Install git and SSH for cloning private repositories
RUN sudo apt-get update && \
    sudo apt-get install -y git openssh-client

# Clone the Zed repository
RUN git clone https://github.com/zed-industries/zed.git ~/zed

# Run the Linux installation script
WORKDIR /home/runner/zed
RUN ./script/linux

# Clean up unnecessary files to reduce image size
RUN sudo apt-get clean && sudo rm -rf \
    /home/runner/zed
@@ -9,9 +9,6 @@ disallowed-methods = [
{ path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" },
{ path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" },
{ path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" },
{ path = "std::process::Command::stdin", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdin" },
{ path = "std::process::Command::stdout", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdout" },
{ path = "std::process::Command::stderr", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stderr" },
{ path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." },
{ path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." },
]

26
compose.yml
@@ -33,6 +33,32 @@ services:
    volumes:
      - ./livekit.yaml:/livekit.yaml

  postgrest_app:
    image: docker.io/postgrest/postgrest
    container_name: postgrest_app
    ports:
      - 8081:8081
    environment:
      PGRST_DB_URI: postgres://postgres@postgres:5432/zed
    volumes:
      - ./crates/collab/postgrest_app.conf:/etc/postgrest.conf
    command: postgrest /etc/postgrest.conf
    depends_on:
      - postgres

  postgrest_llm:
    image: docker.io/postgrest/postgrest
    container_name: postgrest_llm
    ports:
      - 8082:8082
    environment:
      PGRST_DB_URI: postgres://postgres@postgres:5432/zed_llm
    volumes:
      - ./crates/collab/postgrest_llm.conf:/etc/postgrest.conf
    command: postgrest /etc/postgrest.conf
    depends_on:
      - postgres

  stripe-mock:
    image: docker.io/stripe/stripe-mock:v0.178.0
    ports:

@@ -39,7 +39,6 @@ serde_json.workspace = true
settings.workspace = true
smol.workspace = true
task.workspace = true
telemetry.workspace = true
terminal.workspace = true
ui.workspace = true
url.workspace = true
@@ -57,4 +56,3 @@ rand.workspace = true
tempfile.workspace = true
util.workspace = true
settings.workspace = true
zlog.workspace = true

@@ -3,6 +3,7 @@ mod diff;
|
||||
mod mention;
|
||||
mod terminal;
|
||||
|
||||
use ::terminal::terminal_settings::TerminalSettings;
|
||||
use agent_settings::AgentSettings;
|
||||
use collections::HashSet;
|
||||
pub use connection::*;
|
||||
@@ -11,11 +12,11 @@ use language::language_settings::FormatOnSave;
|
||||
pub use mention::*;
|
||||
use project::lsp_store::{FormatTrigger, LspFormatTarget};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings as _;
|
||||
use settings::{Settings as _, SettingsLocation};
|
||||
use task::{Shell, ShellBuilder};
|
||||
pub use terminal::*;
|
||||
|
||||
use action_log::{ActionLog, ActionLogTelemetry};
|
||||
use action_log::ActionLog;
|
||||
use agent_client_protocol::{self as acp};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use editor::Bias;
|
||||
@@ -34,7 +35,7 @@ use std::rc::Rc;
|
||||
use std::time::{Duration, Instant};
|
||||
use std::{fmt::Display, mem, path::PathBuf, sync::Arc};
|
||||
use ui::App;
|
||||
use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle};
|
||||
use util::{ResultExt, get_default_system_shell_preferring_bash};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -94,14 +95,9 @@ pub enum AssistantMessageChunk {
|
||||
}
|
||||
|
||||
impl AssistantMessageChunk {
|
||||
pub fn from_str(
|
||||
chunk: &str,
|
||||
language_registry: &Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
cx: &mut App,
|
||||
) -> Self {
|
||||
pub fn from_str(chunk: &str, language_registry: &Arc<LanguageRegistry>, cx: &mut App) -> Self {
|
||||
Self::Message {
|
||||
block: ContentBlock::new(chunk.into(), language_registry, path_style, cx),
|
||||
block: ContentBlock::new(chunk.into(), language_registry, cx),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -190,7 +186,6 @@ impl ToolCall {
|
||||
tool_call: acp::ToolCall,
|
||||
status: ToolCallStatus,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
|
||||
cx: &mut App,
|
||||
) -> Result<Self> {
|
||||
@@ -204,7 +199,6 @@ impl ToolCall {
|
||||
content.push(ToolCallContent::from_acp(
|
||||
item,
|
||||
language_registry.clone(),
|
||||
path_style,
|
||||
terminals,
|
||||
cx,
|
||||
)?);
|
||||
@@ -229,7 +223,6 @@ impl ToolCall {
|
||||
&mut self,
|
||||
fields: acp::ToolCallUpdateFields,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
|
||||
cx: &mut App,
|
||||
) -> Result<()> {
|
||||
@@ -267,13 +260,12 @@ impl ToolCall {
|
||||
|
||||
// Reuse existing content if we can
|
||||
for (old, new) in self.content.iter_mut().zip(content.by_ref()) {
|
||||
old.update_from_acp(new, language_registry.clone(), path_style, terminals, cx)?;
|
||||
old.update_from_acp(new, language_registry.clone(), terminals, cx)?;
|
||||
}
|
||||
for new in content {
|
||||
self.content.push(ToolCallContent::from_acp(
|
||||
new,
|
||||
language_registry.clone(),
|
||||
path_style,
|
||||
terminals,
|
||||
cx,
|
||||
)?)
|
||||
@@ -458,23 +450,21 @@ impl ContentBlock {
|
||||
pub fn new(
|
||||
block: acp::ContentBlock,
|
||||
language_registry: &Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
cx: &mut App,
|
||||
) -> Self {
|
||||
let mut this = Self::Empty;
|
||||
this.append(block, language_registry, path_style, cx);
|
||||
this.append(block, language_registry, cx);
|
||||
this
|
||||
}
|
||||
|
||||
pub fn new_combined(
|
||||
blocks: impl IntoIterator<Item = acp::ContentBlock>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
cx: &mut App,
|
||||
) -> Self {
|
||||
let mut this = Self::Empty;
|
||||
for block in blocks {
|
||||
this.append(block, &language_registry, path_style, cx);
|
||||
this.append(block, &language_registry, cx);
|
||||
}
|
||||
this
|
||||
}
|
||||
@@ -483,7 +473,6 @@ impl ContentBlock {
|
||||
&mut self,
|
||||
block: acp::ContentBlock,
|
||||
language_registry: &Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
cx: &mut App,
|
||||
) {
|
||||
if matches!(self, ContentBlock::Empty)
|
||||
@@ -493,7 +482,7 @@ impl ContentBlock {
|
||||
return;
|
||||
}
|
||||
|
||||
let new_content = self.block_string_contents(block, path_style);
|
||||
let new_content = self.block_string_contents(block);
|
||||
|
||||
match self {
|
||||
ContentBlock::Empty => {
|
||||
@@ -503,7 +492,7 @@ impl ContentBlock {
|
||||
markdown.update(cx, |markdown, cx| markdown.append(&new_content, cx));
|
||||
}
|
||||
ContentBlock::ResourceLink { resource_link } => {
|
||||
let existing_content = Self::resource_link_md(&resource_link.uri, path_style);
|
||||
let existing_content = Self::resource_link_md(&resource_link.uri);
|
||||
let combined = format!("{}\n{}", existing_content, new_content);
|
||||
|
||||
*self = Self::create_markdown_block(combined, language_registry, cx);
|
||||
@@ -522,11 +511,11 @@ impl ContentBlock {
|
||||
}
|
||||
}
|
||||
|
||||
fn block_string_contents(&self, block: acp::ContentBlock, path_style: PathStyle) -> String {
|
||||
fn block_string_contents(&self, block: acp::ContentBlock) -> String {
|
||||
match block {
|
||||
acp::ContentBlock::Text(text_content) => text_content.text,
|
||||
acp::ContentBlock::ResourceLink(resource_link) => {
|
||||
Self::resource_link_md(&resource_link.uri, path_style)
|
||||
Self::resource_link_md(&resource_link.uri)
|
||||
}
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
resource:
|
||||
@@ -535,14 +524,14 @@ impl ContentBlock {
|
||||
..
|
||||
}),
|
||||
..
|
||||
}) => Self::resource_link_md(&uri, path_style),
|
||||
}) => Self::resource_link_md(&uri),
|
||||
acp::ContentBlock::Image(image) => Self::image_md(&image),
|
||||
acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => String::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn resource_link_md(uri: &str, path_style: PathStyle) -> String {
|
||||
if let Some(uri) = MentionUri::parse(uri, path_style).log_err() {
|
||||
fn resource_link_md(uri: &str) -> String {
|
||||
if let Some(uri) = MentionUri::parse(uri).log_err() {
|
||||
uri.as_link().to_string()
|
||||
} else {
|
||||
uri.to_string()
|
||||
@@ -588,7 +577,6 @@ impl ToolCallContent {
|
||||
pub fn from_acp(
|
||||
content: acp::ToolCallContent,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
|
||||
cx: &mut App,
|
||||
) -> Result<Self> {
|
||||
@@ -596,7 +584,6 @@ impl ToolCallContent {
|
||||
acp::ToolCallContent::Content { content } => Ok(Self::ContentBlock(ContentBlock::new(
|
||||
content,
|
||||
&language_registry,
|
||||
path_style,
|
||||
cx,
|
||||
))),
|
||||
acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| {
|
||||
@@ -620,7 +607,6 @@ impl ToolCallContent {
|
||||
&mut self,
|
||||
new: acp::ToolCallContent,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
|
||||
cx: &mut App,
|
||||
) -> Result<()> {
|
||||
@@ -636,7 +622,7 @@ impl ToolCallContent {
|
||||
};
|
||||
|
||||
if needs_update {
|
||||
*self = Self::from_acp(new, language_registry, path_style, terminals, cx)?;
|
||||
*self = Self::from_acp(new, language_registry, terminals, cx)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -820,15 +806,6 @@ pub struct AcpThread {
|
||||
pending_terminal_exit: HashMap<acp::TerminalId, acp::TerminalExitStatus>,
|
||||
}
|
||||
|
||||
impl From<&AcpThread> for ActionLogTelemetry {
|
||||
fn from(value: &AcpThread) -> Self {
|
||||
Self {
|
||||
agent_telemetry_id: value.connection().telemetry_id(),
|
||||
session_id: value.session_id.0.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum AcpThreadEvent {
|
||||
NewEntry,
|
||||
@@ -1165,7 +1142,6 @@ impl AcpThread {
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let language_registry = self.project.read(cx).languages().clone();
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let entries_len = self.entries.len();
|
||||
|
||||
if let Some(last_entry) = self.entries.last_mut()
|
||||
@@ -1177,12 +1153,12 @@ impl AcpThread {
|
||||
}) = last_entry
|
||||
{
|
||||
*id = message_id.or(id.take());
|
||||
content.append(chunk.clone(), &language_registry, path_style, cx);
|
||||
content.append(chunk.clone(), &language_registry, cx);
|
||||
chunks.push(chunk);
|
||||
let idx = entries_len - 1;
|
||||
cx.emit(AcpThreadEvent::EntryUpdated(idx));
|
||||
} else {
|
||||
let content = ContentBlock::new(chunk.clone(), &language_registry, path_style, cx);
|
||||
let content = ContentBlock::new(chunk.clone(), &language_registry, cx);
|
||||
self.push_entry(
|
||||
AgentThreadEntry::UserMessage(UserMessage {
|
||||
id: message_id,
|
||||
@@ -1202,7 +1178,6 @@ impl AcpThread {
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let language_registry = self.project.read(cx).languages().clone();
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let entries_len = self.entries.len();
|
||||
if let Some(last_entry) = self.entries.last_mut()
|
||||
&& let AgentThreadEntry::AssistantMessage(AssistantMessage { chunks }) = last_entry
|
||||
@@ -1212,10 +1187,10 @@ impl AcpThread {
|
||||
match (chunks.last_mut(), is_thought) {
|
||||
(Some(AssistantMessageChunk::Message { block }), false)
|
||||
| (Some(AssistantMessageChunk::Thought { block }), true) => {
|
||||
block.append(chunk, &language_registry, path_style, cx)
|
||||
block.append(chunk, &language_registry, cx)
|
||||
}
|
||||
_ => {
|
||||
let block = ContentBlock::new(chunk, &language_registry, path_style, cx);
|
||||
let block = ContentBlock::new(chunk, &language_registry, cx);
|
||||
if is_thought {
|
||||
chunks.push(AssistantMessageChunk::Thought { block })
|
||||
} else {
|
||||
@@ -1224,7 +1199,7 @@ impl AcpThread {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let block = ContentBlock::new(chunk, &language_registry, path_style, cx);
|
||||
let block = ContentBlock::new(chunk, &language_registry, cx);
|
||||
let chunk = if is_thought {
|
||||
AssistantMessageChunk::Thought { block }
|
||||
} else {
|
||||
@@ -1276,7 +1251,6 @@ impl AcpThread {
|
||||
) -> Result<()> {
|
||||
let update = update.into();
|
||||
let languages = self.project.read(cx).languages().clone();
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
|
||||
let ix = match self.index_for_tool_call(update.id()) {
|
||||
Some(ix) => ix,
|
||||
@@ -1293,7 +1267,6 @@ impl AcpThread {
|
||||
meta: None,
|
||||
}),
|
||||
&languages,
|
||||
path_style,
|
||||
cx,
|
||||
))],
|
||||
status: ToolCallStatus::Failed,
|
||||
@@ -1313,7 +1286,7 @@ impl AcpThread {
|
||||
match update {
|
||||
ToolCallUpdate::UpdateFields(update) => {
|
||||
let location_updated = update.fields.locations.is_some();
|
||||
call.update_fields(update.fields, languages, path_style, &self.terminals, cx)?;
|
||||
call.update_fields(update.fields, languages, &self.terminals, cx)?;
|
||||
if location_updated {
|
||||
self.resolve_locations(update.id, cx);
|
||||
}
|
||||
@@ -1352,32 +1325,14 @@ impl AcpThread {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Result<(), acp::Error> {
|
||||
let language_registry = self.project.read(cx).languages().clone();
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let id = update.id.clone();
|
||||
|
||||
let agent = self.connection().telemetry_id();
|
||||
let session = self.session_id();
|
||||
if let ToolCallStatus::Completed | ToolCallStatus::Failed = status {
|
||||
let status = if matches!(status, ToolCallStatus::Completed) {
|
||||
"completed"
|
||||
} else {
|
||||
"failed"
|
||||
};
|
||||
telemetry::event!("Agent Tool Call Completed", agent, session, status);
|
||||
}
|
||||
|
||||
if let Some(ix) = self.index_for_tool_call(&id) {
|
||||
let AgentThreadEntry::ToolCall(call) = &mut self.entries[ix] else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
call.update_fields(
|
||||
update.fields,
|
||||
language_registry,
|
||||
path_style,
|
||||
&self.terminals,
|
||||
cx,
|
||||
)?;
|
||||
call.update_fields(update.fields, language_registry, &self.terminals, cx)?;
|
||||
call.status = status;
|
||||
|
||||
cx.emit(AcpThreadEvent::EntryUpdated(ix));
|
||||
@@ -1386,7 +1341,6 @@ impl AcpThread {
|
||||
update.try_into()?,
|
||||
status,
|
||||
language_registry,
|
||||
self.project.read(cx).path_style(cx),
|
||||
&self.terminals,
|
||||
cx,
|
||||
)?;
|
||||
@@ -1666,7 +1620,6 @@ impl AcpThread {
|
||||
let block = ContentBlock::new_combined(
|
||||
message.clone(),
|
||||
self.project.read(cx).languages().clone(),
|
||||
self.project.read(cx).path_style(cx),
|
||||
cx,
|
||||
);
|
||||
let request = acp::PromptRequest {
|
||||
@@ -1889,7 +1842,6 @@ impl AcpThread {
|
||||
return Task::ready(Err(anyhow!("not supported")));
|
||||
};
|
||||
|
||||
let telemetry = ActionLogTelemetry::from(&*self);
|
||||
cx.spawn(async move |this, cx| {
|
||||
cx.update(|cx| truncate.run(id.clone(), cx))?.await?;
|
||||
this.update(cx, |this, cx| {
|
||||
@@ -1898,9 +1850,8 @@ impl AcpThread {
|
||||
this.entries.truncate(ix);
|
||||
cx.emit(AcpThreadEvent::EntriesRemoved(range));
|
||||
}
|
||||
this.action_log().update(cx, |action_log, cx| {
|
||||
action_log.reject_all_edits(Some(telemetry), cx)
|
||||
})
|
||||
this.action_log()
|
||||
.update(cx, |action_log, cx| action_log.reject_all_edits(cx))
|
||||
})?
|
||||
.await;
|
||||
Ok(())
|
||||
@@ -2162,9 +2113,17 @@ impl AcpThread {
|
||||
) -> Task<Result<Entity<Terminal>>> {
|
||||
let env = match &cwd {
|
||||
Some(dir) => self.project.update(cx, |project, cx| {
|
||||
project.environment().update(cx, |env, cx| {
|
||||
env.directory_environment(dir.as_path().into(), cx)
|
||||
})
|
||||
let worktree = project.find_worktree(dir.as_path(), cx);
|
||||
let shell = TerminalSettings::get(
|
||||
worktree.as_ref().map(|(worktree, path)| SettingsLocation {
|
||||
worktree_id: worktree.read(cx).id(),
|
||||
path: &path,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.shell
|
||||
.clone();
|
||||
project.directory_environment(&shell, dir.as_path().into(), cx)
|
||||
}),
|
||||
None => Task::ready(None).shared(),
|
||||
};
|
||||
@@ -2377,6 +2336,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
language::init(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -3634,10 +3595,6 @@ mod tests {
|
||||
}
|
||||
|
||||
impl AgentConnection for FakeAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"fake"
|
||||
}
|
||||
|
||||
fn auth_methods(&self) -> &[acp::AuthMethod] {
|
||||
&self.auth_methods
|
||||
}
|
||||
|
||||
@@ -20,8 +20,6 @@ impl UserMessageId {
|
||||
}
|
||||
|
||||
pub trait AgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str;
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -108,6 +106,9 @@ pub trait AgentSessionSetTitle {
|
||||
}
|
||||
|
||||
pub trait AgentTelemetry {
|
||||
/// The name of the agent used for telemetry.
|
||||
fn agent_name(&self) -> String;
|
||||
|
||||
/// A representation of the current thread state that can be serialized for
|
||||
/// storage with telemetry events.
|
||||
fn thread_data(
|
||||
@@ -317,10 +318,6 @@ mod test_support {
|
||||
}
|
||||
|
||||
impl AgentConnection for StubAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"stub"
|
||||
}
|
||||
|
||||
fn auth_methods(&self) -> &[acp::AuthMethod] {
|
||||
&[]
|
||||
}
|
||||
|
||||
@@ -7,10 +7,10 @@ use std::{
|
||||
fmt,
|
||||
ops::RangeInclusive,
|
||||
path::{Path, PathBuf},
|
||||
str::FromStr,
|
||||
};
|
||||
use ui::{App, IconName, SharedString};
|
||||
use url::Url;
|
||||
use util::paths::PathStyle;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
||||
pub enum MentionUri {
|
||||
@@ -49,7 +49,7 @@ pub enum MentionUri {
|
||||
}
|
||||
|
||||
impl MentionUri {
|
||||
pub fn parse(input: &str, path_style: PathStyle) -> Result<Self> {
|
||||
pub fn parse(input: &str) -> Result<Self> {
|
||||
fn parse_line_range(fragment: &str) -> Result<RangeInclusive<u32>> {
|
||||
let range = fragment
|
||||
.strip_prefix("L")
|
||||
@@ -74,34 +74,25 @@ impl MentionUri {
|
||||
let path = url.path();
|
||||
match url.scheme() {
|
||||
"file" => {
|
||||
let path = if path_style.is_windows() {
|
||||
path.trim_start_matches("/")
|
||||
} else {
|
||||
path
|
||||
};
|
||||
|
||||
let path = url.to_file_path().ok().context("Extracting file path")?;
|
||||
if let Some(fragment) = url.fragment() {
|
||||
let line_range = parse_line_range(fragment)?;
|
||||
if let Some(name) = single_query_param(&url, "symbol")? {
|
||||
Ok(Self::Symbol {
|
||||
name,
|
||||
abs_path: path.into(),
|
||||
abs_path: path,
|
||||
line_range,
|
||||
})
|
||||
} else {
|
||||
Ok(Self::Selection {
|
||||
abs_path: Some(path.into()),
|
||||
abs_path: Some(path),
|
||||
line_range,
|
||||
})
|
||||
}
|
||||
} else if input.ends_with("/") {
|
||||
Ok(Self::Directory {
|
||||
abs_path: path.into(),
|
||||
})
|
||||
Ok(Self::Directory { abs_path: path })
|
||||
} else {
|
||||
Ok(Self::File {
|
||||
abs_path: path.into(),
|
||||
})
|
||||
Ok(Self::File { abs_path: path })
|
||||
}
|
||||
}
|
||||
"zed" => {
|
||||
@@ -222,14 +213,18 @@ impl MentionUri {
|
||||
pub fn to_uri(&self) -> Url {
|
||||
match self {
|
||||
MentionUri::File { abs_path } => {
|
||||
let mut url = Url::parse("file:///").unwrap();
|
||||
url.set_path(&abs_path.to_string_lossy());
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path("/agent/file");
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
url
|
||||
}
|
||||
MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(),
|
||||
MentionUri::Directory { abs_path } => {
|
||||
let mut url = Url::parse("file:///").unwrap();
|
||||
url.set_path(&abs_path.to_string_lossy());
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path("/agent/directory");
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
url
|
||||
}
|
||||
MentionUri::Symbol {
|
||||
@@ -237,9 +232,10 @@ impl MentionUri {
|
||||
name,
|
||||
line_range,
|
||||
} => {
|
||||
let mut url = Url::parse("file:///").unwrap();
|
||||
url.set_path(&abs_path.to_string_lossy());
|
||||
url.query_pairs_mut().append_pair("symbol", name);
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path(&format!("/agent/symbol/{name}"));
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
url.set_fragment(Some(&format!(
|
||||
"L{}:{}",
|
||||
line_range.start() + 1,
|
||||
@@ -251,14 +247,13 @@ impl MentionUri {
|
||||
abs_path,
|
||||
line_range,
|
||||
} => {
|
||||
let mut url = if let Some(path) = abs_path {
|
||||
let mut url = Url::parse("file:///").unwrap();
|
||||
url.set_path(&path.to_string_lossy());
|
||||
url
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
if let Some(abs_path) = abs_path {
|
||||
url.set_path("/agent/selection");
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
} else {
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path("/agent/untitled-buffer");
|
||||
url
|
||||
};
|
||||
url.set_fragment(Some(&format!(
|
||||
"L{}:{}",
|
||||
@@ -293,6 +288,14 @@ impl MentionUri {
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for MentionUri {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> anyhow::Result<Self> {
|
||||
Self::parse(s)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MentionLink<'a>(&'a MentionUri);
|
||||
|
||||
impl fmt::Display for MentionLink<'_> {
|
||||
@@ -335,81 +338,93 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_parse_file_uri() {
|
||||
let file_uri = uri!("file:///path/to/file.rs");
|
||||
let parsed = MentionUri::parse(file_uri, PathStyle::local()).unwrap();
|
||||
let old_uri = uri!("file:///path/to/file.rs");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::File { abs_path } => {
|
||||
assert_eq!(abs_path, Path::new(path!("/path/to/file.rs")));
|
||||
assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/file.rs"));
|
||||
}
|
||||
_ => panic!("Expected File variant"),
|
||||
}
|
||||
assert_eq!(parsed.to_uri().to_string(), file_uri);
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/file"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_directory_uri() {
|
||||
let file_uri = uri!("file:///path/to/dir/");
|
||||
let parsed = MentionUri::parse(file_uri, PathStyle::local()).unwrap();
|
||||
let old_uri = uri!("file:///path/to/dir/");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Directory { abs_path } => {
|
||||
assert_eq!(abs_path, Path::new(path!("/path/to/dir/")));
|
||||
assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/dir/"));
|
||||
}
|
||||
_ => panic!("Expected Directory variant"),
|
||||
}
|
||||
assert_eq!(parsed.to_uri().to_string(), file_uri);
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/directory"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_to_directory_uri_without_slash() {
|
||||
let uri = MentionUri::Directory {
|
||||
abs_path: PathBuf::from(path!("/path/to/dir/")),
|
||||
abs_path: PathBuf::from(path!("/path/to/dir")),
|
||||
};
|
||||
let expected = uri!("file:///path/to/dir/");
|
||||
assert_eq!(uri.to_uri().to_string(), expected);
|
||||
let uri_string = uri.to_uri().to_string();
|
||||
assert!(uri_string.starts_with("zed:///agent/directory"));
|
||||
assert_eq!(MentionUri::parse(&uri_string).unwrap(), uri);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_symbol_uri() {
|
||||
let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
|
||||
let parsed = MentionUri::parse(symbol_uri, PathStyle::local()).unwrap();
|
||||
let old_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Symbol {
|
||||
abs_path: path,
|
||||
name,
|
||||
line_range,
|
||||
} => {
|
||||
assert_eq!(path, Path::new(path!("/path/to/file.rs")));
|
||||
assert_eq!(path.to_str().unwrap(), path!("/path/to/file.rs"));
|
||||
assert_eq!(name, "MySymbol");
|
||||
assert_eq!(line_range.start(), &9);
|
||||
assert_eq!(line_range.end(), &19);
|
||||
}
|
||||
_ => panic!("Expected Symbol variant"),
|
||||
}
|
||||
assert_eq!(parsed.to_uri().to_string(), symbol_uri);
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/symbol/MySymbol"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_selection_uri() {
|
||||
let selection_uri = uri!("file:///path/to/file.rs#L5:15");
|
||||
let parsed = MentionUri::parse(selection_uri, PathStyle::local()).unwrap();
|
||||
let old_uri = uri!("file:///path/to/file.rs#L5:15");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Selection {
|
||||
abs_path: path,
|
||||
line_range,
|
||||
} => {
|
||||
assert_eq!(path.as_ref().unwrap(), Path::new(path!("/path/to/file.rs")));
|
||||
assert_eq!(
|
||||
path.as_ref().unwrap().to_str().unwrap(),
|
||||
path!("/path/to/file.rs")
|
||||
);
|
||||
assert_eq!(line_range.start(), &4);
|
||||
assert_eq!(line_range.end(), &14);
|
||||
}
|
||||
_ => panic!("Expected Selection variant"),
|
||||
}
|
||||
assert_eq!(parsed.to_uri().to_string(), selection_uri);
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/selection"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_untitled_selection_uri() {
|
||||
let selection_uri = uri!("zed:///agent/untitled-buffer#L1:10");
|
||||
let parsed = MentionUri::parse(selection_uri, PathStyle::local()).unwrap();
|
||||
let parsed = MentionUri::parse(selection_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Selection {
|
||||
abs_path: None,
|
||||
@@ -426,7 +441,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_parse_thread_uri() {
|
||||
let thread_uri = "zed:///agent/thread/session123?name=Thread+name";
|
||||
let parsed = MentionUri::parse(thread_uri, PathStyle::local()).unwrap();
|
||||
let parsed = MentionUri::parse(thread_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Thread {
|
||||
id: thread_id,
|
||||
@@ -443,7 +458,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_parse_rule_uri() {
|
||||
let rule_uri = "zed:///agent/rule/d8694ff2-90d5-4b6f-be33-33c1763acd52?name=Some+rule";
|
||||
let parsed = MentionUri::parse(rule_uri, PathStyle::local()).unwrap();
|
||||
let parsed = MentionUri::parse(rule_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Rule { id, name } => {
|
||||
assert_eq!(id.to_string(), "d8694ff2-90d5-4b6f-be33-33c1763acd52");
|
||||
@@ -457,7 +472,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_parse_fetch_http_uri() {
|
||||
let http_uri = "http://example.com/path?query=value#fragment";
|
||||
let parsed = MentionUri::parse(http_uri, PathStyle::local()).unwrap();
|
||||
let parsed = MentionUri::parse(http_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Fetch { url } => {
|
||||
assert_eq!(url.to_string(), http_uri);
|
||||
@@ -470,7 +485,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_parse_fetch_https_uri() {
|
||||
let https_uri = "https://example.com/api/endpoint";
|
||||
let parsed = MentionUri::parse(https_uri, PathStyle::local()).unwrap();
|
||||
let parsed = MentionUri::parse(https_uri).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Fetch { url } => {
|
||||
assert_eq!(url.to_string(), https_uri);
|
||||
@@ -482,55 +497,40 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_invalid_scheme() {
|
||||
assert!(MentionUri::parse("ftp://example.com", PathStyle::local()).is_err());
|
||||
assert!(MentionUri::parse("ssh://example.com", PathStyle::local()).is_err());
|
||||
assert!(MentionUri::parse("unknown://example.com", PathStyle::local()).is_err());
|
||||
assert!(MentionUri::parse("ftp://example.com").is_err());
|
||||
assert!(MentionUri::parse("ssh://example.com").is_err());
|
||||
assert!(MentionUri::parse("unknown://example.com").is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_zed_path() {
|
||||
assert!(MentionUri::parse("zed:///invalid/path", PathStyle::local()).is_err());
|
||||
assert!(MentionUri::parse("zed:///agent/unknown/test", PathStyle::local()).is_err());
|
||||
assert!(MentionUri::parse("zed:///invalid/path").is_err());
|
||||
assert!(MentionUri::parse("zed:///agent/unknown/test").is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_line_range_format() {
|
||||
// Missing L prefix
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#10:20"), PathStyle::local()).is_err()
|
||||
);
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#10:20")).is_err());
|
||||
|
||||
// Missing colon separator
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#L1020"), PathStyle::local()).is_err()
|
||||
);
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L1020")).is_err());
|
||||
|
||||
// Invalid numbers
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#L10:abc"), PathStyle::local()).is_err()
|
||||
);
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#Labc:20"), PathStyle::local()).is_err()
|
||||
);
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L10:abc")).is_err());
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#Labc:20")).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_query_parameters() {
|
||||
// Invalid query parameter name
|
||||
assert!(
|
||||
MentionUri::parse(
|
||||
uri!("file:///path/to/file.rs#L10:20?invalid=test"),
|
||||
PathStyle::local()
|
||||
)
|
||||
.is_err()
|
||||
);
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L10:20?invalid=test")).is_err());
|
||||
|
||||
// Too many query parameters
|
||||
assert!(
|
||||
MentionUri::parse(
|
||||
uri!("file:///path/to/file.rs#L10:20?symbol=test&another=param"),
|
||||
PathStyle::local()
|
||||
)
|
||||
MentionUri::parse(uri!(
|
||||
"file:///path/to/file.rs#L10:20?symbol=test&another=param"
|
||||
))
|
||||
.is_err()
|
||||
);
|
||||
}
|
||||
@@ -538,14 +538,8 @@ mod tests {
|
||||
#[test]
|
||||
fn test_zero_based_line_numbers() {
|
||||
// Test that 0-based line numbers are rejected (should be 1-based)
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#L0:10"), PathStyle::local()).is_err()
|
||||
);
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#L1:0"), PathStyle::local()).is_err()
|
||||
);
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#L0:0"), PathStyle::local()).is_err()
|
||||
);
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L0:10")).is_err());
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L1:0")).is_err());
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L0:0")).is_err());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,8 +5,10 @@ use gpui::{App, AppContext, AsyncApp, Context, Entity, Task};
|
||||
use language::LanguageRegistry;
|
||||
use markdown::Markdown;
|
||||
use project::Project;
|
||||
use settings::{Settings as _, SettingsLocation};
|
||||
use std::{path::PathBuf, process::ExitStatus, sync::Arc, time::Instant};
|
||||
use task::Shell;
|
||||
use terminal::terminal_settings::TerminalSettings;
|
||||
use util::get_default_system_shell_preferring_bash;
|
||||
|
||||
pub struct Terminal {
|
||||
@@ -185,9 +187,17 @@ pub async fn create_terminal_entity(
|
||||
let mut env = if let Some(dir) = &cwd {
|
||||
project
|
||||
.update(cx, |project, cx| {
|
||||
project.environment().update(cx, |env, cx| {
|
||||
env.directory_environment(dir.clone().into(), cx)
|
||||
})
|
||||
let worktree = project.find_worktree(dir.as_path(), cx);
|
||||
let shell = TerminalSettings::get(
|
||||
worktree.as_ref().map(|(worktree, path)| SettingsLocation {
|
||||
worktree_id: worktree.read(cx).id(),
|
||||
path: &path,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.shell
|
||||
.clone();
|
||||
project.directory_environment(&shell, dir.clone().into(), cx)
|
||||
})?
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
|
||||
@@ -19,7 +19,7 @@ use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle};
|
||||
use project::Project;
|
||||
use settings::Settings;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{Tooltip, WithScrollbar, prelude::*};
|
||||
use ui::{Tooltip, prelude::*};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{
|
||||
Item, ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
|
||||
@@ -259,15 +259,6 @@ impl AcpTools {
|
||||
serde_json::to_string_pretty(&messages).ok()
|
||||
}
|
||||
|
||||
fn clear_messages(&mut self, cx: &mut Context<Self>) {
|
||||
if let Some(connection) = self.watched_connection.as_mut() {
|
||||
connection.messages.clear();
|
||||
connection.list_state.reset(0);
|
||||
self.expanded.clear();
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
fn render_message(
|
||||
&mut self,
|
||||
index: usize,
|
||||
@@ -291,19 +282,17 @@ impl AcpTools {
|
||||
let expanded = self.expanded.contains(&index);
|
||||
|
||||
v_flex()
|
||||
.id(index)
|
||||
.group("message")
|
||||
.cursor_pointer()
|
||||
.font_buffer(cx)
|
||||
.w_full()
|
||||
.px_4()
|
||||
.py_3()
|
||||
.pl_4()
|
||||
.pr_5()
|
||||
.gap_2()
|
||||
.items_start()
|
||||
.text_size(base_size)
|
||||
.border_color(colors.border)
|
||||
.border_b_1()
|
||||
.gap_2()
|
||||
.items_start()
|
||||
.font_buffer(cx)
|
||||
.text_size(base_size)
|
||||
.id(index)
|
||||
.group("message")
|
||||
.hover(|this| this.bg(colors.element_background.opacity(0.5)))
|
||||
.on_click(cx.listener(move |this, _, _, cx| {
|
||||
if this.expanded.contains(&index) {
|
||||
@@ -325,14 +314,15 @@ impl AcpTools {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.items_center()
|
||||
.flex_shrink_0()
|
||||
.child(match message.direction {
|
||||
acp::StreamMessageDirection::Incoming => Icon::new(IconName::ArrowDown)
|
||||
.color(Color::Error)
|
||||
.size(IconSize::Small),
|
||||
acp::StreamMessageDirection::Outgoing => Icon::new(IconName::ArrowUp)
|
||||
.color(Color::Success)
|
||||
.size(IconSize::Small),
|
||||
acp::StreamMessageDirection::Incoming => {
|
||||
ui::Icon::new(ui::IconName::ArrowDown).color(Color::Error)
|
||||
}
|
||||
acp::StreamMessageDirection::Outgoing => {
|
||||
ui::Icon::new(ui::IconName::ArrowUp).color(Color::Success)
|
||||
}
|
||||
})
|
||||
.child(
|
||||
Label::new(message.name.clone())
|
||||
@@ -502,7 +492,7 @@ impl Focusable for AcpTools {
|
||||
}
|
||||
|
||||
impl Render for AcpTools {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
v_flex()
|
||||
.track_focus(&self.focus_handle)
|
||||
.size_full()
|
||||
@@ -517,19 +507,13 @@ impl Render for AcpTools {
|
||||
.child("No messages recorded yet")
|
||||
.into_any()
|
||||
} else {
|
||||
div()
|
||||
.size_full()
|
||||
.flex_grow()
|
||||
.child(
|
||||
list(
|
||||
connection.list_state.clone(),
|
||||
cx.processor(Self::render_message),
|
||||
)
|
||||
.with_sizing_behavior(gpui::ListSizingBehavior::Auto)
|
||||
.size_full(),
|
||||
)
|
||||
.vertical_scrollbar_for(connection.list_state.clone(), window, cx)
|
||||
.into_any()
|
||||
list(
|
||||
connection.list_state.clone(),
|
||||
cx.processor(Self::render_message),
|
||||
)
|
||||
.with_sizing_behavior(gpui::ListSizingBehavior::Auto)
|
||||
.flex_grow()
|
||||
.into_any()
|
||||
}
|
||||
}
|
||||
None => h_flex()
|
||||
@@ -563,16 +547,10 @@ impl Render for AcpToolsToolbarItemView {
|
||||
};
|
||||
|
||||
let acp_tools = acp_tools.clone();
|
||||
let has_messages = acp_tools
|
||||
.read(cx)
|
||||
.watched_connection
|
||||
.as_ref()
|
||||
.is_some_and(|connection| !connection.messages.is_empty());
|
||||
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child({
|
||||
let acp_tools = acp_tools.clone();
|
||||
.child(
|
||||
IconButton::new(
|
||||
"copy_all_messages",
|
||||
if self.just_copied {
|
||||
@@ -587,7 +565,13 @@ impl Render for AcpToolsToolbarItemView {
|
||||
} else {
|
||||
"Copy All Messages"
|
||||
}))
|
||||
.disabled(!has_messages)
|
||||
.disabled(
|
||||
acp_tools
|
||||
.read(cx)
|
||||
.watched_connection
|
||||
.as_ref()
|
||||
.is_none_or(|connection| connection.messages.is_empty()),
|
||||
)
|
||||
.on_click(cx.listener(move |this, _, _window, cx| {
|
||||
if let Some(content) = acp_tools.read(cx).serialize_observed_messages() {
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(content));
|
||||
@@ -602,18 +586,7 @@ impl Render for AcpToolsToolbarItemView {
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}))
|
||||
})
|
||||
.child(
|
||||
IconButton::new("clear_messages", IconName::Trash)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(Tooltip::text("Clear Messages"))
|
||||
.disabled(!has_messages)
|
||||
.on_click(cx.listener(move |_this, _, _window, cx| {
|
||||
acp_tools.update(cx, |acp_tools, cx| {
|
||||
acp_tools.clear_messages(cx);
|
||||
});
|
||||
})),
|
||||
})),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
@@ -20,7 +20,6 @@ futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
project.workspace = true
|
||||
telemetry.workspace = true
|
||||
text.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
|
||||
@@ -3,9 +3,7 @@ use buffer_diff::BufferDiff;
|
||||
use clock;
|
||||
use collections::BTreeMap;
|
||||
use futures::{FutureExt, StreamExt, channel::mpsc};
|
||||
use gpui::{
|
||||
App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
|
||||
};
|
||||
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
|
||||
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
|
||||
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
|
||||
use std::{cmp, ops::Range, sync::Arc};
|
||||
@@ -33,6 +31,71 @@ impl ActionLog {
|
||||
&self.project
|
||||
}
|
||||
|
||||
pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
|
||||
Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
|
||||
}
|
||||
|
||||
/// Return a unified diff patch with user edits made since last read or notification
|
||||
pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
|
||||
let diffs = self
|
||||
.tracked_buffers
|
||||
.values()
|
||||
.filter_map(|tracked| {
|
||||
if !tracked.may_have_unnotified_user_edits {
|
||||
return None;
|
||||
}
|
||||
|
||||
let text_with_latest_user_edits = tracked.diff_base.to_string();
|
||||
let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
|
||||
if text_with_latest_user_edits == text_with_last_seen_user_edits {
|
||||
return None;
|
||||
}
|
||||
let patch = language::unified_diff(
|
||||
&text_with_last_seen_user_edits,
|
||||
&text_with_latest_user_edits,
|
||||
);
|
||||
|
||||
let buffer = tracked.buffer.clone();
|
||||
let file_path = buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map(|file| {
|
||||
let mut path = file.full_path(cx).to_string_lossy().into_owned();
|
||||
if file.path_style(cx).is_windows() {
|
||||
path = path.replace('\\', "/");
|
||||
}
|
||||
path
|
||||
})
|
||||
.unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
|
||||
|
||||
let mut result = String::new();
|
||||
result.push_str(&format!("--- a/{}\n", file_path));
|
||||
result.push_str(&format!("+++ b/{}\n", file_path));
|
||||
result.push_str(&patch);
|
||||
|
||||
Some(result)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if diffs.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let unified_diff = diffs.join("\n\n");
|
||||
Some(unified_diff)
|
||||
}
|
||||
|
||||
/// Return a unified diff patch with user edits made since last read/notification
|
||||
/// and mark them as notified
|
||||
pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
|
||||
let patch = self.unnotified_user_edits(cx);
|
||||
self.tracked_buffers.values_mut().for_each(|tracked| {
|
||||
tracked.may_have_unnotified_user_edits = false;
|
||||
tracked.last_seen_base = tracked.diff_base.clone();
|
||||
});
|
||||
patch
|
||||
}
|
||||
|
||||
fn track_buffer_internal(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
@@ -82,26 +145,31 @@ impl ActionLog {
|
||||
let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
|
||||
let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
|
||||
let diff_base;
|
||||
let last_seen_base;
|
||||
let unreviewed_edits;
|
||||
if is_created {
|
||||
diff_base = Rope::default();
|
||||
last_seen_base = Rope::default();
|
||||
unreviewed_edits = Patch::new(vec![Edit {
|
||||
old: 0..1,
|
||||
new: 0..text_snapshot.max_point().row + 1,
|
||||
}])
|
||||
} else {
|
||||
diff_base = buffer.read(cx).as_rope().clone();
|
||||
last_seen_base = diff_base.clone();
|
||||
unreviewed_edits = Patch::default();
|
||||
}
|
||||
TrackedBuffer {
|
||||
buffer: buffer.clone(),
|
||||
diff_base,
|
||||
last_seen_base,
|
||||
unreviewed_edits,
|
||||
snapshot: text_snapshot,
|
||||
status,
|
||||
version: buffer.read(cx).version(),
|
||||
diff,
|
||||
diff_update: diff_update_tx,
|
||||
may_have_unnotified_user_edits: false,
|
||||
_open_lsp_handle: open_lsp_handle,
|
||||
_maintain_diff: cx.spawn({
|
||||
let buffer = buffer.clone();
|
||||
@@ -252,9 +320,10 @@ impl ActionLog {
|
||||
let new_snapshot = buffer_snapshot.clone();
|
||||
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
|
||||
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
|
||||
let mut has_user_changes = false;
|
||||
async move {
|
||||
if let ChangeAuthor::User = author {
|
||||
apply_non_conflicting_edits(
|
||||
has_user_changes = apply_non_conflicting_edits(
|
||||
&unreviewed_edits,
|
||||
edits,
|
||||
&mut base_text,
|
||||
@@ -262,13 +331,22 @@ impl ActionLog {
|
||||
);
|
||||
}
|
||||
|
||||
(Arc::new(base_text.to_string()), base_text)
|
||||
(Arc::new(base_text.to_string()), base_text, has_user_changes)
|
||||
}
|
||||
});
|
||||
|
||||
anyhow::Ok(rebase)
|
||||
})??;
|
||||
let (new_base_text, new_diff_base) = rebase.await;
|
||||
let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
|
||||
|
||||
this.update(cx, |this, _| {
|
||||
let tracked_buffer = this
|
||||
.tracked_buffers
|
||||
.get_mut(buffer)
|
||||
.context("buffer not tracked")
|
||||
.unwrap();
|
||||
tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
|
||||
})?;
|
||||
|
||||
Self::update_diff(
|
||||
this,
|
||||
@@ -487,17 +565,14 @@ impl ActionLog {
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
buffer_range: Range<impl language::ToPoint>,
|
||||
telemetry: Option<ActionLogTelemetry>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
|
||||
match tracked_buffer.status {
|
||||
TrackedBufferStatus::Deleted => {
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -506,6 +581,7 @@ impl ActionLog {
|
||||
let buffer_range =
|
||||
buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
|
||||
let mut delta = 0i32;
|
||||
|
||||
tracked_buffer.unreviewed_edits.retain_mut(|edit| {
|
||||
edit.old.start = (edit.old.start as i32 + delta) as u32;
|
||||
edit.old.end = (edit.old.end as i32 + delta) as u32;
|
||||
@@ -537,7 +613,6 @@ impl ActionLog {
|
||||
.collect::<String>(),
|
||||
);
|
||||
delta += edit.new_len() as i32 - edit.old_len() as i32;
|
||||
metrics.add_edit(edit);
|
||||
false
|
||||
}
|
||||
});
|
||||
@@ -549,24 +624,19 @@ impl ActionLog {
|
||||
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
|
||||
}
|
||||
}
|
||||
if let Some(telemetry) = telemetry {
|
||||
telemetry_report_accepted_edits(&telemetry, metrics);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reject_edits_in_ranges(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
buffer_ranges: Vec<Range<impl language::ToPoint>>,
|
||||
telemetry: Option<ActionLogTelemetry>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
|
||||
return Task::ready(Ok(()));
|
||||
};
|
||||
|
||||
let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
|
||||
let task = match &tracked_buffer.status {
|
||||
match &tracked_buffer.status {
|
||||
TrackedBufferStatus::Created {
|
||||
existing_file_content,
|
||||
} => {
|
||||
@@ -616,7 +686,6 @@ impl ActionLog {
|
||||
}
|
||||
};
|
||||
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
cx.notify();
|
||||
task
|
||||
@@ -630,7 +699,6 @@ impl ActionLog {
|
||||
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
|
||||
|
||||
// Clear all tracked edits for this buffer and start over as if we just read it.
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.buffer_read(buffer.clone(), cx);
|
||||
cx.notify();
|
||||
@@ -670,7 +738,6 @@ impl ActionLog {
|
||||
}
|
||||
|
||||
if revert {
|
||||
metrics.add_edit(edit);
|
||||
let old_range = tracked_buffer
|
||||
.diff_base
|
||||
.point_to_offset(Point::new(edit.old.start, 0))
|
||||
@@ -691,25 +758,12 @@ impl ActionLog {
|
||||
self.project
|
||||
.update(cx, |project, cx| project.save_buffer(buffer, cx))
|
||||
}
|
||||
};
|
||||
if let Some(telemetry) = telemetry {
|
||||
telemetry_report_rejected_edits(&telemetry, metrics);
|
||||
}
|
||||
task
|
||||
}
|
||||
|
||||
pub fn keep_all_edits(
|
||||
&mut self,
|
||||
telemetry: Option<ActionLogTelemetry>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.tracked_buffers.retain(|buffer, tracked_buffer| {
|
||||
let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
if let Some(telemetry) = telemetry.as_ref() {
|
||||
telemetry_report_accepted_edits(telemetry, metrics);
|
||||
}
|
||||
match tracked_buffer.status {
|
||||
pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
|
||||
self.tracked_buffers
|
||||
.retain(|_buffer, tracked_buffer| match tracked_buffer.status {
|
||||
TrackedBufferStatus::Deleted => false,
|
||||
_ => {
|
||||
if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
|
||||
@@ -720,24 +774,13 @@ impl ActionLog {
|
||||
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
|
||||
true
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn reject_all_edits(
|
||||
&mut self,
|
||||
telemetry: Option<ActionLogTelemetry>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<()> {
|
||||
pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
|
||||
let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
|
||||
let reject = self.reject_edits_in_ranges(
|
||||
buffer,
|
||||
vec![Anchor::MIN..Anchor::MAX],
|
||||
telemetry.clone(),
|
||||
cx,
|
||||
);
|
||||
let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
|
||||
|
||||
async move {
|
||||
reject.await.log_err();
|
||||
@@ -745,7 +788,8 @@ impl ActionLog {
|
||||
});
|
||||
|
||||
let task = futures::future::join_all(futures);
|
||||
cx.background_spawn(async move {
|
||||
|
||||
cx.spawn(async move |_, _| {
|
||||
task.await;
|
||||
})
|
||||
}
|
||||
@@ -775,61 +819,6 @@ impl ActionLog {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ActionLogTelemetry {
|
||||
pub agent_telemetry_id: &'static str,
|
||||
pub session_id: Arc<str>,
|
||||
}
|
||||
|
||||
struct ActionLogMetrics {
|
||||
lines_removed: u32,
|
||||
lines_added: u32,
|
||||
language: Option<SharedString>,
|
||||
}
|
||||
|
||||
impl ActionLogMetrics {
|
||||
fn for_buffer(buffer: &Buffer) -> Self {
|
||||
Self {
|
||||
language: buffer.language().map(|l| l.name().0),
|
||||
lines_removed: 0,
|
||||
lines_added: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn add_edits(&mut self, edits: &[Edit<u32>]) {
|
||||
for edit in edits {
|
||||
self.add_edit(edit);
|
||||
}
|
||||
}
|
||||
|
||||
fn add_edit(&mut self, edit: &Edit<u32>) {
|
||||
self.lines_added += edit.new_len();
|
||||
self.lines_removed += edit.old_len();
|
||||
}
|
||||
}
|
||||
|
||||
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
|
||||
telemetry::event!(
|
||||
"Agent Edits Accepted",
|
||||
agent = telemetry.agent_telemetry_id,
|
||||
session = telemetry.session_id,
|
||||
language = metrics.language,
|
||||
lines_added = metrics.lines_added,
|
||||
lines_removed = metrics.lines_removed
|
||||
);
|
||||
}
|
||||
|
||||
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
|
||||
telemetry::event!(
|
||||
"Agent Edits Rejected",
|
||||
agent = telemetry.agent_telemetry_id,
|
||||
session = telemetry.session_id,
|
||||
language = metrics.language,
|
||||
lines_added = metrics.lines_added,
|
||||
lines_removed = metrics.lines_removed
|
||||
);
|
||||
}
|
||||
|
||||
fn apply_non_conflicting_edits(
|
||||
patch: &Patch<u32>,
|
||||
edits: Vec<Edit<u32>>,
|
||||
@@ -960,12 +949,14 @@ enum TrackedBufferStatus {
|
||||
struct TrackedBuffer {
|
||||
buffer: Entity<Buffer>,
|
||||
diff_base: Rope,
|
||||
last_seen_base: Rope,
|
||||
unreviewed_edits: Patch<u32>,
|
||||
status: TrackedBufferStatus,
|
||||
version: clock::Global,
|
||||
diff: Entity<BufferDiff>,
|
||||
snapshot: text::BufferSnapshot,
|
||||
diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
|
||||
may_have_unnotified_user_edits: bool,
|
||||
_open_lsp_handle: OpenLspBufferHandle,
|
||||
_maintain_diff: Task<()>,
|
||||
_subscription: Subscription,
|
||||
@@ -996,6 +987,7 @@ mod tests {
|
||||
use super::*;
|
||||
use buffer_diff::DiffHunkStatusKind;
|
||||
use gpui::TestAppContext;
|
||||
use indoc::indoc;
|
||||
use language::Point;
|
||||
use project::{FakeFs, Fs, Project, RemoveOptions};
|
||||
use rand::prelude::*;
|
||||
@@ -1013,6 +1005,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1072,7 +1066,7 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
@@ -1088,7 +1082,7 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -1173,7 +1167,7 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -1270,7 +1264,111 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_user_edits_notifications(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({"file": indoc! {"
|
||||
abc
|
||||
def
|
||||
ghi
|
||||
jkl
|
||||
mno"}}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let file_path = project
|
||||
.read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
|
||||
.unwrap();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(file_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Agent edits
|
||||
cx.update(|cx| {
|
||||
action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer
|
||||
.edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
|
||||
.unwrap()
|
||||
});
|
||||
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
indoc! {"
|
||||
abc
|
||||
deF
|
||||
GHI
|
||||
jkl
|
||||
mno"}
|
||||
);
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![HunkStatus {
|
||||
range: Point::new(1, 0)..Point::new(3, 0),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "def\nghi\n".into(),
|
||||
}],
|
||||
)]
|
||||
);
|
||||
|
||||
// User edits
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
[
|
||||
(Point::new(0, 2)..Point::new(0, 2), "X"),
|
||||
(Point::new(3, 0)..Point::new(3, 0), "Y"),
|
||||
],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
indoc! {"
|
||||
abXc
|
||||
deF
|
||||
GHI
|
||||
Yjkl
|
||||
mno"}
|
||||
);
|
||||
|
||||
// User edits should be stored separately from agent's
|
||||
let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
|
||||
assert_eq!(
|
||||
user_edits.expect("should have some user edits"),
|
||||
indoc! {"
|
||||
--- a/dir/file
|
||||
+++ b/dir/file
|
||||
@@ -1,5 +1,5 @@
|
||||
-abc
|
||||
+abXc
|
||||
def
|
||||
ghi
|
||||
-jkl
|
||||
+Yjkl
|
||||
mno
|
||||
"}
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -1329,7 +1427,7 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), 0..5, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -1381,7 +1479,7 @@ mod tests {
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1461,7 +1559,7 @@ mod tests {
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1644,7 +1742,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(4, 0)..Point::new(4, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1679,7 +1776,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(1, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1707,7 +1803,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(4, 0)..Point::new(4, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1782,7 +1877,7 @@ mod tests {
|
||||
let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
|
||||
..buffer.read(cx).anchor_before(Point::new(5, 3));
|
||||
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
|
||||
.detach();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
@@ -1843,7 +1938,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(0, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1899,7 +1993,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(0, 11)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1962,7 +2055,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(100, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -2010,7 +2102,7 @@ mod tests {
|
||||
|
||||
// User accepts the single hunk
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -2031,7 +2123,7 @@ mod tests {
|
||||
// User rejects the hunk
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], None, cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -2075,7 +2167,7 @@ mod tests {
|
||||
cx.run_until_parked();
|
||||
|
||||
// User clicks "Accept All"
|
||||
action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
|
||||
action_log.update(cx, |log, cx| log.keep_all_edits(cx));
|
||||
cx.run_until_parked();
|
||||
assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared
|
||||
@@ -2094,7 +2186,7 @@ mod tests {
|
||||
|
||||
// User clicks "Reject All"
|
||||
action_log
|
||||
.update(cx, |log, cx| log.reject_all_edits(None, cx))
|
||||
.update(cx, |log, cx| log.reject_all_edits(cx))
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
|
||||
@@ -2134,7 +2226,7 @@ mod tests {
|
||||
action_log.update(cx, |log, cx| {
|
||||
let range = buffer.read(cx).random_byte_range(0, &mut rng);
|
||||
log::info!("keeping edits in range {:?}", range);
|
||||
log.keep_edits_in_range(buffer.clone(), range, None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), range, cx)
|
||||
});
|
||||
}
|
||||
25..50 => {
|
||||
@@ -2142,7 +2234,7 @@ mod tests {
|
||||
.update(cx, |log, cx| {
|
||||
let range = buffer.read(cx).random_byte_range(0, &mut rng);
|
||||
log::info!("rejecting edits in range {:?}", range);
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -2396,4 +2488,61 @@ mod tests {
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_format_patch(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({"test.txt": "line 1\nline 2\nline 3\n"}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
|
||||
let file_path = project
|
||||
.read_with(cx, |project, cx| {
|
||||
project.find_project_path("dir/test.txt", cx)
|
||||
})
|
||||
.unwrap();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(file_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.update(|cx| {
|
||||
// Track the buffer and mark it as read first
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.buffer_read(buffer.clone(), cx);
|
||||
});
|
||||
|
||||
// Make some edits to create a patch
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer
|
||||
.edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
|
||||
.unwrap(); // Replace "line2" with "CHANGED"
|
||||
});
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
// Get the patch
|
||||
let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
|
||||
|
||||
// Verify the patch format contains expected unified diff elements
|
||||
assert_eq!(
|
||||
patch.unwrap(),
|
||||
indoc! {"
|
||||
--- a/dir/test.txt
|
||||
+++ b/dir/test.txt
|
||||
@@ -1,3 +1,3 @@
|
||||
line 1
|
||||
-line 2
|
||||
+CHANGED
|
||||
line 3
|
||||
"}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,7 +17,6 @@ anyhow.workspace = true
|
||||
auto_update.workspace = true
|
||||
editor.workspace = true
|
||||
extension_host.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
|
||||
@@ -11,7 +11,7 @@ use language::{
|
||||
LanguageServerStatusUpdate, ServerHealth,
|
||||
};
|
||||
use project::{
|
||||
LanguageServerProgress, LspStoreEvent, ProgressToken, Project, ProjectEnvironmentEvent,
|
||||
LanguageServerProgress, LspStoreEvent, Project, ProjectEnvironmentEvent,
|
||||
git_store::{GitStoreEvent, Repository},
|
||||
};
|
||||
use smallvec::SmallVec;
|
||||
@@ -51,7 +51,6 @@ pub struct ActivityIndicator {
|
||||
project: Entity<Project>,
|
||||
auto_updater: Option<Entity<AutoUpdater>>,
|
||||
context_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
fs_jobs: Vec<fs::JobInfo>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -62,7 +61,7 @@ struct ServerStatus {
|
||||
|
||||
struct PendingWork<'a> {
|
||||
language_server_id: LanguageServerId,
|
||||
progress_token: &'a ProgressToken,
|
||||
progress_token: &'a str,
|
||||
progress: &'a LanguageServerProgress,
|
||||
}
|
||||
|
||||
@@ -100,27 +99,6 @@ impl ActivityIndicator {
|
||||
})
|
||||
.detach();
|
||||
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let mut job_events = fs.subscribe_to_jobs();
|
||||
cx.spawn(async move |this, cx| {
|
||||
while let Some(job_event) = job_events.next().await {
|
||||
this.update(cx, |this: &mut ActivityIndicator, cx| {
|
||||
match job_event {
|
||||
fs::JobEvent::Started { info } => {
|
||||
this.fs_jobs.retain(|j| j.id != info.id);
|
||||
this.fs_jobs.push(info);
|
||||
}
|
||||
fs::JobEvent::Completed { id } => {
|
||||
this.fs_jobs.retain(|j| j.id != id);
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.subscribe(
|
||||
&project.read(cx).lsp_store(),
|
||||
|activity_indicator, _, event, cx| {
|
||||
@@ -223,8 +201,7 @@ impl ActivityIndicator {
|
||||
statuses: Vec::new(),
|
||||
project: project.clone(),
|
||||
auto_updater,
|
||||
context_menu_handle: PopoverMenuHandle::default(),
|
||||
fs_jobs: Vec::new(),
|
||||
context_menu_handle: Default::default(),
|
||||
}
|
||||
});
|
||||
|
||||
@@ -336,9 +313,9 @@ impl ActivityIndicator {
|
||||
let mut pending_work = status
|
||||
.pending_work
|
||||
.iter()
|
||||
.map(|(progress_token, progress)| PendingWork {
|
||||
.map(|(token, progress)| PendingWork {
|
||||
language_server_id: server_id,
|
||||
progress_token,
|
||||
progress_token: token.as_str(),
|
||||
progress,
|
||||
})
|
||||
.collect::<SmallVec<[_; 4]>>();
|
||||
@@ -381,7 +358,11 @@ impl ActivityIndicator {
|
||||
..
|
||||
}) = pending_work.next()
|
||||
{
|
||||
let mut message = progress.title.clone().unwrap_or(progress_token.to_string());
|
||||
let mut message = progress
|
||||
.title
|
||||
.as_deref()
|
||||
.unwrap_or(progress_token)
|
||||
.to_string();
|
||||
|
||||
if let Some(percentage) = progress.percentage {
|
||||
write!(&mut message, " ({}%)", percentage).unwrap();
|
||||
@@ -455,23 +436,6 @@ impl ActivityIndicator {
|
||||
});
|
||||
}
|
||||
|
||||
// Show any long-running fs command
|
||||
for fs_job in &self.fs_jobs {
|
||||
if Instant::now().duration_since(fs_job.start) >= GIT_OPERATION_DELAY {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
.with_rotate_animation(2)
|
||||
.into_any_element(),
|
||||
),
|
||||
message: fs_job.message.clone().into(),
|
||||
on_click: None,
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Show any language server installation info.
|
||||
let mut downloading = SmallVec::<[_; 3]>::new();
|
||||
let mut checking_for_update = SmallVec::<[_; 3]>::new();
|
||||
@@ -809,7 +773,7 @@ impl Render for ActivityIndicator {
|
||||
let Some(content) = self.content_to_render(cx) else {
|
||||
return result;
|
||||
};
|
||||
let activity_indicator = cx.entity().downgrade();
|
||||
let this = cx.entity().downgrade();
|
||||
let truncate_content = content.message.len() > MAX_MESSAGE_LEN;
|
||||
result.gap_2().child(
|
||||
PopoverMenu::new("activity-indicator-popover")
|
||||
@@ -851,21 +815,22 @@ impl Render for ActivityIndicator {
|
||||
)
|
||||
.anchor(gpui::Corner::BottomLeft)
|
||||
.menu(move |window, cx| {
|
||||
let strong_this = activity_indicator.upgrade()?;
|
||||
let strong_this = this.upgrade()?;
|
||||
let mut has_work = false;
|
||||
let menu = ContextMenu::build(window, cx, |mut menu, _, cx| {
|
||||
for work in strong_this.read(cx).pending_language_server_work(cx) {
|
||||
has_work = true;
|
||||
let activity_indicator = activity_indicator.clone();
|
||||
let this = this.clone();
|
||||
let mut title = work
|
||||
.progress
|
||||
.title
|
||||
.clone()
|
||||
.unwrap_or(work.progress_token.to_string());
|
||||
.as_deref()
|
||||
.unwrap_or(work.progress_token)
|
||||
.to_owned();
|
||||
|
||||
if work.progress.is_cancellable {
|
||||
let language_server_id = work.language_server_id;
|
||||
let token = work.progress_token.clone();
|
||||
let token = work.progress_token.to_string();
|
||||
let title = SharedString::from(title);
|
||||
menu = menu.custom_entry(
|
||||
move |_, _| {
|
||||
@@ -877,23 +842,18 @@ impl Render for ActivityIndicator {
|
||||
.into_any_element()
|
||||
},
|
||||
move |_, cx| {
|
||||
let token = token.clone();
|
||||
activity_indicator
|
||||
.update(cx, |activity_indicator, cx| {
|
||||
activity_indicator.project.update(
|
||||
this.update(cx, |this, cx| {
|
||||
this.project.update(cx, |project, cx| {
|
||||
project.cancel_language_server_work(
|
||||
language_server_id,
|
||||
Some(token.clone()),
|
||||
cx,
|
||||
|project, cx| {
|
||||
project.cancel_language_server_work(
|
||||
language_server_id,
|
||||
Some(token),
|
||||
cx,
|
||||
);
|
||||
},
|
||||
);
|
||||
activity_indicator.context_menu_handle.hide(cx);
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
this.context_menu_handle.hide(cx);
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
},
|
||||
);
|
||||
} else {
|
||||
|
||||
@@ -11,7 +11,7 @@ path = "src/agent.rs"
|
||||
[features]
|
||||
test-support = ["db/test-support"]
|
||||
eval = []
|
||||
unit-eval = []
|
||||
edit-agent-eval = []
|
||||
e2e = []
|
||||
|
||||
[lints]
|
||||
@@ -63,6 +63,7 @@ streaming_diff.workspace = true
|
||||
strsim.workspace = true
|
||||
task.workspace = true
|
||||
telemetry.workspace = true
|
||||
terminal.workspace = true
|
||||
text.workspace = true
|
||||
thiserror.workspace = true
|
||||
ui.workspace = true
|
||||
|
||||
@@ -6,6 +6,7 @@ mod native_agent_server;
|
||||
pub mod outline;
|
||||
mod templates;
|
||||
mod thread;
|
||||
mod tool_schema;
|
||||
mod tools;
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -217,7 +218,7 @@ impl LanguageModels {
|
||||
}
|
||||
_ => {
|
||||
log::error!(
|
||||
"Failed to authenticate provider: {}: {err:#}",
|
||||
"Failed to authenticate provider: {}: {err}",
|
||||
provider_name.0
|
||||
);
|
||||
}
|
||||
@@ -966,10 +967,6 @@ impl acp_thread::AgentModelSelector for NativeAgentModelSelector {
|
||||
}
|
||||
|
||||
impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"zed"
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -1038,13 +1035,12 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
let session_id = params.session_id.clone();
|
||||
log::info!("Received prompt request for session: {}", session_id);
|
||||
log::debug!("Prompt blocks count: {}", params.prompt.len());
|
||||
let path_style = self.0.read(cx).project.read(cx).path_style(cx);
|
||||
|
||||
self.run_turn(session_id, cx, move |thread, cx| {
|
||||
self.run_turn(session_id, cx, |thread, cx| {
|
||||
let content: Vec<UserMessageContent> = params
|
||||
.prompt
|
||||
.into_iter()
|
||||
.map(|block| UserMessageContent::from_content_block(block, path_style))
|
||||
.map(Into::into)
|
||||
.collect::<Vec<_>>();
|
||||
log::debug!("Converted prompt to message: {} chars", content.len());
|
||||
log::debug!("Message id: {:?}", id);
|
||||
@@ -1110,6 +1106,10 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
}
|
||||
|
||||
impl acp_thread::AgentTelemetry for NativeAgentConnection {
|
||||
fn agent_name(&self) -> String {
|
||||
"Zed".into()
|
||||
}
|
||||
|
||||
fn thread_data(
|
||||
&self,
|
||||
session_id: &acp::SessionId,
|
||||
@@ -1626,7 +1626,9 @@ mod internal_tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
|
||||
Project::init_settings(cx);
|
||||
agent_settings::init(cx);
|
||||
language::init(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1394,7 +1394,7 @@ mod tests {
|
||||
|
||||
async fn init_test(cx: &mut TestAppContext) -> EditAgent {
|
||||
cx.update(settings::init);
|
||||
|
||||
cx.update(Project::init_settings);
|
||||
let project = Project::test(FakeFs::new(cx.executor()), [], cx).await;
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
|
||||
@@ -13,15 +13,7 @@ const EDITS_END_TAG: &str = "</edits>";
|
||||
const SEARCH_MARKER: &str = "<<<<<<< SEARCH";
|
||||
const SEPARATOR_MARKER: &str = "=======";
|
||||
const REPLACE_MARKER: &str = ">>>>>>> REPLACE";
|
||||
const SONNET_PARAMETER_INVOKE_1: &str = "</parameter>\n</invoke>";
|
||||
const SONNET_PARAMETER_INVOKE_2: &str = "</parameter></invoke>";
|
||||
const END_TAGS: [&str; 5] = [
|
||||
OLD_TEXT_END_TAG,
|
||||
NEW_TEXT_END_TAG,
|
||||
EDITS_END_TAG,
|
||||
SONNET_PARAMETER_INVOKE_1, // Remove this after switching to streaming tool call
|
||||
SONNET_PARAMETER_INVOKE_2,
|
||||
];
|
||||
const END_TAGS: [&str; 3] = [OLD_TEXT_END_TAG, NEW_TEXT_END_TAG, EDITS_END_TAG];
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum EditParserEvent {
|
||||
@@ -555,37 +547,6 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_xml_edits_with_closing_parameter_invoke(mut rng: StdRng) {
|
||||
// This case is a regression with Claude Sonnet 4.5.
|
||||
// Sometimes Sonnet thinks that it's doing a tool call
|
||||
// and closes its response with '</parameter></invoke>'
|
||||
// instead of properly closing </new_text>
|
||||
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
<old_text>some text</old_text><new_text>updated text</parameter></invoke>
|
||||
"},
|
||||
&mut parser,
|
||||
&mut rng
|
||||
),
|
||||
vec![Edit {
|
||||
old_text: "some text".to_string(),
|
||||
new_text: "updated text".to_string(),
|
||||
line_hint: None,
|
||||
},]
|
||||
);
|
||||
assert_eq!(
|
||||
parser.finish(),
|
||||
EditParserMetrics {
|
||||
tags: 2,
|
||||
mismatched_tags: 1
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_xml_nested_tags(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
@@ -1074,11 +1035,6 @@ mod tests {
|
||||
last_ix = chunk_ix;
|
||||
}
|
||||
|
||||
if new_text.is_some() {
|
||||
pending_edit.new_text = new_text.take().unwrap();
|
||||
edits.push(pending_edit);
|
||||
}
|
||||
|
||||
edits
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,7 +31,7 @@ use std::{
|
||||
use util::path;
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
fn eval_extract_handle_command_output() {
|
||||
// Test how well agent generates multiple edit hunks.
|
||||
//
|
||||
@@ -108,7 +108,7 @@ fn eval_extract_handle_command_output() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
fn eval_delete_run_git_blame() {
|
||||
// Model | Pass rate
|
||||
// ----------------------------|----------
|
||||
@@ -171,7 +171,7 @@ fn eval_delete_run_git_blame() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
fn eval_translate_doc_comments() {
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
@@ -234,7 +234,7 @@ fn eval_translate_doc_comments() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
@@ -360,7 +360,7 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
fn eval_disable_cursor_blinking() {
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
@@ -446,7 +446,7 @@ fn eval_disable_cursor_blinking() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
fn eval_from_pixels_constructor() {
|
||||
// Results for 2025-06-13
|
||||
//
|
||||
@@ -656,7 +656,7 @@ fn eval_from_pixels_constructor() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
fn eval_zode() {
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
@@ -763,7 +763,7 @@ fn eval_zode() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
fn eval_add_overwrite_test() {
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
@@ -995,7 +995,7 @@ fn eval_add_overwrite_test() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
fn eval_create_empty_file() {
|
||||
// Check that Edit Agent can create a file without writing its
|
||||
// thoughts into it. This issue is not specific to empty files, but
|
||||
@@ -1468,9 +1468,14 @@ impl EditAgentTest {
|
||||
gpui_tokio::init(cx);
|
||||
let http_client = Arc::new(ReqwestClient::user_agent("agent tests").unwrap());
|
||||
cx.set_http_client(http_client);
|
||||
|
||||
client::init_settings(cx);
|
||||
let client = Client::production(cx);
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
|
||||
settings::init(cx);
|
||||
Project::init_settings(cx);
|
||||
language::init(cx);
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client.clone(), cx);
|
||||
});
|
||||
@@ -1576,7 +1581,6 @@ impl EditAgentTest {
|
||||
let template = crate::SystemPromptTemplate {
|
||||
project: &project_context,
|
||||
available_tools: tool_names,
|
||||
model_name: None,
|
||||
};
|
||||
let templates = Templates::new();
|
||||
template.render(&templates).unwrap()
|
||||
|
||||
@@ -88,6 +88,8 @@ mod tests {
|
||||
async |fs, project, cx| {
|
||||
let auth = cx.update(|cx| {
|
||||
prompt_store::init(cx);
|
||||
terminal::init(cx);
|
||||
|
||||
let registry = language_model::LanguageModelRegistry::read_global(cx);
|
||||
let auth = registry
|
||||
.provider(&language_model::ANTHROPIC_PROVIDER_ID)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use gpui::{AsyncApp, Entity};
|
||||
use language::{Buffer, OutlineItem};
|
||||
use language::{Buffer, OutlineItem, ParseStatus};
|
||||
use regex::Regex;
|
||||
use std::fmt::Write;
|
||||
use text::Point;
|
||||
@@ -30,9 +30,10 @@ pub async fn get_buffer_content_or_outline(
|
||||
if file_size > AUTO_OUTLINE_SIZE {
|
||||
// For large files, use outline instead of full content
|
||||
// Wait until the buffer has been fully parsed, so we can read its outline
|
||||
buffer
|
||||
.read_with(cx, |buffer, _| buffer.parsing_idle())?
|
||||
.await;
|
||||
let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
|
||||
while *parse_status.borrow() != ParseStatus::Idle {
|
||||
parse_status.changed().await?;
|
||||
}
|
||||
|
||||
let outline_items = buffer.read_with(cx, |buffer, _| {
|
||||
let snapshot = buffer.snapshot();
|
||||
|
||||
@@ -38,7 +38,6 @@ pub struct SystemPromptTemplate<'a> {
|
||||
#[serde(flatten)]
|
||||
pub project: &'a prompt_store::ProjectContext,
|
||||
pub available_tools: Vec<SharedString>,
|
||||
pub model_name: Option<String>,
|
||||
}
|
||||
|
||||
impl Template for SystemPromptTemplate<'_> {
|
||||
@@ -80,11 +79,9 @@ mod tests {
|
||||
let template = SystemPromptTemplate {
|
||||
project: &project,
|
||||
available_tools: vec!["echo".into()],
|
||||
model_name: Some("test-model".to_string()),
|
||||
};
|
||||
let templates = Templates::new();
|
||||
let rendered = template.render(&templates).unwrap();
|
||||
assert!(rendered.contains("## Fixing Diagnostics"));
|
||||
assert!(rendered.contains("test-model"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -150,12 +150,6 @@ Otherwise, follow debugging best practices:
|
||||
Operating System: {{os}}
|
||||
Default Shell: {{shell}}
|
||||
|
||||
{{#if model_name}}
|
||||
## Model Information
|
||||
|
||||
You are powered by the model named {{model_name}}.
|
||||
|
||||
{{/if}}
|
||||
{{#if (or has_rules has_user_rules)}}
|
||||
## User's Custom Instructions
|
||||
|
||||
|
||||
@@ -160,42 +160,6 @@ async fn test_system_prompt(cx: &mut TestAppContext) {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_system_prompt_without_tools(cx: &mut TestAppContext) {
|
||||
let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
|
||||
let fake_model = model.as_fake();
|
||||
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.send(UserMessageId::new(), ["abc"], cx)
|
||||
})
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
let mut pending_completions = fake_model.pending_completions();
|
||||
assert_eq!(
|
||||
pending_completions.len(),
|
||||
1,
|
||||
"unexpected pending completions: {:?}",
|
||||
pending_completions
|
||||
);
|
||||
|
||||
let pending_completion = pending_completions.pop().unwrap();
|
||||
assert_eq!(pending_completion.messages[0].role, Role::System);
|
||||
|
||||
let system_message = &pending_completion.messages[0];
|
||||
let system_prompt = system_message.content[0].to_str().unwrap();
|
||||
assert!(
|
||||
!system_prompt.contains("## Tool Use"),
|
||||
"unexpected system message: {:?}",
|
||||
system_message
|
||||
);
|
||||
assert!(
|
||||
!system_prompt.contains("## Fixing Diagnostics"),
|
||||
"unexpected system message: {:?}",
|
||||
system_message
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_prompt_caching(cx: &mut TestAppContext) {
|
||||
let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
|
||||
@@ -933,7 +897,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
|
||||
// Test that test-1 profile (default) has echo and delay tools
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test-1".into()), cx);
|
||||
thread.set_profile(AgentProfileId("test-1".into()));
|
||||
thread.send(UserMessageId::new(), ["test"], cx)
|
||||
})
|
||||
.unwrap();
|
||||
@@ -953,7 +917,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
|
||||
// Switch to test-2 profile, and verify that it has only the infinite tool.
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test-2".into()), cx);
|
||||
thread.set_profile(AgentProfileId("test-2".into()));
|
||||
thread.send(UserMessageId::new(), ["test2"], cx)
|
||||
})
|
||||
.unwrap();
|
||||
@@ -1002,8 +966,8 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
|
||||
)
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test".into()), cx)
|
||||
thread.update(cx, |thread, _| {
|
||||
thread.set_profile(AgentProfileId("test".into()))
|
||||
});
|
||||
|
||||
let mut mcp_tool_calls = setup_context_server(
|
||||
@@ -1169,8 +1133,8 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) {
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test".into()), cx);
|
||||
thread.update(cx, |thread, _| {
|
||||
thread.set_profile(AgentProfileId("test".into()));
|
||||
thread.add_tool(EchoTool);
|
||||
thread.add_tool(DelayTool);
|
||||
thread.add_tool(WordListTool);
|
||||
@@ -1851,6 +1815,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
// Initialize language model system with test provider
|
||||
cx.update(|cx| {
|
||||
gpui_tokio::init(cx);
|
||||
client::init_settings(cx);
|
||||
|
||||
let http_client = FakeHttpClient::with_404_response();
|
||||
let clock = Arc::new(clock::FakeSystemClock::new());
|
||||
@@ -1858,7 +1823,9 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
agent_settings::init(cx);
|
||||
});
|
||||
cx.executor().forbid_parking();
|
||||
|
||||
@@ -2392,6 +2359,8 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
|
||||
|
||||
cx.update(|cx| {
|
||||
settings::init(cx);
|
||||
Project::init_settings(cx);
|
||||
agent_settings::init(cx);
|
||||
|
||||
match model {
|
||||
TestModel::Fake => {}
|
||||
@@ -2399,6 +2368,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
|
||||
gpui_tokio::init(cx);
|
||||
let http_client = ReqwestClient::user_agent("agent tests").unwrap();
|
||||
cx.set_http_client(Arc::new(http_client));
|
||||
client::init_settings(cx);
|
||||
let client = Client::production(cx);
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
|
||||
@@ -30,17 +30,16 @@ use gpui::{
|
||||
};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt,
|
||||
LanguageModelId, LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry,
|
||||
LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool,
|
||||
LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat,
|
||||
LanguageModelToolUse, LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage,
|
||||
ZED_CLOUD_PROVIDER_ID,
|
||||
LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
|
||||
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
|
||||
};
|
||||
use project::Project;
|
||||
use prompt_store::ProjectContext;
|
||||
use schemars::{JsonSchema, Schema};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{LanguageModelSelection, Settings, update_settings_file};
|
||||
use settings::{Settings, update_settings_file};
|
||||
use smol::stream::StreamExt;
|
||||
use std::{
|
||||
collections::BTreeMap,
|
||||
@@ -51,7 +50,7 @@ use std::{
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use std::{fmt::Write, path::PathBuf};
|
||||
use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock, paths::PathStyle};
|
||||
use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock};
|
||||
use uuid::Uuid;
|
||||
|
||||
const TOOL_CANCELED_MESSAGE: &str = "Tool canceled by user";
|
||||
@@ -799,8 +798,7 @@ impl Thread {
|
||||
let profile_id = db_thread
|
||||
.profile
|
||||
.unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone());
|
||||
|
||||
let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
let model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
db_thread
|
||||
.model
|
||||
.and_then(|model| {
|
||||
@@ -813,16 +811,6 @@ impl Thread {
|
||||
.or_else(|| registry.default_model())
|
||||
.map(|model| model.model)
|
||||
});
|
||||
|
||||
if model.is_none() {
|
||||
model = Self::resolve_profile_model(&profile_id, cx);
|
||||
}
|
||||
if model.is_none() {
|
||||
model = LanguageModelRegistry::global(cx).update(cx, |registry, _cx| {
|
||||
registry.default_model().map(|model| model.model)
|
||||
});
|
||||
}
|
||||
|
||||
let (prompt_capabilities_tx, prompt_capabilities_rx) =
|
||||
watch::channel(Self::prompt_capabilities(model.as_deref()));
|
||||
|
||||
@@ -1019,17 +1007,8 @@ impl Thread {
|
||||
&self.profile_id
|
||||
}
|
||||
|
||||
pub fn set_profile(&mut self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
|
||||
if self.profile_id == profile_id {
|
||||
return;
|
||||
}
|
||||
|
||||
pub fn set_profile(&mut self, profile_id: AgentProfileId) {
|
||||
self.profile_id = profile_id;
|
||||
|
||||
// Swap to the profile's preferred model when available.
|
||||
if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) {
|
||||
self.set_model(model, cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cancel(&mut self, cx: &mut Context<Self>) {
|
||||
@@ -1086,35 +1065,6 @@ impl Thread {
|
||||
})
|
||||
}
|
||||
|
||||
/// Look up the active profile and resolve its preferred model if one is configured.
|
||||
fn resolve_profile_model(
|
||||
profile_id: &AgentProfileId,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Arc<dyn LanguageModel>> {
|
||||
let selection = AgentSettings::get_global(cx)
|
||||
.profiles
|
||||
.get(profile_id)?
|
||||
.default_model
|
||||
.clone()?;
|
||||
Self::resolve_model_from_selection(&selection, cx)
|
||||
}
|
||||
|
||||
/// Translate a stored model selection into the configured model from the registry.
|
||||
fn resolve_model_from_selection(
|
||||
selection: &LanguageModelSelection,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Arc<dyn LanguageModel>> {
|
||||
let selected = SelectedModel {
|
||||
provider: LanguageModelProviderId::from(selection.provider.0.clone()),
|
||||
model: LanguageModelId::from(selection.model.clone()),
|
||||
};
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry
|
||||
.select_model(&selected, cx)
|
||||
.map(|configured| configured.model)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn resume(
|
||||
&mut self,
|
||||
cx: &mut Context<Self>,
|
||||
@@ -1866,15 +1816,9 @@ impl Thread {
|
||||
log::debug!("Completion intent: {:?}", completion_intent);
|
||||
log::debug!("Completion mode: {:?}", self.completion_mode);
|
||||
|
||||
let available_tools: Vec<_> = self
|
||||
.running_turn
|
||||
.as_ref()
|
||||
.map(|turn| turn.tools.keys().cloned().collect())
|
||||
.unwrap_or_default();
|
||||
|
||||
log::debug!("Request includes {} tools", available_tools.len());
|
||||
let messages = self.build_request_messages(available_tools, cx);
|
||||
let messages = self.build_request_messages(cx);
|
||||
log::debug!("Request will include {} messages", messages.len());
|
||||
log::debug!("Request includes {} tools", tools.len());
|
||||
|
||||
let request = LanguageModelRequest {
|
||||
thread_id: Some(self.id.to_string()),
|
||||
@@ -1965,11 +1909,7 @@ impl Thread {
|
||||
self.running_turn.as_ref()?.tools.get(name).cloned()
|
||||
}
|
||||
|
||||
fn build_request_messages(
|
||||
&self,
|
||||
available_tools: Vec<SharedString>,
|
||||
cx: &App,
|
||||
) -> Vec<LanguageModelRequestMessage> {
|
||||
fn build_request_messages(&self, cx: &App) -> Vec<LanguageModelRequestMessage> {
|
||||
log::trace!(
|
||||
"Building request messages from {} thread messages",
|
||||
self.messages.len()
|
||||
@@ -1977,8 +1917,7 @@ impl Thread {
|
||||
|
||||
let system_prompt = SystemPromptTemplate {
|
||||
project: self.project_context.read(cx),
|
||||
available_tools,
|
||||
model_name: self.model.as_ref().map(|m| m.name().0.to_string()),
|
||||
available_tools: self.tools.keys().cloned().collect(),
|
||||
}
|
||||
.render(&self.templates)
|
||||
.context("failed to build system prompt")
|
||||
@@ -2189,7 +2128,7 @@ where
|
||||
|
||||
/// Returns the JSON schema that describes the tool's input.
|
||||
fn input_schema(format: LanguageModelToolSchemaFormat) -> Schema {
|
||||
language_model::tool_schema::root_schema_for::<Self::Input>(format)
|
||||
crate::tool_schema::root_schema_for::<Self::Input>(format)
|
||||
}
|
||||
|
||||
/// Some tools rely on a provider for the underlying billing or other reasons.
|
||||
@@ -2276,7 +2215,7 @@ where
|
||||
|
||||
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
|
||||
let mut json = serde_json::to_value(T::input_schema(format))?;
|
||||
language_model::tool_schema::adapt_schema_to_format(&mut json, format)?;
|
||||
crate::tool_schema::adapt_schema_to_format(&mut json, format)?;
|
||||
Ok(json)
|
||||
}
|
||||
|
||||
@@ -2599,8 +2538,8 @@ impl From<&str> for UserMessageContent {
|
||||
}
|
||||
}
|
||||
|
||||
impl UserMessageContent {
|
||||
pub fn from_content_block(value: acp::ContentBlock, path_style: PathStyle) -> Self {
|
||||
impl From<acp::ContentBlock> for UserMessageContent {
|
||||
fn from(value: acp::ContentBlock) -> Self {
|
||||
match value {
|
||||
acp::ContentBlock::Text(text_content) => Self::Text(text_content.text),
|
||||
acp::ContentBlock::Image(image_content) => Self::Image(convert_image(image_content)),
|
||||
@@ -2609,7 +2548,7 @@ impl UserMessageContent {
|
||||
Self::Text("[audio]".to_string())
|
||||
}
|
||||
acp::ContentBlock::ResourceLink(resource_link) => {
|
||||
match MentionUri::parse(&resource_link.uri, path_style) {
|
||||
match MentionUri::parse(&resource_link.uri) {
|
||||
Ok(uri) => Self::Mention {
|
||||
uri,
|
||||
content: String::new(),
|
||||
@@ -2622,7 +2561,7 @@ impl UserMessageContent {
|
||||
}
|
||||
acp::ContentBlock::Resource(resource) => match resource.resource {
|
||||
acp::EmbeddedResourceResource::TextResourceContents(resource) => {
|
||||
match MentionUri::parse(&resource.uri, path_style) {
|
||||
match MentionUri::parse(&resource.uri) {
|
||||
Ok(uri) => Self::Mention {
|
||||
uri,
|
||||
content: resource.text,
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use anyhow::Result;
|
||||
use language_model::LanguageModelToolSchemaFormat;
|
||||
use schemars::{
|
||||
JsonSchema, Schema,
|
||||
generate::SchemaSettings,
|
||||
@@ -6,16 +7,7 @@ use schemars::{
|
||||
};
|
||||
use serde_json::Value;
|
||||
|
||||
/// Indicates the format used to define the input schema for a language model tool.
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
|
||||
pub enum LanguageModelToolSchemaFormat {
|
||||
/// A JSON schema, see https://json-schema.org
|
||||
JsonSchema,
|
||||
/// A subset of an OpenAPI 3.0 schema object supported by Google AI, see https://ai.google.dev/api/caching#Schema
|
||||
JsonSchemaSubset,
|
||||
}
|
||||
|
||||
pub fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
|
||||
pub(crate) fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
|
||||
let mut generator = match format {
|
||||
LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(),
|
||||
LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3()
|
||||
@@ -165,7 +165,7 @@ impl AnyAgentTool for ContextServerTool {
|
||||
format: language_model::LanguageModelToolSchemaFormat,
|
||||
) -> Result<serde_json::Value> {
|
||||
let mut schema = self.tool.input_schema.clone();
|
||||
language_model::tool_schema::adapt_schema_to_format(&mut schema, format)?;
|
||||
crate::tool_schema::adapt_schema_to_format(&mut schema, format)?;
|
||||
Ok(match schema {
|
||||
serde_json::Value::Null => {
|
||||
serde_json::json!({ "type": "object", "properties": [] })
|
||||
|
||||
@@ -562,6 +562,7 @@ fn resolve_path(
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{ContextServerRegistry, Templates};
|
||||
use client::TelemetrySettings;
|
||||
use fs::Fs;
|
||||
use gpui::{TestAppContext, UpdateGlobal};
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
@@ -1752,6 +1753,10 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
TelemetrySettings::register(cx);
|
||||
agent_settings::AgentSettings::register(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -246,6 +246,8 @@ mod test {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -778,6 +778,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -223,6 +223,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -163,6 +163,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -509,6 +509,8 @@ mod test {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ acp_tools.workspace = true
|
||||
acp_thread.workspace = true
|
||||
action_log.workspace = true
|
||||
agent-client-protocol.workspace = true
|
||||
agent_settings.workspace = true
|
||||
anyhow.workspace = true
|
||||
async-trait.workspace = true
|
||||
client.workspace = true
|
||||
@@ -32,6 +33,7 @@ gpui.workspace = true
|
||||
gpui_tokio = { workspace = true, optional = true }
|
||||
http_client.workspace = true
|
||||
indoc.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
language_models.workspace = true
|
||||
log.workspace = true
|
||||
|
||||
@@ -29,7 +29,6 @@ pub struct UnsupportedVersion;
|
||||
|
||||
pub struct AcpConnection {
|
||||
server_name: SharedString,
|
||||
telemetry_id: &'static str,
|
||||
connection: Rc<acp::ClientSideConnection>,
|
||||
sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
|
||||
auth_methods: Vec<acp::AuthMethod>,
|
||||
@@ -53,7 +52,6 @@ pub struct AcpSession {
|
||||
|
||||
pub async fn connect(
|
||||
server_name: SharedString,
|
||||
telemetry_id: &'static str,
|
||||
command: AgentServerCommand,
|
||||
root_dir: &Path,
|
||||
default_mode: Option<acp::SessionModeId>,
|
||||
@@ -62,7 +60,6 @@ pub async fn connect(
|
||||
) -> Result<Rc<dyn AgentConnection>> {
|
||||
let conn = AcpConnection::stdio(
|
||||
server_name,
|
||||
telemetry_id,
|
||||
command.clone(),
|
||||
root_dir,
|
||||
default_mode,
|
||||
@@ -78,7 +75,6 @@ const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1;
|
||||
impl AcpConnection {
|
||||
pub async fn stdio(
|
||||
server_name: SharedString,
|
||||
telemetry_id: &'static str,
|
||||
command: AgentServerCommand,
|
||||
root_dir: &Path,
|
||||
default_mode: Option<acp::SessionModeId>,
|
||||
@@ -136,7 +132,7 @@ impl AcpConnection {
|
||||
while let Ok(n) = stderr.read_line(&mut line).await
|
||||
&& n > 0
|
||||
{
|
||||
log::warn!("agent stderr: {}", line.trim());
|
||||
log::warn!("agent stderr: {}", &line);
|
||||
line.clear();
|
||||
}
|
||||
Ok(())
|
||||
@@ -182,7 +178,6 @@ impl AcpConnection {
|
||||
meta: Some(serde_json::json!({
|
||||
// Experimental: Allow for rendering terminal output from the agents
|
||||
"terminal_output": true,
|
||||
"terminal-auth": true,
|
||||
})),
|
||||
},
|
||||
client_info: Some(acp::Implementation {
|
||||
@@ -203,7 +198,6 @@ impl AcpConnection {
|
||||
root_dir: root_dir.to_owned(),
|
||||
connection,
|
||||
server_name,
|
||||
telemetry_id,
|
||||
sessions,
|
||||
agent_capabilities: response.agent_capabilities,
|
||||
default_mode,
|
||||
@@ -231,10 +225,6 @@ impl Drop for AcpConnection {
|
||||
}
|
||||
|
||||
impl AgentConnection for AcpConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
self.telemetry_id
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
|
||||
@@ -62,7 +62,6 @@ impl AgentServer for ClaudeCode {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
@@ -86,7 +85,6 @@ impl AgentServer for ClaudeCode {
|
||||
.await?;
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -63,7 +63,6 @@ impl AgentServer for Codex {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
@@ -88,7 +87,6 @@ impl AgentServer for Codex {
|
||||
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -67,7 +67,6 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let default_mode = self.default_mode(cx);
|
||||
@@ -93,7 +92,6 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
.await?;
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -6,9 +6,7 @@ use gpui::{AppContext, Entity, TestAppContext};
|
||||
use indoc::indoc;
|
||||
#[cfg(test)]
|
||||
use project::agent_server_store::BuiltinAgentServerSettings;
|
||||
use project::{FakeFs, Project};
|
||||
#[cfg(test)]
|
||||
use settings::Settings;
|
||||
use project::{FakeFs, Project, agent_server_store::AllAgentServersSettings};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
@@ -454,22 +452,29 @@ pub use common_e2e_tests;
|
||||
// Helpers
|
||||
|
||||
pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
|
||||
use settings::Settings;
|
||||
|
||||
env_logger::try_init().ok();
|
||||
|
||||
cx.update(|cx| {
|
||||
let settings_store = settings::SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
language::init(cx);
|
||||
gpui_tokio::init(cx);
|
||||
let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap();
|
||||
cx.set_http_client(Arc::new(http_client));
|
||||
client::init_settings(cx);
|
||||
let client = client::Client::production(cx);
|
||||
let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client, cx);
|
||||
agent_settings::init(cx);
|
||||
AllAgentServersSettings::register(cx);
|
||||
|
||||
#[cfg(test)]
|
||||
project::agent_server_store::AllAgentServersSettings::override_global(
|
||||
project::agent_server_store::AllAgentServersSettings {
|
||||
AllAgentServersSettings::override_global(
|
||||
AllAgentServersSettings {
|
||||
claude: Some(BuiltinAgentServerSettings {
|
||||
path: Some("claude-code-acp".into()),
|
||||
args: None,
|
||||
|
||||
@@ -31,7 +31,6 @@ impl AgentServer for Gemini {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
@@ -65,7 +64,6 @@ impl AgentServer for Gemini {
|
||||
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -6,8 +6,8 @@ use convert_case::{Case, Casing as _};
|
||||
use fs::Fs;
|
||||
use gpui::{App, SharedString};
|
||||
use settings::{
|
||||
AgentProfileContent, ContextServerPresetContent, LanguageModelSelection, Settings as _,
|
||||
SettingsContent, update_settings_file,
|
||||
AgentProfileContent, ContextServerPresetContent, Settings as _, SettingsContent,
|
||||
update_settings_file,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
|
||||
@@ -53,30 +53,19 @@ impl AgentProfile {
|
||||
let base_profile =
|
||||
base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned());
|
||||
|
||||
// Copy toggles from the base profile so the new profile starts with familiar defaults.
|
||||
let tools = base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.tools.clone())
|
||||
.unwrap_or_default();
|
||||
let enable_all_context_servers = base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.enable_all_context_servers)
|
||||
.unwrap_or_default();
|
||||
let context_servers = base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.context_servers.clone())
|
||||
.unwrap_or_default();
|
||||
// Preserve the base profile's model preference when cloning into a new profile.
|
||||
let default_model = base_profile
|
||||
.as_ref()
|
||||
.and_then(|profile| profile.default_model.clone());
|
||||
|
||||
let profile_settings = AgentProfileSettings {
|
||||
name: name.into(),
|
||||
tools,
|
||||
enable_all_context_servers,
|
||||
context_servers,
|
||||
default_model,
|
||||
tools: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.tools.clone())
|
||||
.unwrap_or_default(),
|
||||
enable_all_context_servers: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.enable_all_context_servers)
|
||||
.unwrap_or_default(),
|
||||
context_servers: base_profile
|
||||
.map(|profile| profile.context_servers)
|
||||
.unwrap_or_default(),
|
||||
};
|
||||
|
||||
update_settings_file(fs, cx, {
|
||||
@@ -107,8 +96,6 @@ pub struct AgentProfileSettings {
|
||||
pub tools: IndexMap<Arc<str>, bool>,
|
||||
pub enable_all_context_servers: bool,
|
||||
pub context_servers: IndexMap<Arc<str>, ContextServerPreset>,
|
||||
/// Default language model to apply when this profile becomes active.
|
||||
pub default_model: Option<LanguageModelSelection>,
|
||||
}
|
||||
|
||||
impl AgentProfileSettings {
|
||||
@@ -157,7 +144,6 @@ impl AgentProfileSettings {
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
default_model: self.default_model.clone(),
|
||||
},
|
||||
);
|
||||
|
||||
@@ -167,23 +153,15 @@ impl AgentProfileSettings {
|
||||
|
||||
impl From<AgentProfileContent> for AgentProfileSettings {
|
||||
fn from(content: AgentProfileContent) -> Self {
|
||||
let AgentProfileContent {
|
||||
name,
|
||||
tools,
|
||||
enable_all_context_servers,
|
||||
context_servers,
|
||||
default_model,
|
||||
} = content;
|
||||
|
||||
Self {
|
||||
name: name.into(),
|
||||
tools,
|
||||
enable_all_context_servers: enable_all_context_servers.unwrap_or_default(),
|
||||
context_servers: context_servers
|
||||
name: content.name.into(),
|
||||
tools: content.tools,
|
||||
enable_all_context_servers: content.enable_all_context_servers.unwrap_or_default(),
|
||||
context_servers: content
|
||||
.context_servers
|
||||
.into_iter()
|
||||
.map(|(server_id, preset)| (server_id, preset.into()))
|
||||
.collect(),
|
||||
default_model,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{
|
||||
DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection,
|
||||
NotifyWhenAgentWaiting, RegisterSetting, Settings,
|
||||
NotifyWhenAgentWaiting, Settings,
|
||||
};
|
||||
|
||||
pub use crate::agent_profile::*;
|
||||
@@ -19,7 +19,11 @@ pub const SUMMARIZE_THREAD_PROMPT: &str = include_str!("prompts/summarize_thread
|
||||
pub const SUMMARIZE_THREAD_DETAILED_PROMPT: &str =
|
||||
include_str!("prompts/summarize_thread_detailed_prompt.txt");
|
||||
|
||||
#[derive(Clone, Debug, RegisterSetting)]
|
||||
pub fn init(cx: &mut App) {
|
||||
AgentSettings::register(cx);
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AgentSettings {
|
||||
pub enabled: bool,
|
||||
pub button: bool,
|
||||
|
||||
@@ -253,22 +253,17 @@ impl ContextPickerCompletionProvider {
|
||||
) -> Option<Completion> {
|
||||
let project = workspace.read(cx).project().clone();
|
||||
|
||||
let (abs_path, file_name) = match &symbol.path {
|
||||
SymbolLocation::InProject(project_path) => (
|
||||
project.read(cx).absolute_path(&project_path, cx)?,
|
||||
project_path.path.file_name()?.to_string().into(),
|
||||
),
|
||||
let label = CodeLabel::plain(symbol.name.clone(), None);
|
||||
|
||||
let abs_path = match &symbol.path {
|
||||
SymbolLocation::InProject(project_path) => {
|
||||
project.read(cx).absolute_path(&project_path, cx)?
|
||||
}
|
||||
SymbolLocation::OutsideProject {
|
||||
abs_path,
|
||||
signature: _,
|
||||
} => (
|
||||
PathBuf::from(abs_path.as_ref()),
|
||||
abs_path.file_name().map(|f| f.to_string_lossy())?,
|
||||
),
|
||||
} => PathBuf::from(abs_path.as_ref()),
|
||||
};
|
||||
|
||||
let label = build_symbol_label(&symbol.name, &file_name, symbol.range.start.0.row + 1, cx);
|
||||
|
||||
let uri = MentionUri::Symbol {
|
||||
abs_path,
|
||||
name: symbol.name.clone(),
|
||||
@@ -575,7 +570,6 @@ impl ContextPickerCompletionProvider {
|
||||
.unwrap_or_default();
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let include_root_name = workspace.visible_worktrees(cx).count() > 1;
|
||||
|
||||
if let Some(agent_panel) = workspace.panel::<AgentPanel>(cx)
|
||||
&& let Some(thread) = agent_panel.read(cx).active_agent_thread(cx)
|
||||
@@ -602,11 +596,7 @@ impl ContextPickerCompletionProvider {
|
||||
project
|
||||
.worktree_for_id(project_path.worktree_id, cx)
|
||||
.map(|worktree| {
|
||||
let path_prefix = if include_root_name {
|
||||
worktree.read(cx).root_name().into()
|
||||
} else {
|
||||
RelPath::empty().into()
|
||||
};
|
||||
let path_prefix = worktree.read(cx).root_name().into();
|
||||
Match::File(FileMatch {
|
||||
mat: fuzzy::PathMatch {
|
||||
score: 1.,
|
||||
@@ -646,14 +636,16 @@ impl ContextPickerCompletionProvider {
|
||||
cx: &mut App,
|
||||
) -> Vec<ContextPickerEntry> {
|
||||
let embedded_context = self.prompt_capabilities.borrow().embedded_context;
|
||||
let mut entries = vec![
|
||||
ContextPickerEntry::Mode(ContextPickerMode::File),
|
||||
ContextPickerEntry::Mode(ContextPickerMode::Symbol),
|
||||
];
|
||||
|
||||
if embedded_context {
|
||||
entries.push(ContextPickerEntry::Mode(ContextPickerMode::Thread));
|
||||
}
|
||||
let mut entries = if embedded_context {
|
||||
vec![
|
||||
ContextPickerEntry::Mode(ContextPickerMode::File),
|
||||
ContextPickerEntry::Mode(ContextPickerMode::Symbol),
|
||||
ContextPickerEntry::Mode(ContextPickerMode::Thread),
|
||||
]
|
||||
} else {
|
||||
// File is always available, but we don't need a mode entry
|
||||
vec![]
|
||||
};
|
||||
|
||||
let has_selection = workspace
|
||||
.read(cx)
|
||||
@@ -682,17 +674,6 @@ impl ContextPickerCompletionProvider {
|
||||
}
|
||||
}
|
||||
|
||||
fn build_symbol_label(symbol_name: &str, file_name: &str, line: u32, cx: &App) -> CodeLabel {
|
||||
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
|
||||
let mut label = CodeLabelBuilder::default();
|
||||
|
||||
label.push_str(symbol_name, None);
|
||||
label.push_str(" ", None);
|
||||
label.push_str(&format!("{} L{}", file_name, line), comment_id);
|
||||
|
||||
label.build()
|
||||
}
|
||||
|
||||
fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel {
|
||||
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
|
||||
let mut label = CodeLabelBuilder::default();
|
||||
@@ -831,21 +812,9 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
path: mat.path.clone(),
|
||||
};
|
||||
|
||||
// If path is empty, this means we're matching with the root directory itself
|
||||
// so we use the path_prefix as the name
|
||||
let path_prefix = if mat.path.is_empty() {
|
||||
project
|
||||
.read(cx)
|
||||
.worktree_for_id(project_path.worktree_id, cx)
|
||||
.map(|wt| wt.read(cx).root_name().into())
|
||||
.unwrap_or_else(|| mat.path_prefix.clone())
|
||||
} else {
|
||||
mat.path_prefix.clone()
|
||||
};
|
||||
|
||||
Self::completion_for_path(
|
||||
project_path,
|
||||
&path_prefix,
|
||||
&mat.path_prefix,
|
||||
is_recent,
|
||||
mat.is_dir,
|
||||
source_range.clone(),
|
||||
|
||||
@@ -4,7 +4,7 @@ use acp_thread::{AcpThread, AgentThreadEntry};
|
||||
use agent::HistoryStore;
|
||||
use agent_client_protocol::{self as acp, ToolCallId};
|
||||
use collections::HashMap;
|
||||
use editor::{Editor, EditorMode, MinimapVisibility, SizingBehavior};
|
||||
use editor::{Editor, EditorMode, MinimapVisibility};
|
||||
use gpui::{
|
||||
AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
|
||||
ScrollHandle, SharedString, TextStyleRefinement, WeakEntity, Window,
|
||||
@@ -357,7 +357,7 @@ fn create_editor_diff(
|
||||
EditorMode::Full {
|
||||
scale_ui_elements_with_buffer_font_size: false,
|
||||
show_active_line_background: false,
|
||||
sizing_behavior: SizingBehavior::SizeByContent,
|
||||
sized_by_content: true,
|
||||
},
|
||||
diff.read(cx).multibuffer().clone(),
|
||||
None,
|
||||
@@ -401,9 +401,10 @@ mod tests {
|
||||
use acp_thread::{AgentConnection, StubAgentConnection};
|
||||
use agent::HistoryStore;
|
||||
use agent_client_protocol as acp;
|
||||
use agent_settings::AgentSettings;
|
||||
use assistant_text_thread::TextThreadStore;
|
||||
use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
|
||||
use editor::RowInfo;
|
||||
use editor::{EditorSettings, RowInfo};
|
||||
use fs::FakeFs;
|
||||
use gpui::{AppContext as _, SemanticVersion, TestAppContext};
|
||||
|
||||
@@ -412,7 +413,7 @@ mod tests {
|
||||
use pretty_assertions::assert_matches;
|
||||
use project::Project;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use util::path;
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -538,8 +539,13 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
workspace::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
EditorSettings::register(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use crate::{
|
||||
ChatWithFollow,
|
||||
acp::completion_provider::{ContextPickerCompletionProvider, SlashCommandCompletion},
|
||||
context_picker::{ContextPickerAction, fetch_context_picker::fetch_url_content},
|
||||
};
|
||||
@@ -16,7 +15,6 @@ use editor::{
|
||||
MultiBuffer, ToOffset,
|
||||
actions::Paste,
|
||||
display_map::{Crease, CreaseId, FoldId},
|
||||
scroll::Autoscroll,
|
||||
};
|
||||
use futures::{
|
||||
FutureExt as _,
|
||||
@@ -51,7 +49,7 @@ use text::OffsetRangeExt;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{ButtonLike, TintColor, Toggleable, prelude::*};
|
||||
use util::{ResultExt, debug_panic, rel_path::RelPath};
|
||||
use workspace::{CollaboratorId, Workspace, notifications::NotifyResultExt as _};
|
||||
use workspace::{Workspace, notifications::NotifyResultExt as _};
|
||||
use zed_actions::agent::Chat;
|
||||
|
||||
pub struct MessageEditor {
|
||||
@@ -236,16 +234,8 @@ impl MessageEditor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let uri = MentionUri::Thread {
|
||||
id: thread.id.clone(),
|
||||
name: thread.title.to_string(),
|
||||
};
|
||||
let content = format!("{}\n", uri.as_link());
|
||||
|
||||
let content_len = content.len() - 1;
|
||||
|
||||
let start = self.editor.update(cx, |editor, cx| {
|
||||
editor.set_text(content, window, cx);
|
||||
editor.set_text(format!("{}\n", thread.title), window, cx);
|
||||
editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
@@ -254,8 +244,18 @@ impl MessageEditor {
|
||||
.text_anchor
|
||||
});
|
||||
|
||||
self.confirm_mention_completion(thread.title, start, content_len, uri, window, cx)
|
||||
.detach();
|
||||
self.confirm_mention_completion(
|
||||
thread.title.clone(),
|
||||
start,
|
||||
thread.title.len(),
|
||||
MentionUri::Thread {
|
||||
id: thread.id.clone(),
|
||||
name: thread.title.to_string(),
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.detach();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -356,7 +356,7 @@ impl MessageEditor {
|
||||
|
||||
let task = match mention_uri.clone() {
|
||||
MentionUri::Fetch { url } => self.confirm_mention_for_fetch(url, cx),
|
||||
MentionUri::Directory { .. } => Task::ready(Ok(Mention::Link)),
|
||||
MentionUri::Directory { .. } => Task::ready(Ok(Mention::UriOnly)),
|
||||
MentionUri::Thread { id, .. } => self.confirm_mention_for_thread(id, cx),
|
||||
MentionUri::TextThread { path, .. } => self.confirm_mention_for_text_thread(path, cx),
|
||||
MentionUri::File { abs_path } => self.confirm_mention_for_file(abs_path, cx),
|
||||
@@ -373,6 +373,7 @@ impl MessageEditor {
|
||||
)))
|
||||
}
|
||||
MentionUri::Selection { .. } => {
|
||||
// Handled elsewhere
|
||||
debug_panic!("unexpected selection URI");
|
||||
Task::ready(Err(anyhow!("unexpected selection URI")))
|
||||
}
|
||||
@@ -591,21 +592,6 @@ impl MessageEditor {
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// Take this explanation with a grain of salt but, with creases being
|
||||
// inserted, GPUI's recomputes the editor layout in the next frames, so
|
||||
// directly calling `editor.request_autoscroll` wouldn't work as
|
||||
// expected. We're leveraging `cx.on_next_frame` to wait 2 frames and
|
||||
// ensure that the layout has been recalculated so that the autoscroll
|
||||
// request actually shows the cursor's new position.
|
||||
let editor = self.editor.clone();
|
||||
cx.on_next_frame(window, move |_, window, cx| {
|
||||
cx.on_next_frame(window, move |_, _, cx| {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.request_autoscroll(Autoscroll::fit(), cx)
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
fn confirm_mention_for_thread(
|
||||
@@ -703,21 +689,20 @@ impl MessageEditor {
|
||||
return Task::ready(Err(err));
|
||||
}
|
||||
|
||||
let contents = self
|
||||
.mention_set
|
||||
.contents(full_mention_content, self.project.clone(), cx);
|
||||
let contents = self.mention_set.contents(
|
||||
&self.prompt_capabilities.borrow(),
|
||||
full_mention_content,
|
||||
self.project.clone(),
|
||||
cx,
|
||||
);
|
||||
let editor = self.editor.clone();
|
||||
let supports_embedded_context = self.prompt_capabilities.borrow().embedded_context;
|
||||
|
||||
cx.spawn(async move |_, cx| {
|
||||
let contents = contents.await?;
|
||||
let mut all_tracked_buffers = Vec::new();
|
||||
|
||||
let result = editor.update(cx, |editor, cx| {
|
||||
let (mut ix, _) = text
|
||||
.char_indices()
|
||||
.find(|(_, c)| !c.is_whitespace())
|
||||
.unwrap_or((0, '\0'));
|
||||
let mut ix = 0;
|
||||
let mut chunks: Vec<acp::ContentBlock> = Vec::new();
|
||||
let text = editor.text(cx);
|
||||
editor.display_map.update(cx, |map, cx| {
|
||||
@@ -729,6 +714,15 @@ impl MessageEditor {
|
||||
|
||||
let crease_range = crease.range().to_offset(&snapshot.buffer_snapshot());
|
||||
if crease_range.start > ix {
|
||||
//todo(): Custom slash command ContentBlock?
|
||||
// let chunk = if prevent_slash_commands
|
||||
// && ix == 0
|
||||
// && parse_slash_command(&text[ix..]).is_some()
|
||||
// {
|
||||
// format!(" {}", &text[ix..crease_range.start]).into()
|
||||
// } else {
|
||||
// text[ix..crease_range.start].into()
|
||||
// };
|
||||
let chunk = text[ix..crease_range.start].into();
|
||||
chunks.push(chunk);
|
||||
}
|
||||
@@ -738,32 +732,18 @@ impl MessageEditor {
|
||||
tracked_buffers,
|
||||
} => {
|
||||
all_tracked_buffers.extend(tracked_buffers.iter().cloned());
|
||||
if supports_embedded_context {
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
annotations: None,
|
||||
resource:
|
||||
acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents {
|
||||
mime_type: None,
|
||||
text: content.clone(),
|
||||
uri: uri.to_uri().to_string(),
|
||||
meta: None,
|
||||
},
|
||||
),
|
||||
meta: None,
|
||||
})
|
||||
} else {
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
name: uri.name(),
|
||||
uri: uri.to_uri().to_string(),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
annotations: None,
|
||||
resource: acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents {
|
||||
mime_type: None,
|
||||
text: content.clone(),
|
||||
uri: uri.to_uri().to_string(),
|
||||
meta: None,
|
||||
},
|
||||
),
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
Mention::Image(mention_image) => {
|
||||
let uri = match uri {
|
||||
@@ -785,22 +765,33 @@ impl MessageEditor {
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
Mention::Link => acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
name: uri.name(),
|
||||
uri: uri.to_uri().to_string(),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
}),
|
||||
Mention::UriOnly => {
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
name: uri.name(),
|
||||
uri: uri.to_uri().to_string(),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
};
|
||||
chunks.push(chunk);
|
||||
ix = crease_range.end;
|
||||
}
|
||||
|
||||
if ix < text.len() {
|
||||
//todo(): Custom slash command ContentBlock?
|
||||
// let last_chunk = if prevent_slash_commands
|
||||
// && ix == 0
|
||||
// && parse_slash_command(&text[ix..]).is_some()
|
||||
// {
|
||||
// format!(" {}", text[ix..].trim_end())
|
||||
// } else {
|
||||
// text[ix..].trim_end().to_owned()
|
||||
// };
|
||||
let last_chunk = text[ix..].trim_end().to_owned();
|
||||
if !last_chunk.is_empty() {
|
||||
chunks.push(last_chunk.into());
|
||||
@@ -840,21 +831,6 @@ impl MessageEditor {
|
||||
self.send(cx);
|
||||
}
|
||||
|
||||
fn chat_with_follow(
|
||||
&mut self,
|
||||
_: &ChatWithFollow,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.workspace
|
||||
.update(cx, |this, cx| {
|
||||
this.follow(CollaboratorId::Agent, window, cx)
|
||||
})
|
||||
.log_err();
|
||||
|
||||
self.send(cx);
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &editor::actions::Cancel, _: &mut Window, cx: &mut Context<Self>) {
|
||||
cx.emit(MessageEditorEvent::Cancel)
|
||||
}
|
||||
@@ -1058,7 +1034,6 @@ impl MessageEditor {
|
||||
|
||||
self.editor.update(cx, |message_editor, cx| {
|
||||
message_editor.edit([(cursor_anchor..cursor_anchor, completion.new_text)], cx);
|
||||
message_editor.request_autoscroll(Autoscroll::fit(), cx);
|
||||
});
|
||||
if let Some(confirm) = completion.confirm {
|
||||
confirm(CompletionIntent::Complete, window, cx);
|
||||
@@ -1087,7 +1062,6 @@ impl MessageEditor {
|
||||
) {
|
||||
self.clear(window, cx);
|
||||
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let mut text = String::new();
|
||||
let mut mentions = Vec::new();
|
||||
|
||||
@@ -1100,8 +1074,7 @@ impl MessageEditor {
|
||||
resource: acp::EmbeddedResourceResource::TextResourceContents(resource),
|
||||
..
|
||||
}) => {
|
||||
let Some(mention_uri) = MentionUri::parse(&resource.uri, path_style).log_err()
|
||||
else {
|
||||
let Some(mention_uri) = MentionUri::parse(&resource.uri).log_err() else {
|
||||
continue;
|
||||
};
|
||||
let start = text.len();
|
||||
@@ -1117,13 +1090,11 @@ impl MessageEditor {
|
||||
));
|
||||
}
|
||||
acp::ContentBlock::ResourceLink(resource) => {
|
||||
if let Some(mention_uri) =
|
||||
MentionUri::parse(&resource.uri, path_style).log_err()
|
||||
{
|
||||
if let Some(mention_uri) = MentionUri::parse(&resource.uri).log_err() {
|
||||
let start = text.len();
|
||||
write!(&mut text, "{}", mention_uri.as_link()).ok();
|
||||
let end = text.len();
|
||||
mentions.push((start..end, mention_uri, Mention::Link));
|
||||
mentions.push((start..end, mention_uri, Mention::UriOnly));
|
||||
}
|
||||
}
|
||||
acp::ContentBlock::Image(acp::ImageContent {
|
||||
@@ -1134,7 +1105,7 @@ impl MessageEditor {
|
||||
meta: _,
|
||||
}) => {
|
||||
let mention_uri = if let Some(uri) = uri {
|
||||
MentionUri::parse(&uri, path_style)
|
||||
MentionUri::parse(&uri)
|
||||
} else {
|
||||
Ok(MentionUri::PastedImage)
|
||||
};
|
||||
@@ -1195,17 +1166,6 @@ impl MessageEditor {
|
||||
self.editor.read(cx).text(cx)
|
||||
}
|
||||
|
||||
pub fn set_placeholder_text(
|
||||
&mut self,
|
||||
placeholder: &str,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.set_placeholder_text(placeholder, window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
@@ -1330,7 +1290,6 @@ impl Render for MessageEditor {
|
||||
div()
|
||||
.key_context("MessageEditor")
|
||||
.on_action(cx.listener(Self::chat))
|
||||
.on_action(cx.listener(Self::chat_with_follow))
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.capture_action(cx.listener(Self::paste))
|
||||
.flex_1()
|
||||
@@ -1540,7 +1499,7 @@ pub enum Mention {
|
||||
tracked_buffers: Vec<Entity<Buffer>>,
|
||||
},
|
||||
Image(MentionImage),
|
||||
Link,
|
||||
UriOnly,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
@@ -1557,10 +1516,21 @@ pub struct MentionSet {
|
||||
impl MentionSet {
|
||||
fn contents(
|
||||
&self,
|
||||
prompt_capabilities: &acp::PromptCapabilities,
|
||||
full_mention_content: bool,
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<HashMap<CreaseId, (MentionUri, Mention)>>> {
|
||||
if !prompt_capabilities.embedded_context {
|
||||
let mentions = self
|
||||
.mentions
|
||||
.iter()
|
||||
.map(|(crease_id, (uri, _))| (*crease_id, (uri.clone(), Mention::UriOnly)))
|
||||
.collect();
|
||||
|
||||
return Task::ready(Ok(mentions));
|
||||
}
|
||||
|
||||
let mentions = self.mentions.clone();
|
||||
cx.spawn(async move |cx| {
|
||||
let mut contents = HashMap::default();
|
||||
@@ -1628,7 +1598,6 @@ mod tests {
|
||||
use gpui::{
|
||||
AppContext, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext,
|
||||
};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use lsp::{CompletionContext, CompletionTriggerKind};
|
||||
use project::{CompletionIntent, Project, ProjectPath};
|
||||
use serde_json::json;
|
||||
@@ -1910,8 +1879,10 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
@@ -2084,8 +2055,10 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
@@ -2206,12 +2179,10 @@ mod tests {
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
&[
|
||||
format!("eight.txt b{slash}"),
|
||||
format!("seven.txt b{slash}"),
|
||||
format!("six.txt b{slash}"),
|
||||
format!("five.txt b{slash}"),
|
||||
"Files & Directories".into(),
|
||||
"Symbols".into()
|
||||
format!("eight.txt dir{slash}b{slash}"),
|
||||
format!("seven.txt dir{slash}b{slash}"),
|
||||
format!("six.txt dir{slash}b{slash}"),
|
||||
format!("five.txt dir{slash}b{slash}"),
|
||||
]
|
||||
);
|
||||
editor.set_text("", window, cx);
|
||||
@@ -2239,10 +2210,10 @@ mod tests {
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
&[
|
||||
format!("eight.txt b{slash}"),
|
||||
format!("seven.txt b{slash}"),
|
||||
format!("six.txt b{slash}"),
|
||||
format!("five.txt b{slash}"),
|
||||
format!("eight.txt dir{slash}b{slash}"),
|
||||
format!("seven.txt dir{slash}b{slash}"),
|
||||
format!("six.txt dir{slash}b{slash}"),
|
||||
format!("five.txt dir{slash}b{slash}"),
|
||||
"Files & Directories".into(),
|
||||
"Symbols".into(),
|
||||
"Threads".into(),
|
||||
@@ -2275,7 +2246,7 @@ mod tests {
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
vec![format!("one.txt a{slash}")]
|
||||
vec![format!("one.txt dir{slash}a{slash}")]
|
||||
);
|
||||
});
|
||||
|
||||
@@ -2296,11 +2267,21 @@ mod tests {
|
||||
assert_eq!(fold_ranges(editor, cx).len(), 1);
|
||||
});
|
||||
|
||||
let all_prompt_capabilities = acp::PromptCapabilities {
|
||||
image: true,
|
||||
audio: true,
|
||||
embedded_context: true,
|
||||
meta: None,
|
||||
};
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(false, project.clone(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
@@ -2312,10 +2293,28 @@ mod tests {
|
||||
panic!("Unexpected mentions");
|
||||
};
|
||||
pretty_assertions::assert_eq!(content, "1");
|
||||
pretty_assertions::assert_eq!(
|
||||
uri,
|
||||
&MentionUri::parse(&url_one, PathStyle::local()).unwrap()
|
||||
);
|
||||
pretty_assertions::assert_eq!(uri, &url_one.parse::<MentionUri>().unwrap());
|
||||
}
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor.mention_set().contents(
|
||||
&acp::PromptCapabilities::default(),
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.into_values()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
{
|
||||
let [(uri, Mention::UriOnly)] = contents.as_slice() else {
|
||||
panic!("Unexpected mentions");
|
||||
};
|
||||
pretty_assertions::assert_eq!(uri, &url_one.parse::<MentionUri>().unwrap());
|
||||
}
|
||||
|
||||
cx.simulate_input(" ");
|
||||
@@ -2353,9 +2352,12 @@ mod tests {
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(false, project.clone(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
@@ -2373,10 +2375,7 @@ mod tests {
|
||||
panic!("Unexpected mentions");
|
||||
};
|
||||
pretty_assertions::assert_eq!(content, "8");
|
||||
pretty_assertions::assert_eq!(
|
||||
uri,
|
||||
&MentionUri::parse(&url_eight, PathStyle::local()).unwrap()
|
||||
);
|
||||
pretty_assertions::assert_eq!(uri, &url_eight.parse::<MentionUri>().unwrap());
|
||||
}
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
@@ -2461,7 +2460,7 @@ mod tests {
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) @symbol ")
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(current_completion_labels(editor), &["MySymbol one.txt L1"]);
|
||||
assert_eq!(current_completion_labels(editor), &["MySymbol"]);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -2476,9 +2475,12 @@ mod tests {
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(false, project.clone(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
@@ -2514,7 +2516,7 @@ mod tests {
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) @file x.png", symbol.to_uri())
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(current_completion_labels(editor), &["x.png "]);
|
||||
assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -2524,9 +2526,12 @@ mod tests {
|
||||
// Getting the message contents fails
|
||||
message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(false, project.clone(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.expect_err("Should fail to load x.png");
|
||||
@@ -2553,7 +2558,7 @@ mod tests {
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) @file x.png", symbol.to_uri())
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(current_completion_labels(editor), &["x.png "]);
|
||||
assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -2577,9 +2582,12 @@ mod tests {
|
||||
// Now getting the contents succeeds, because the invalid mention was removed
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(false, project.clone(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -2726,434 +2734,4 @@ mod tests {
|
||||
_ => panic!("Expected Text mention for small file"),
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_insert_thread_summary(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
cx.update(LanguageModelRegistry::test);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree("/project", json!({"file": ""})).await;
|
||||
let project = Project::test(fs, [Path::new(path!("/project"))], cx).await;
|
||||
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
|
||||
let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
|
||||
let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
|
||||
|
||||
// Create a thread metadata to insert as summary
|
||||
let thread_metadata = agent::DbThreadMetadata {
|
||||
id: acp::SessionId("thread-123".into()),
|
||||
title: "Previous Conversation".into(),
|
||||
updated_at: chrono::Utc::now(),
|
||||
};
|
||||
|
||||
let message_editor = cx.update(|window, cx| {
|
||||
cx.new(|cx| {
|
||||
let mut editor = MessageEditor::new(
|
||||
workspace.downgrade(),
|
||||
project.clone(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
"Test Agent".into(),
|
||||
"Test",
|
||||
EditorMode::AutoHeight {
|
||||
min_lines: 1,
|
||||
max_lines: None,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
editor.insert_thread_summary(thread_metadata.clone(), window, cx);
|
||||
editor
|
||||
})
|
||||
});
|
||||
|
||||
// Construct expected values for verification
|
||||
let expected_uri = MentionUri::Thread {
|
||||
id: thread_metadata.id.clone(),
|
||||
name: thread_metadata.title.to_string(),
|
||||
};
|
||||
let expected_link = format!("[@{}]({})", thread_metadata.title, expected_uri.to_uri());
|
||||
|
||||
message_editor.read_with(cx, |editor, cx| {
|
||||
let text = editor.text(cx);
|
||||
|
||||
assert!(
|
||||
text.contains(&expected_link),
|
||||
"Expected editor text to contain thread mention link.\nExpected substring: {}\nActual text: {}",
|
||||
expected_link,
|
||||
text
|
||||
);
|
||||
|
||||
let mentions = editor.mentions();
|
||||
assert_eq!(
|
||||
mentions.len(),
|
||||
1,
|
||||
"Expected exactly one mention after inserting thread summary"
|
||||
);
|
||||
|
||||
assert!(
|
||||
mentions.contains(&expected_uri),
|
||||
"Expected mentions to contain the thread URI"
|
||||
);
|
||||
});
|
||||
}
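The assertion above checks that `insert_thread_summary` leaves a markdown-style mention of the form `[@<title>](<uri>)` in the editor text. A tiny sketch of building and checking that link shape follows; the `thread_mention_link` helper and the example URI string are assumptions for illustration, not part of the real crate.

```rust
// Hypothetical helper mirroring the `[@title](uri)` link shape asserted above.
fn thread_mention_link(title: &str, uri: &str) -> String {
    format!("[@{title}]({uri})")
}

fn main() {
    // Assumed URI shape, for illustration only.
    let uri = "zed:///agent/thread/thread-123";
    let link = thread_mention_link("Previous Conversation", uri);
    assert_eq!(link, "[@Previous Conversation](zed:///agent/thread/thread-123)");
    assert!(link.starts_with("[@"));
}
```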
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_whitespace_trimming(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree("/project", json!({"file.rs": "fn main() {}"}))
|
||||
.await;
|
||||
let project = Project::test(fs, [Path::new(path!("/project"))], cx).await;
|
||||
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
|
||||
let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
|
||||
let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
|
||||
|
||||
let message_editor = cx.update(|window, cx| {
|
||||
cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace.downgrade(),
|
||||
project.clone(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
"Test Agent".into(),
|
||||
"Test",
|
||||
EditorMode::AutoHeight {
|
||||
min_lines: 1,
|
||||
max_lines: None,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
});
|
||||
let editor = message_editor.update(cx, |message_editor, _| message_editor.editor.clone());
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text(" \u{A0}してhello world ", window, cx);
|
||||
});
|
||||
|
||||
let (content, _) = message_editor
|
||||
.update(cx, |message_editor, cx| message_editor.contents(false, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
content,
|
||||
vec![acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "してhello world".into(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
})]
|
||||
);
|
||||
}
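The trimming test expects leading and trailing whitespace, including the no-break space `\u{A0}`, to be stripped while interior text (here starting with "して") is preserved. Rust's `str::trim` already uses the Unicode `White_Space` property, which covers U+00A0, so a plain `trim` reproduces the asserted behavior; the standalone snippet below is only a sanity check of that property, not the editor's actual code path.

```rust
fn main() {
    let input = "  \u{A0}してhello world  ";
    // `str::trim` strips any char with the Unicode White_Space property,
    // which includes the no-break space U+00A0.
    assert_eq!(input.trim(), "してhello world");
    assert!('\u{A0}'.is_whitespace());
}
```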
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_editor_respects_embedded_context_capability(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
|
||||
let file_content = "fn main() { println!(\"Hello, world!\"); }\n";
|
||||
|
||||
fs.insert_tree(
|
||||
"/project",
|
||||
json!({
|
||||
"src": {
|
||||
"main.rs": file_content,
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, [Path::new(path!("/project"))], cx).await;
|
||||
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
|
||||
let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
|
||||
let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
|
||||
|
||||
let (message_editor, editor) = workspace.update_in(cx, |workspace, window, cx| {
|
||||
let workspace_handle = cx.weak_entity();
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace_handle,
|
||||
project.clone(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
"Test Agent".into(),
|
||||
"Test",
|
||||
EditorMode::AutoHeight {
|
||||
max_lines: None,
|
||||
min_lines: 1,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
workspace.active_pane().update(cx, |pane, cx| {
|
||||
pane.add_item(
|
||||
Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))),
|
||||
true,
|
||||
true,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
message_editor.read(cx).focus_handle(cx).focus(window);
|
||||
let editor = message_editor.read(cx).editor().clone();
|
||||
(message_editor, editor)
|
||||
});
|
||||
|
||||
cx.simulate_input("What is in @file main");
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(editor.text(cx), "What is in @file main");
|
||||
editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx);
|
||||
});
|
||||
|
||||
let content = message_editor
|
||||
.update(cx, |editor, cx| editor.contents(false, cx))
|
||||
.await
|
||||
.unwrap()
|
||||
.0;
|
||||
|
||||
let main_rs_uri = if cfg!(windows) {
|
||||
"file:///C:/project/src/main.rs".to_string()
|
||||
} else {
|
||||
"file:///project/src/main.rs".to_string()
|
||||
};
|
||||
|
||||
// When embedded context is `false` we should get a resource link
|
||||
pretty_assertions::assert_eq!(
|
||||
content,
|
||||
vec![
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "What is in ".to_string(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
}),
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
uri: main_rs_uri.clone(),
|
||||
name: "main.rs".to_string(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
})
|
||||
]
|
||||
);
|
||||
|
||||
message_editor.update(cx, |editor, _cx| {
|
||||
editor.prompt_capabilities.replace(acp::PromptCapabilities {
|
||||
embedded_context: true,
|
||||
..Default::default()
|
||||
})
|
||||
});
|
||||
|
||||
let content = message_editor
|
||||
.update(cx, |editor, cx| editor.contents(false, cx))
|
||||
.await
|
||||
.unwrap()
|
||||
.0;
|
||||
|
||||
// When embedded context is `true` we should get a resource
|
||||
pretty_assertions::assert_eq!(
|
||||
content,
|
||||
vec![
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "What is in ".to_string(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
}),
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
resource: acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents {
|
||||
text: file_content.to_string(),
|
||||
uri: main_rs_uri,
|
||||
mime_type: None,
|
||||
meta: None
|
||||
}
|
||||
),
|
||||
annotations: None,
|
||||
meta: None
|
||||
})
|
||||
]
|
||||
);
|
||||
}
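The test above captures the capability contract: with `embedded_context: false` the file mention is sent as a resource link, and with `embedded_context: true` it is sent as an embedded resource carrying the file text. The sketch below models that branch with simplified stand-in types (`Block`, `build_file_block`); it is not the `agent_client_protocol` API, just the shape of the decision.

```rust
// Simplified stand-ins for the two content-block shapes asserted above.
#[derive(Debug, PartialEq)]
enum Block {
    ResourceLink { uri: String, name: String },
    EmbeddedResource { uri: String, text: String },
}

fn build_file_block(embedded_context: bool, uri: &str, name: &str, text: &str) -> Block {
    if embedded_context {
        // Capability granted: inline the file contents for the agent.
        Block::EmbeddedResource { uri: uri.into(), text: text.into() }
    } else {
        // Capability absent: only reference the file by URI.
        Block::ResourceLink { uri: uri.into(), name: name.into() }
    }
}

fn main() {
    let uri = "file:///project/src/main.rs";
    let text = "fn main() { println!(\"Hello, world!\"); }\n";
    assert_eq!(
        build_file_block(false, uri, "main.rs", text),
        Block::ResourceLink { uri: uri.into(), name: "main.rs".into() }
    );
    assert_eq!(
        build_file_block(true, uri, "main.rs", text),
        Block::EmbeddedResource { uri: uri.into(), text: text.into() }
    );
}
```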
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_autoscroll_after_insert_selections(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"test.txt": "line1\nline2\nline3\nline4\nline5\n",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let workspace = window.root(cx).unwrap();
|
||||
|
||||
let worktree = project.update(cx, |project, cx| {
|
||||
let mut worktrees = project.worktrees(cx).collect::<Vec<_>>();
|
||||
assert_eq!(worktrees.len(), 1);
|
||||
worktrees.pop().unwrap()
|
||||
});
|
||||
let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
|
||||
|
||||
let mut cx = VisualTestContext::from_window(*window, cx);
|
||||
|
||||
// Open a regular editor with the created file, and select a portion of
|
||||
// the text that will be used for the selections that are meant to be
|
||||
// inserted in the agent panel.
|
||||
let editor = workspace
|
||||
.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.open_path(
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: rel_path("test.txt").into(),
|
||||
},
|
||||
None,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.downcast::<Editor>()
|
||||
.unwrap();
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
editor.change_selections(Default::default(), window, cx, |selections| {
|
||||
selections.select_ranges([Point::new(0, 0)..Point::new(0, 5)]);
|
||||
});
|
||||
});
|
||||
|
||||
let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
|
||||
let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
|
||||
|
||||
// Create a new `MessageEditor`. The `EditorMode::full()` has to be used
|
||||
// to ensure we have a fixed viewport, so we can eventually actually
|
||||
// place the cursor outside of the visible area.
|
||||
let message_editor = workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
let workspace_handle = cx.weak_entity();
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace_handle,
|
||||
project.clone(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
"Test Agent".into(),
|
||||
"Test",
|
||||
EditorMode::full(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
workspace.active_pane().update(cx, |pane, cx| {
|
||||
pane.add_item(
|
||||
Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))),
|
||||
true,
|
||||
true,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
message_editor
|
||||
});
|
||||
|
||||
message_editor.update_in(&mut cx, |message_editor, window, cx| {
|
||||
message_editor.editor.update(cx, |editor, cx| {
|
||||
// Update the Agent Panel's Message Editor text to have 100
|
||||
// lines, ensuring that the cursor is set at line 90 and that we
|
||||
// then scroll all the way to the top, so the cursor's position
|
||||
// remains off screen.
|
||||
let mut lines = String::new();
|
||||
for _ in 1..=100 {
|
||||
lines.push_str(&"Another line in the agent panel's message editor\n");
|
||||
}
|
||||
editor.set_text(lines.as_str(), window, cx);
|
||||
editor.change_selections(Default::default(), window, cx, |selections| {
|
||||
selections.select_ranges([Point::new(90, 0)..Point::new(90, 0)]);
|
||||
});
|
||||
editor.set_scroll_position(gpui::Point::new(0., 0.), window, cx);
|
||||
});
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
// Before proceeding, let's assert that the cursor is indeed off screen,
|
||||
// otherwise the rest of the test doesn't make sense.
|
||||
message_editor.update_in(&mut cx, |message_editor, window, cx| {
|
||||
message_editor.editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
let cursor_row = editor.selections.newest::<Point>(&snapshot).head().row;
|
||||
let scroll_top = snapshot.scroll_position().y as u32;
|
||||
let visible_lines = editor.visible_line_count().unwrap() as u32;
|
||||
let visible_range = scroll_top..(scroll_top + visible_lines);
|
||||
|
||||
assert!(!visible_range.contains(&cursor_row));
|
||||
})
|
||||
});
|
||||
|
||||
// Now let's insert the selection in the Agent Panel's editor and
|
||||
// confirm that, after the insertion, the cursor is now in the visible
|
||||
// range.
|
||||
message_editor.update_in(&mut cx, |message_editor, window, cx| {
|
||||
message_editor.insert_selections(window, cx);
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
message_editor.update_in(&mut cx, |message_editor, window, cx| {
|
||||
message_editor.editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
let cursor_row = editor.selections.newest::<Point>(&snapshot).head().row;
|
||||
let scroll_top = snapshot.scroll_position().y as u32;
|
||||
let visible_lines = editor.visible_line_count().unwrap() as u32;
|
||||
let visible_range = scroll_top..(scroll_top + visible_lines);
|
||||
|
||||
assert!(visible_range.contains(&cursor_row));
|
||||
})
|
||||
});
|
||||
}
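Both visibility assertions in this test boil down to the same arithmetic: the cursor row is on screen exactly when it falls in `scroll_top..scroll_top + visible_lines`. A minimal standalone version of that check, using plain integers instead of editor types:

```rust
// Is a given row inside the viewport described by its top row and height?
fn row_is_visible(cursor_row: u32, scroll_top: u32, visible_lines: u32) -> bool {
    (scroll_top..scroll_top + visible_lines).contains(&cursor_row)
}

fn main() {
    // Cursor on row 90 while scrolled to the top of a ~30-line viewport: off screen.
    assert!(!row_is_visible(90, 0, 30));
    // After autoscroll brings the cursor into view, the check passes.
    assert!(row_is_visible(90, 75, 30));
}
```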
|
||||
}
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
use acp_thread::AgentSessionModes;
|
||||
use agent_client_protocol as acp;
|
||||
use agent_servers::AgentServer;
|
||||
use agent_settings::AgentSettings;
|
||||
use fs::Fs;
|
||||
use gpui::{Context, Entity, FocusHandle, WeakEntity, Window, prelude::*};
|
||||
use settings::Settings as _;
|
||||
use std::{rc::Rc, sync::Arc};
|
||||
use ui::{
|
||||
Button, ContextMenu, ContextMenuEntry, DocumentationEdge, DocumentationSide, KeyBinding,
|
||||
@@ -86,14 +84,6 @@ impl ModeSelector {
|
||||
let current_mode = self.connection.current_mode();
|
||||
let default_mode = self.agent_server.default_mode(cx);
|
||||
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
let side = match settings.dock {
|
||||
settings::DockPosition::Left => DocumentationSide::Right,
|
||||
settings::DockPosition::Bottom | settings::DockPosition::Right => {
|
||||
DocumentationSide::Left
|
||||
}
|
||||
};
|
||||
|
||||
for mode in all_modes {
|
||||
let is_selected = &mode.id == &current_mode;
|
||||
let is_default = Some(&mode.id) == default_mode.as_ref();
|
||||
@@ -101,7 +91,7 @@ impl ModeSelector {
|
||||
.toggleable(IconPosition::End, is_selected);
|
||||
|
||||
let entry = if let Some(description) = &mode.description {
|
||||
entry.documentation_aside(side, DocumentationEdge::Bottom, {
|
||||
entry.documentation_aside(DocumentationSide::Left, DocumentationEdge::Bottom, {
|
||||
let description = description.clone();
|
||||
|
||||
move |cx| {
|
||||
|
||||
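The hunk above replaces the hard-coded `DocumentationSide::Left` with a side derived from the agent dock position, so the mode-description aside opens away from the dock edge. Here is a self-contained sketch of that mapping, using stand-in enums rather than the real `settings` and `ui` types:

```rust
// Stand-in enums; the real types live in the `settings` and `ui` crates.
enum DockPosition { Left, Bottom, Right }

#[derive(Debug, PartialEq)]
enum DocumentationSide { Left, Right }

fn aside_side(dock: DockPosition) -> DocumentationSide {
    match dock {
        // Panel docked on the left: open the aside to the right, into free space.
        DockPosition::Left => DocumentationSide::Right,
        // Docked at the bottom or on the right: open it to the left.
        DockPosition::Bottom | DockPosition::Right => DocumentationSide::Left,
    }
}

fn main() {
    assert_eq!(aside_side(DockPosition::Left), DocumentationSide::Right);
    assert_eq!(aside_side(DockPosition::Right), DocumentationSide::Left);
}
```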
@@ -1,6 +1,6 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use acp_thread::{AgentModelInfo, AgentModelSelector};
|
||||
use acp_thread::AgentModelSelector;
|
||||
use gpui::{Entity, FocusHandle};
|
||||
use picker::popover_menu::PickerPopoverMenu;
|
||||
use ui::{
|
||||
@@ -36,8 +36,12 @@ impl AcpModelSelectorPopover {
|
||||
self.menu_handle.toggle(window, cx);
|
||||
}
|
||||
|
||||
pub fn active_model<'a>(&self, cx: &'a App) -> Option<&'a AgentModelInfo> {
|
||||
self.selector.read(cx).delegate.active_model()
|
||||
pub fn active_model_name(&self, cx: &App) -> Option<SharedString> {
|
||||
self.selector
|
||||
.read(cx)
|
||||
.delegate
|
||||
.active_model()
|
||||
.map(|model| model.name.clone())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -450,30 +450,31 @@ impl Render for AcpThreadHistory {
|
||||
v_flex()
|
||||
.key_context("ThreadHistory")
|
||||
.size_full()
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.on_action(cx.listener(Self::select_previous))
|
||||
.on_action(cx.listener(Self::select_next))
|
||||
.on_action(cx.listener(Self::select_first))
|
||||
.on_action(cx.listener(Self::select_last))
|
||||
.on_action(cx.listener(Self::confirm))
|
||||
.on_action(cx.listener(Self::remove_selected_thread))
|
||||
.child(
|
||||
h_flex()
|
||||
.h(px(41.)) // Match the toolbar perfectly
|
||||
.w_full()
|
||||
.py_1()
|
||||
.px_2()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
Icon::new(IconName::MagnifyingGlass)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
.child(self.search_editor.clone()),
|
||||
)
|
||||
.when(!self.history_store.read(cx).is_empty(cx), |parent| {
|
||||
parent.child(
|
||||
h_flex()
|
||||
.h(px(41.)) // Match the toolbar perfectly
|
||||
.w_full()
|
||||
.py_1()
|
||||
.px_2()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
Icon::new(IconName::MagnifyingGlass)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
.child(self.search_editor.clone()),
|
||||
)
|
||||
})
|
||||
.child({
|
||||
let view = v_flex()
|
||||
.id("list-container")
|
||||
@@ -482,15 +483,19 @@ impl Render for AcpThreadHistory {
|
||||
.flex_grow();
|
||||
|
||||
if self.history_store.read(cx).is_empty(cx) {
|
||||
view.justify_center().items_center().child(
|
||||
Label::new("You don't have any past threads yet.")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
} else if self.search_produced_no_matches() {
|
||||
view.justify_center()
|
||||
.items_center()
|
||||
.child(Label::new("No threads match your search.").size(LabelSize::Small))
|
||||
.child(
|
||||
h_flex().w_full().justify_center().child(
|
||||
Label::new("You don't have any past threads yet.")
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
)
|
||||
} else if self.search_produced_no_matches() {
|
||||
view.justify_center().child(
|
||||
h_flex().w_full().justify_center().child(
|
||||
Label::new("No threads match your search.").size(LabelSize::Small),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
view.child(
|
||||
uniform_list(
|
||||
@@ -667,7 +672,7 @@ impl EntryTimeFormat {
                timezone,
                time_format::TimestampFormat::EnhancedAbsolute,
            ),
            EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)),
            EntryTimeFormat::TimeOnly => time_format::format_time(timestamp),
        }
    }
}
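The change in the `TimeOnly` branch converts the timestamp into the caller's timezone before formatting, instead of formatting the raw (typically UTC) value. The snippet below shows the difference with plain seconds-of-day arithmetic; it only illustrates the offset adjustment and is not the `time_format` crate's implementation.

```rust
// Illustration only: shift a UTC seconds-of-day value by a UTC offset before
// rendering it, mirroring the effect of `timestamp.to_offset(timezone)`.
fn local_seconds(utc_seconds_of_day: u32, offset_seconds: i32) -> u32 {
    const DAY: i64 = 24 * 60 * 60;
    ((utc_seconds_of_day as i64 + offset_seconds as i64).rem_euclid(DAY)) as u32
}

fn main() {
    // 14:30 UTC shown in UTC-5 should read 09:30, not 14:30.
    let utc = 14 * 3600 + 30 * 60;
    assert_eq!(local_seconds(utc, -5 * 3600), 9 * 3600 + 30 * 60);
}
```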
|
||||
|
||||
File diff suppressed because it is too large
@@ -23,17 +23,15 @@ use language::LanguageRegistry;
|
||||
use language_model::{
|
||||
LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID,
|
||||
};
|
||||
use language_models::AllLanguageModelSettings;
|
||||
use notifications::status_toast::{StatusToast, ToastIcon};
|
||||
use project::{
|
||||
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
|
||||
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
|
||||
};
|
||||
use settings::{Settings, SettingsStore, update_settings_file};
|
||||
use settings::{SettingsStore, update_settings_file};
|
||||
use ui::{
|
||||
Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor,
|
||||
ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch,
|
||||
SwitchColor, Tooltip, WithScrollbar, prelude::*,
|
||||
Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex,
|
||||
Indicator, PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{Workspace, create_and_open_local_file};
|
||||
@@ -154,42 +152,7 @@ pub enum AssistantConfigurationEvent {
|
||||
|
||||
impl EventEmitter<AssistantConfigurationEvent> for AgentConfiguration {}
|
||||
|
||||
enum AgentIcon {
|
||||
Name(IconName),
|
||||
Path(SharedString),
|
||||
}
|
||||
|
||||
impl AgentConfiguration {
|
||||
fn render_section_title(
|
||||
&mut self,
|
||||
title: impl Into<SharedString>,
|
||||
description: impl Into<SharedString>,
|
||||
menu: AnyElement,
|
||||
) -> impl IntoElement {
|
||||
h_flex()
|
||||
.p_4()
|
||||
.pb_0()
|
||||
.mb_2p5()
|
||||
.items_start()
|
||||
.justify_between()
|
||||
.child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.pr_1()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.flex_wrap()
|
||||
.child(Headline::new(title.into()))
|
||||
.child(menu),
|
||||
)
|
||||
.child(Label::new(description.into()).color(Color::Muted)),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_provider_configuration_block(
|
||||
&mut self,
|
||||
provider: &Arc<dyn LanguageModelProvider>,
|
||||
@@ -324,7 +287,7 @@ impl AgentConfiguration {
|
||||
"Start New Thread",
|
||||
)
|
||||
.full_width()
|
||||
.style(ButtonStyle::Outlined)
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Thread)
|
||||
@@ -340,122 +303,89 @@ impl AgentConfiguration {
|
||||
}
|
||||
})),
|
||||
)
|
||||
})
|
||||
.when(
|
||||
is_expanded && is_removable_provider(&provider.id(), cx),
|
||||
|this| {
|
||||
this.child(
|
||||
Button::new(
|
||||
SharedString::from(format!("delete-provider-{provider_id}")),
|
||||
"Remove Provider",
|
||||
)
|
||||
.full_width()
|
||||
.style(ButtonStyle::Outlined)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Trash)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click(cx.listener({
|
||||
let provider = provider.clone();
|
||||
move |this, _event, window, cx| {
|
||||
this.delete_provider(provider.clone(), window, cx);
|
||||
}
|
||||
})),
|
||||
)
|
||||
},
|
||||
),
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn delete_provider(
|
||||
&mut self,
|
||||
provider: Arc<dyn LanguageModelProvider>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let fs = self.fs.clone();
|
||||
let provider_id = provider.id();
|
||||
|
||||
cx.spawn_in(window, async move |_, cx| {
|
||||
cx.update(|_window, cx| {
|
||||
update_settings_file(fs.clone(), cx, {
|
||||
let provider_id = provider_id.clone();
|
||||
move |settings, _| {
|
||||
if let Some(ref mut openai_compatible) = settings
|
||||
.language_models
|
||||
.as_mut()
|
||||
.and_then(|lm| lm.openai_compatible.as_mut())
|
||||
{
|
||||
let key_to_remove: Arc<str> = Arc::from(provider_id.0.as_ref());
|
||||
openai_compatible.remove(&key_to_remove);
|
||||
}
|
||||
}
|
||||
});
|
||||
})
|
||||
.log_err();
|
||||
|
||||
cx.update(|_window, cx| {
|
||||
LanguageModelRegistry::global(cx).update(cx, {
|
||||
let provider_id = provider_id.clone();
|
||||
move |registry, cx| {
|
||||
registry.unregister_provider(provider_id, cx);
|
||||
}
|
||||
})
|
||||
})
|
||||
.log_err();
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
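`delete_provider` only removes the entry from the `openai_compatible` map in the settings file and then unregisters the provider from the registry; built-in providers are untouched. The core of the settings edit is a keyed removal, sketched here with a plain `BTreeMap` standing in for Zed's settings types:

```rust
use std::{collections::BTreeMap, sync::Arc};

// Stand-in for the `openai_compatible` section of the language-model settings.
#[derive(Default)]
struct LanguageModelSettings {
    openai_compatible: Option<BTreeMap<Arc<str>, String>>,
}

fn remove_provider(settings: &mut LanguageModelSettings, provider_id: &str) {
    if let Some(map) = settings.openai_compatible.as_mut() {
        // Keys are `Arc<str>`, so build one from the provider id before removing.
        let key: Arc<str> = Arc::from(provider_id);
        map.remove(&key);
    }
}

fn main() {
    let mut settings = LanguageModelSettings {
        openai_compatible: Some(BTreeMap::from([(Arc::from("my-proxy"), "config".to_string())])),
    };
    remove_provider(&mut settings, "my-proxy");
    assert!(settings.openai_compatible.unwrap().is_empty());
}
```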
|
||||
|
||||
fn render_provider_configuration_section(
|
||||
&mut self,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement {
|
||||
let providers = LanguageModelRegistry::read_global(cx).providers();
|
||||
let popover_menu = PopoverMenu::new("add-provider-popover")
|
||||
.trigger(
|
||||
Button::new("add-provider", "Add Provider")
|
||||
.style(ButtonStyle::Outlined)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small),
|
||||
)
|
||||
.anchor(gpui::Corner::TopRight)
|
||||
.menu({
|
||||
let workspace = self.workspace.clone();
|
||||
move |window, cx| {
|
||||
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
|
||||
menu.header("Compatible APIs").entry("OpenAI", None, {
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
AddLlmProviderModal::toggle(
|
||||
LlmCompatibleProvider::OpenAi,
|
||||
workspace,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
})
|
||||
}))
|
||||
}
|
||||
});
|
||||
|
||||
v_flex()
|
||||
.w_full()
|
||||
.child(self.render_section_title(
|
||||
"LLM Providers",
|
||||
"Add at least one provider to use AI-powered features with Zed's native agent.",
|
||||
popover_menu.into_any_element(),
|
||||
))
|
||||
.child(
|
||||
h_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.pb_0()
|
||||
.mb_2p5()
|
||||
.items_start()
|
||||
.justify_between()
|
||||
.child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.pr_1()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.child(Headline::new("LLM Providers"))
|
||||
.child(
|
||||
PopoverMenu::new("add-provider-popover")
|
||||
.trigger(
|
||||
Button::new("add-provider", "Add Provider")
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small),
|
||||
)
|
||||
.anchor(gpui::Corner::TopRight)
|
||||
.menu({
|
||||
let workspace = self.workspace.clone();
|
||||
move |window, cx| {
|
||||
Some(ContextMenu::build(
|
||||
window,
|
||||
cx,
|
||||
|menu, _window, _cx| {
|
||||
menu.header("Compatible APIs").entry(
|
||||
"OpenAI",
|
||||
None,
|
||||
{
|
||||
let workspace =
|
||||
workspace.clone();
|
||||
move |window, cx| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
AddLlmProviderModal::toggle(
|
||||
LlmCompatibleProvider::OpenAi,
|
||||
workspace,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
},
|
||||
)
|
||||
},
|
||||
))
|
||||
}
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
Label::new("Add at least one provider to use AI-powered features with Zed's native agent.")
|
||||
.color(Color::Muted),
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.w_full()
|
||||
@@ -534,7 +464,8 @@ impl AgentConfiguration {
|
||||
let add_server_popover = PopoverMenu::new("add-server-popover")
|
||||
.trigger(
|
||||
Button::new("add-server", "Add Server")
|
||||
.style(ButtonStyle::Outlined)
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
@@ -567,57 +498,61 @@ impl AgentConfiguration {
|
||||
});
|
||||
|
||||
v_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_2()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(self.render_section_title(
|
||||
"Model Context Protocol (MCP) Servers",
|
||||
"All MCP servers connected directly or via a Zed extension.",
|
||||
add_server_popover.into_any_element(),
|
||||
))
|
||||
.child(
|
||||
v_flex()
|
||||
.pl_4()
|
||||
.pb_4()
|
||||
.pr_5()
|
||||
h_flex()
|
||||
.w_full()
|
||||
.items_start()
|
||||
.justify_between()
|
||||
.gap_1()
|
||||
.map(|mut parent| {
|
||||
if context_server_ids.is_empty() {
|
||||
parent.child(
|
||||
h_flex()
|
||||
.p_4()
|
||||
.justify_center()
|
||||
.border_1()
|
||||
.border_dashed()
|
||||
.border_color(cx.theme().colors().border.opacity(0.6))
|
||||
.rounded_sm()
|
||||
.child(
|
||||
Label::new("No MCP servers added yet.")
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
for (index, context_server_id) in
|
||||
context_server_ids.into_iter().enumerate()
|
||||
{
|
||||
if index > 0 {
|
||||
parent = parent.child(
|
||||
Divider::horizontal()
|
||||
.color(DividerColor::BorderFaded)
|
||||
.into_any_element(),
|
||||
);
|
||||
}
|
||||
parent = parent.child(self.render_context_server(
|
||||
context_server_id,
|
||||
window,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
parent
|
||||
}
|
||||
}),
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(Headline::new("Model Context Protocol (MCP) Servers"))
|
||||
.child(
|
||||
Label::new(
|
||||
"All MCP servers connected directly or via a Zed extension.",
|
||||
)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(add_server_popover),
|
||||
)
|
||||
.child(v_flex().w_full().gap_1().map(|mut parent| {
|
||||
if context_server_ids.is_empty() {
|
||||
parent.child(
|
||||
h_flex()
|
||||
.p_4()
|
||||
.justify_center()
|
||||
.border_1()
|
||||
.border_dashed()
|
||||
.border_color(cx.theme().colors().border.opacity(0.6))
|
||||
.rounded_sm()
|
||||
.child(
|
||||
Label::new("No MCP servers added yet.")
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
for (index, context_server_id) in context_server_ids.into_iter().enumerate() {
|
||||
if index > 0 {
|
||||
parent = parent.child(
|
||||
Divider::horizontal()
|
||||
.color(DividerColor::BorderFaded)
|
||||
.into_any_element(),
|
||||
);
|
||||
}
|
||||
parent =
|
||||
parent.child(self.render_context_server(context_server_id, window, cx));
|
||||
}
|
||||
parent
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
fn render_context_server(
|
||||
@@ -638,13 +573,15 @@ impl AgentConfiguration {
|
||||
|
||||
let is_running = matches!(server_status, ContextServerStatus::Running);
|
||||
let item_id = SharedString::from(context_server_id.0.clone());
|
||||
// Servers without a configuration can only be provided by extensions.
|
||||
let provided_by_extension = server_configuration.is_none_or(|config| {
|
||||
matches!(
|
||||
config.as_ref(),
|
||||
ContextServerConfiguration::Extension { .. }
|
||||
)
|
||||
});
|
||||
let is_from_extension = server_configuration
|
||||
.as_ref()
|
||||
.map(|config| {
|
||||
matches!(
|
||||
config.as_ref(),
|
||||
ContextServerConfiguration::Extension { .. }
|
||||
)
|
||||
})
|
||||
.unwrap_or(false);
|
||||
|
||||
let error = if let ContextServerStatus::Error(error) = server_status.clone() {
|
||||
Some(error)
|
||||
@@ -658,14 +595,14 @@ impl AgentConfiguration {
|
||||
.tools_for_server(&context_server_id)
|
||||
.count();
|
||||
|
||||
let (source_icon, source_tooltip) = if provided_by_extension {
|
||||
let (source_icon, source_tooltip) = if is_from_extension {
|
||||
(
|
||||
IconName::ZedSrcExtension,
|
||||
IconName::ZedMcpExtension,
|
||||
"This MCP server was installed from an extension.",
|
||||
)
|
||||
} else {
|
||||
(
|
||||
IconName::ZedSrcCustom,
|
||||
IconName::ZedMcpCustom,
|
||||
"This custom MCP server was installed directly.",
|
||||
)
|
||||
};
|
||||
@@ -708,6 +645,7 @@ impl AgentConfiguration {
|
||||
let fs = self.fs.clone();
|
||||
let context_server_id = context_server_id.clone();
|
||||
let language_registry = self.language_registry.clone();
|
||||
let context_server_store = self.context_server_store.clone();
|
||||
let workspace = self.workspace.clone();
|
||||
let context_server_registry = self.context_server_registry.clone();
|
||||
|
||||
@@ -749,10 +687,23 @@ impl AgentConfiguration {
|
||||
.entry("Uninstall", None, {
|
||||
let fs = fs.clone();
|
||||
let context_server_id = context_server_id.clone();
|
||||
let context_server_store = context_server_store.clone();
|
||||
let workspace = workspace.clone();
|
||||
move |_, cx| {
|
||||
let is_provided_by_extension = context_server_store
|
||||
.read(cx)
|
||||
.configuration_for_server(&context_server_id)
|
||||
.as_ref()
|
||||
.map(|config| {
|
||||
matches!(
|
||||
config.as_ref(),
|
||||
ContextServerConfiguration::Extension { .. }
|
||||
)
|
||||
})
|
||||
.unwrap_or(false);
|
||||
|
||||
let uninstall_extension_task = match (
|
||||
provided_by_extension,
|
||||
is_provided_by_extension,
|
||||
resolve_extension_for_context_server(&context_server_id, cx),
|
||||
) {
|
||||
(true, Some((id, manifest))) => {
|
||||
@@ -933,9 +884,9 @@ impl AgentConfiguration {
|
||||
}
|
||||
|
||||
fn render_agent_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let agent_server_store = self.agent_server_store.read(cx);
|
||||
|
||||
let user_defined_agents = agent_server_store
|
||||
let user_defined_agents = self
|
||||
.agent_server_store
|
||||
.read(cx)
|
||||
.external_agents()
|
||||
.filter(|name| {
|
||||
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME
|
||||
@@ -946,121 +897,102 @@ impl AgentConfiguration {
|
||||
let user_defined_agents = user_defined_agents
|
||||
.into_iter()
|
||||
.map(|name| {
|
||||
let icon = if let Some(icon_path) = agent_server_store.agent_icon(&name) {
|
||||
AgentIcon::Path(icon_path)
|
||||
} else {
|
||||
AgentIcon::Name(IconName::Ai)
|
||||
};
|
||||
self.render_agent_server(icon, name, true)
|
||||
self.render_agent_server(IconName::Ai, name)
|
||||
.into_any_element()
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let add_agent_button = Button::new("add-agent", "Add Agent")
|
||||
.style(ButtonStyle::Outlined)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click(move |_, window, cx| {
|
||||
if let Some(workspace) = window.root().flatten() {
|
||||
let workspace = workspace.downgrade();
|
||||
window
|
||||
.spawn(cx, async |cx| {
|
||||
open_new_agent_servers_entry_in_settings_editor(workspace, cx).await
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
});
|
||||
|
||||
v_flex()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
v_flex()
|
||||
.child(self.render_section_title(
|
||||
"External Agents",
|
||||
"All agents connected through the Agent Client Protocol.",
|
||||
add_agent_button.into_any_element(),
|
||||
))
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_2()
|
||||
.child(
|
||||
v_flex()
|
||||
.p_4()
|
||||
.pt_0()
|
||||
.gap_2()
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiClaude),
|
||||
"Claude Code",
|
||||
false,
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiOpenAi),
|
||||
"Codex CLI",
|
||||
false,
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiGemini),
|
||||
"Gemini CLI",
|
||||
false,
|
||||
))
|
||||
.map(|mut parent| {
|
||||
for agent in user_defined_agents {
|
||||
parent = parent
|
||||
.child(
|
||||
Divider::horizontal().color(DividerColor::BorderFaded),
|
||||
)
|
||||
.child(agent);
|
||||
}
|
||||
parent
|
||||
}),
|
||||
),
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.pr_1()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.child(Headline::new("External Agents"))
|
||||
.child(
|
||||
Button::new("add-agent", "Add Agent")
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click(
|
||||
move |_, window, cx| {
|
||||
if let Some(workspace) = window.root().flatten() {
|
||||
let workspace = workspace.downgrade();
|
||||
window
|
||||
.spawn(cx, async |cx| {
|
||||
open_new_agent_servers_entry_in_settings_editor(
|
||||
workspace,
|
||||
cx,
|
||||
).await
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
),
|
||||
)
|
||||
)
|
||||
.child(
|
||||
Label::new(
|
||||
"All agents connected through the Agent Client Protocol.",
|
||||
)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiClaude,
|
||||
"Claude Code",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiOpenAi,
|
||||
"Codex",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiGemini,
|
||||
"Gemini CLI",
|
||||
))
|
||||
.map(|mut parent| {
|
||||
for agent in user_defined_agents {
|
||||
parent = parent.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(agent);
|
||||
}
|
||||
parent
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
fn render_agent_server(
|
||||
&self,
|
||||
icon: AgentIcon,
|
||||
icon: IconName,
|
||||
name: impl Into<SharedString>,
|
||||
external: bool,
|
||||
) -> impl IntoElement {
|
||||
let name = name.into();
|
||||
let icon = match icon {
|
||||
AgentIcon::Name(icon_name) => Icon::new(icon_name)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
AgentIcon::Path(icon_path) => Icon::from_external_svg(icon_path)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
};
|
||||
|
||||
let tooltip_id = SharedString::new(format!("agent-source-{}", name));
|
||||
let tooltip_message = format!("The {} agent was installed from an extension.", name);
|
||||
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(icon)
|
||||
.child(Label::new(name))
|
||||
.when(external, |this| {
|
||||
this.child(
|
||||
div()
|
||||
.id(tooltip_id)
|
||||
.flex_none()
|
||||
.tooltip(Tooltip::text(tooltip_message))
|
||||
.child(
|
||||
Icon::new(IconName::ZedSrcExtension)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
Icon::new(IconName::Check)
|
||||
.color(Color::Success)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
h_flex().gap_1p5().justify_between().child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(Icon::new(icon).size(IconSize::Small).color(Color::Muted))
|
||||
.child(Label::new(name.into()))
|
||||
.child(
|
||||
Icon::new(IconName::Check)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Success),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1289,14 +1221,3 @@ fn find_text_in_buffer(
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
// OpenAI-compatible providers are user-configured and can be removed,
|
||||
// whereas built-in providers (like Anthropic, OpenAI, Google, etc.) can't.
|
||||
//
|
||||
// If in the future we have more "API-compatible-type" of providers,
|
||||
// they should be included here as removable providers.
|
||||
fn is_removable_provider(provider_id: &LanguageModelProviderId, cx: &App) -> bool {
|
||||
AllLanguageModelSettings::get_global(cx)
|
||||
.openai_compatible
|
||||
.contains_key(provider_id.0.as_ref())
|
||||
}
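As the comment spells out, `is_removable_provider` encodes a simple rule: a provider can be removed only if its id is a key in the user-configured `openai_compatible` settings map. A minimal standalone version of that membership check, with a plain `BTreeMap` in place of `AllLanguageModelSettings`:

```rust
use std::collections::BTreeMap;

// Stand-in settings: only user-added, OpenAI-compatible providers live in this map.
fn is_removable(openai_compatible: &BTreeMap<String, String>, provider_id: &str) -> bool {
    openai_compatible.contains_key(provider_id)
}

fn main() {
    let mut compatible = BTreeMap::new();
    compatible.insert("my-proxy".to_string(), "config".to_string());
    // User-configured provider: removable.
    assert!(is_removable(&compatible, "my-proxy"));
    // Built-in providers (e.g. "anthropic") never appear in this map.
    assert!(!is_removable(&compatible, "anthropic"));
}
```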
|
||||
|
||||
@@ -515,14 +515,16 @@ impl Render for AddLlmProviderModal {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use editor::EditorSettings;
|
||||
use fs::FakeFs;
|
||||
use gpui::{TestAppContext, VisualTestContext};
|
||||
use language::language_settings;
|
||||
use language_model::{
|
||||
LanguageModelProviderId, LanguageModelProviderName,
|
||||
fake_provider::FakeLanguageModelProvider,
|
||||
};
|
||||
use project::Project;
|
||||
use settings::SettingsStore;
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use util::path;
|
||||
|
||||
#[gpui::test]
|
||||
@@ -728,9 +730,13 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let store = SettingsStore::test(cx);
|
||||
cx.set_global(store);
|
||||
workspace::init_settings(cx);
|
||||
Project::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
|
||||
language_settings::init(cx);
|
||||
EditorSettings::register(cx);
|
||||
language_model::init_settings(cx);
|
||||
language_models::init_settings(cx);
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
|
||||
@@ -7,10 +7,8 @@ use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profil
|
||||
use editor::Editor;
|
||||
use fs::Fs;
|
||||
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*};
|
||||
use language_model::{LanguageModel, LanguageModelRegistry};
|
||||
use settings::{
|
||||
LanguageModelProviderSetting, LanguageModelSelection, Settings as _, update_settings_file,
|
||||
};
|
||||
use language_model::LanguageModel;
|
||||
use settings::Settings as _;
|
||||
use ui::{
|
||||
KeyBinding, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry, prelude::*,
|
||||
};
|
||||
@@ -18,7 +16,6 @@ use workspace::{ModalView, Workspace};
|
||||
|
||||
use crate::agent_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
|
||||
use crate::agent_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
|
||||
use crate::language_model_selector::{LanguageModelSelector, language_model_selector};
|
||||
use crate::{AgentPanel, ManageProfiles};
|
||||
|
||||
enum Mode {
|
||||
@@ -35,11 +32,6 @@ enum Mode {
|
||||
tool_picker: Entity<ToolPicker>,
|
||||
_subscription: Subscription,
|
||||
},
|
||||
ConfigureDefaultModel {
|
||||
profile_id: AgentProfileId,
|
||||
model_picker: Entity<LanguageModelSelector>,
|
||||
_subscription: Subscription,
|
||||
},
|
||||
}
|
||||
|
||||
impl Mode {
|
||||
@@ -91,7 +83,6 @@ pub struct ChooseProfileMode {
|
||||
pub struct ViewProfileMode {
|
||||
profile_id: AgentProfileId,
|
||||
fork_profile: NavigableEntry,
|
||||
configure_default_model: NavigableEntry,
|
||||
configure_tools: NavigableEntry,
|
||||
configure_mcps: NavigableEntry,
|
||||
cancel_item: NavigableEntry,
|
||||
@@ -189,7 +180,6 @@ impl ManageProfilesModal {
|
||||
self.mode = Mode::ViewProfile(ViewProfileMode {
|
||||
profile_id,
|
||||
fork_profile: NavigableEntry::focusable(cx),
|
||||
configure_default_model: NavigableEntry::focusable(cx),
|
||||
configure_tools: NavigableEntry::focusable(cx),
|
||||
configure_mcps: NavigableEntry::focusable(cx),
|
||||
cancel_item: NavigableEntry::focusable(cx),
|
||||
@@ -197,83 +187,6 @@ impl ManageProfilesModal {
|
||||
self.focus_handle(cx).focus(window);
|
||||
}
|
||||
|
||||
fn configure_default_model(
|
||||
&mut self,
|
||||
profile_id: AgentProfileId,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let fs = self.fs.clone();
|
||||
let profile_id_for_closure = profile_id.clone();
|
||||
|
||||
let model_picker = cx.new(|cx| {
|
||||
let fs = fs.clone();
|
||||
let profile_id = profile_id_for_closure.clone();
|
||||
|
||||
language_model_selector(
|
||||
{
|
||||
let profile_id = profile_id.clone();
|
||||
move |cx| {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
|
||||
settings
|
||||
.profiles
|
||||
.get(&profile_id)
|
||||
.and_then(|profile| profile.default_model.as_ref())
|
||||
.and_then(|selection| {
|
||||
let registry = LanguageModelRegistry::read_global(cx);
|
||||
let provider_id = language_model::LanguageModelProviderId(
|
||||
gpui::SharedString::from(selection.provider.0.clone()),
|
||||
);
|
||||
let provider = registry.provider(&provider_id)?;
|
||||
let model = provider
|
||||
.provided_models(cx)
|
||||
.iter()
|
||||
.find(|m| m.id().0 == selection.model.as_str())?
|
||||
.clone();
|
||||
Some(language_model::ConfiguredModel { provider, model })
|
||||
})
|
||||
}
|
||||
},
|
||||
move |model, cx| {
|
||||
let provider = model.provider_id().0.to_string();
|
||||
let model_id = model.id().0.to_string();
|
||||
let profile_id = profile_id.clone();
|
||||
|
||||
update_settings_file(fs.clone(), cx, move |settings, _cx| {
|
||||
let agent_settings = settings.agent.get_or_insert_default();
|
||||
if let Some(profiles) = agent_settings.profiles.as_mut() {
|
||||
if let Some(profile) = profiles.get_mut(profile_id.0.as_ref()) {
|
||||
profile.default_model = Some(LanguageModelSelection {
|
||||
provider: LanguageModelProviderSetting(provider.clone()),
|
||||
model: model_id.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
false, // Do not use popover styles for the model picker
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.modal(false)
|
||||
});
|
||||
|
||||
let dismiss_subscription = cx.subscribe_in(&model_picker, window, {
|
||||
let profile_id = profile_id.clone();
|
||||
move |this, _picker, _: &DismissEvent, window, cx| {
|
||||
this.view_profile(profile_id.clone(), window, cx);
|
||||
}
|
||||
});
|
||||
|
||||
self.mode = Mode::ConfigureDefaultModel {
|
||||
profile_id,
|
||||
model_picker,
|
||||
_subscription: dismiss_subscription,
|
||||
};
|
||||
self.focus_handle(cx).focus(window);
|
||||
}
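The confirm callback above persists the picked model by walking `settings.agent`, then `profiles`, then the profile itself, and writing a provider/model pair into `default_model`. A self-contained sketch of that nested update, with plain structs standing in for the settings types:

```rust
use std::collections::BTreeMap;

// Stand-ins for the settings structures touched by the model-picker callback.
#[derive(Default)]
struct ModelSelection { provider: String, model: String }
#[derive(Default)]
struct Profile { default_model: Option<ModelSelection> }
#[derive(Default)]
struct AgentSettings { profiles: Option<BTreeMap<String, Profile>> }
#[derive(Default)]
struct SettingsFile { agent: Option<AgentSettings> }

fn set_profile_default_model(settings: &mut SettingsFile, profile_id: &str, provider: &str, model: &str) {
    // Create the agent section if missing, then only update an existing profile.
    let agent = settings.agent.get_or_insert_with(AgentSettings::default);
    if let Some(profiles) = agent.profiles.as_mut() {
        if let Some(profile) = profiles.get_mut(profile_id) {
            profile.default_model = Some(ModelSelection {
                provider: provider.to_string(),
                model: model.to_string(),
            });
        }
    }
}

fn main() {
    let mut settings = SettingsFile {
        agent: Some(AgentSettings {
            profiles: Some(BTreeMap::from([("write".to_string(), Profile::default())])),
        }),
    };
    set_profile_default_model(&mut settings, "write", "anthropic", "claude-sonnet");
    let profiles = settings.agent.unwrap().profiles.unwrap();
    let selection = profiles["write"].default_model.as_ref().unwrap();
    assert_eq!(selection.provider, "anthropic");
    assert_eq!(selection.model, "claude-sonnet");
}
```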
|
||||
|
||||
fn configure_mcp_tools(
|
||||
&mut self,
|
||||
profile_id: AgentProfileId,
|
||||
@@ -364,7 +277,6 @@ impl ManageProfilesModal {
|
||||
Mode::ViewProfile(_) => {}
|
||||
Mode::ConfigureTools { .. } => {}
|
||||
Mode::ConfigureMcps { .. } => {}
|
||||
Mode::ConfigureDefaultModel { .. } => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -387,9 +299,6 @@ impl ManageProfilesModal {
|
||||
Mode::ConfigureMcps { profile_id, .. } => {
|
||||
self.view_profile(profile_id.clone(), window, cx)
|
||||
}
|
||||
Mode::ConfigureDefaultModel { profile_id, .. } => {
|
||||
self.view_profile(profile_id.clone(), window, cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -404,7 +313,6 @@ impl Focusable for ManageProfilesModal {
|
||||
Mode::ViewProfile(_) => self.focus_handle.clone(),
|
||||
Mode::ConfigureTools { tool_picker, .. } => tool_picker.focus_handle(cx),
|
||||
Mode::ConfigureMcps { tool_picker, .. } => tool_picker.focus_handle(cx),
|
||||
Mode::ConfigureDefaultModel { model_picker, .. } => model_picker.focus_handle(cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -636,47 +544,6 @@ impl ManageProfilesModal {
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("configure-default-model")
|
||||
.track_focus(&mode.configure_default_model.focus_handle)
|
||||
.on_action({
|
||||
let profile_id = mode.profile_id.clone();
|
||||
cx.listener(move |this, _: &menu::Confirm, window, cx| {
|
||||
this.configure_default_model(
|
||||
profile_id.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
})
|
||||
.child(
|
||||
ListItem::new("model-item")
|
||||
.toggle_state(
|
||||
mode.configure_default_model
|
||||
.focus_handle
|
||||
.contains_focused(window, cx),
|
||||
)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.start_slot(
|
||||
Icon::new(IconName::ZedAssistant)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new("Configure Default Model"))
|
||||
.on_click({
|
||||
let profile_id = mode.profile_id.clone();
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
this.configure_default_model(
|
||||
profile_id.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("configure-builtin-tools")
|
||||
@@ -801,7 +668,6 @@ impl ManageProfilesModal {
|
||||
.into_any_element(),
|
||||
)
|
||||
.entry(mode.fork_profile)
|
||||
.entry(mode.configure_default_model)
|
||||
.entry(mode.configure_tools)
|
||||
.entry(mode.configure_mcps)
|
||||
.entry(mode.cancel_item)
|
||||
@@ -887,29 +753,6 @@ impl Render for ManageProfilesModal {
|
||||
.child(go_back_item)
|
||||
.into_any_element()
|
||||
}
|
||||
Mode::ConfigureDefaultModel {
|
||||
profile_id,
|
||||
model_picker,
|
||||
..
|
||||
} => {
|
||||
let profile_name = settings
|
||||
.profiles
|
||||
.get(profile_id)
|
||||
.map(|profile| profile.name.clone())
|
||||
.unwrap_or_else(|| "Unknown".into());
|
||||
|
||||
v_flex()
|
||||
.pb_1()
|
||||
.child(ProfileModalHeader::new(
|
||||
format!("{profile_name} — Configure Default Model"),
|
||||
Some(IconName::Ai),
|
||||
))
|
||||
.child(ListSeparator)
|
||||
.child(v_flex().w(rems(34.)).child(model_picker.clone()))
|
||||
.child(ListSeparator)
|
||||
.child(go_back_item)
|
||||
.into_any_element()
|
||||
}
|
||||
Mode::ConfigureMcps {
|
||||
profile_id,
|
||||
tool_picker,
|
||||
|
||||
@@ -314,7 +314,6 @@ impl PickerDelegate for ToolPickerDelegate {
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
default_model: default_profile.default_model.clone(),
|
||||
});
|
||||
|
||||
if let Some(server_id) = server_id {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::{Keep, KeepAll, OpenAgentDiff, Reject, RejectAll};
|
||||
use acp_thread::{AcpThread, AcpThreadEvent};
|
||||
use action_log::ActionLogTelemetry;
|
||||
use action_log::ActionLog;
|
||||
use agent_settings::AgentSettings;
|
||||
use anyhow::Result;
|
||||
use buffer_diff::DiffHunkStatus;
|
||||
@@ -40,16 +40,87 @@ use zed_actions::assistant::ToggleFocus;
|
||||
pub struct AgentDiffPane {
|
||||
multibuffer: Entity<MultiBuffer>,
|
||||
editor: Entity<Editor>,
|
||||
thread: Entity<AcpThread>,
|
||||
thread: AgentDiffThread,
|
||||
focus_handle: FocusHandle,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
title: SharedString,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone)]
|
||||
pub enum AgentDiffThread {
|
||||
AcpThread(Entity<AcpThread>),
|
||||
}
|
||||
|
||||
impl AgentDiffThread {
|
||||
fn project(&self, cx: &App) -> Entity<Project> {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).project().clone(),
|
||||
}
|
||||
}
|
||||
fn action_log(&self, cx: &App) -> Entity<ActionLog> {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).action_log().clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn title(&self, cx: &App) -> SharedString {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).title(),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_generating(&self, cx: &App) -> bool {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => {
|
||||
thread.read(cx).status() == acp_thread::ThreadStatus::Generating
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn has_pending_edit_tool_uses(&self, cx: &App) -> bool {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(),
|
||||
}
|
||||
}
|
||||
|
||||
fn downgrade(&self) -> WeakAgentDiffThread {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => {
|
||||
WeakAgentDiffThread::AcpThread(thread.downgrade())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Entity<AcpThread>> for AgentDiffThread {
|
||||
fn from(entity: Entity<AcpThread>) -> Self {
|
||||
AgentDiffThread::AcpThread(entity)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone)]
|
||||
pub enum WeakAgentDiffThread {
|
||||
AcpThread(WeakEntity<AcpThread>),
|
||||
}
|
||||
|
||||
impl WeakAgentDiffThread {
|
||||
pub fn upgrade(&self) -> Option<AgentDiffThread> {
|
||||
match self {
|
||||
WeakAgentDiffThread::AcpThread(weak) => weak.upgrade().map(AgentDiffThread::AcpThread),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<WeakEntity<AcpThread>> for WeakAgentDiffThread {
|
||||
fn from(entity: WeakEntity<AcpThread>) -> Self {
|
||||
WeakAgentDiffThread::AcpThread(entity)
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentDiffPane {
|
||||
pub fn deploy(
|
||||
thread: Entity<AcpThread>,
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
@@ -60,11 +131,12 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
pub fn deploy_in_workspace(
|
||||
thread: Entity<AcpThread>,
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
workspace: &mut Workspace,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Workspace>,
|
||||
) -> Entity<Self> {
|
||||
let thread = thread.into();
|
||||
let existing_diff = workspace
|
||||
.items_of_type::<AgentDiffPane>(cx)
|
||||
.find(|diff| diff.read(cx).thread == thread);
|
||||
@@ -81,7 +153,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
pub fn new(
thread: Entity<AcpThread>,
thread: AgentDiffThread,
workspace: WeakEntity<Workspace>,
window: &mut Window,
cx: &mut Context<Self>,
@@ -89,7 +161,7 @@ impl AgentDiffPane {
let focus_handle = cx.focus_handle();
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));

let project = thread.read(cx).project().clone();
let project = thread.project(cx);
let editor = cx.new(|cx| {
let mut editor =
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
@@ -100,16 +172,19 @@ impl AgentDiffPane {
editor
});

let action_log = thread.read(cx).action_log().clone();
let action_log = thread.action_log(cx);

let mut this = Self {
_subscriptions: vec![
cx.observe_in(&action_log, window, |this, _action_log, window, cx| {
this.update_excerpts(window, cx)
}),
cx.subscribe(&thread, |this, _thread, event, cx| {
this.handle_acp_thread_event(event, cx)
}),
match &thread {
AgentDiffThread::AcpThread(thread) => cx
.subscribe(thread, |this, _thread, event, cx| {
this.handle_acp_thread_event(event, cx)
}),
},
],
title: SharedString::default(),
multibuffer,
@@ -124,12 +199,7 @@ impl AgentDiffPane {
}

fn update_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let changed_buffers = self
.thread
.read(cx)
.action_log()
.read(cx)
.changed_buffers(cx);
let changed_buffers = self.thread.action_log(cx).read(cx).changed_buffers(cx);
let mut paths_to_delete = self.multibuffer.read(cx).paths().collect::<HashSet<_>>();

for (buffer, diff_handle) in changed_buffers {
@@ -216,7 +286,7 @@ impl AgentDiffPane {
}

fn update_title(&mut self, cx: &mut Context<Self>) {
let new_title = self.thread.read(cx).title();
let new_title = self.thread.title(cx);
if new_title != self.title {
self.title = new_title;
cx.emit(EditorEvent::TitleChanged);
@@ -278,18 +348,16 @@ impl AgentDiffPane {
}

fn keep_all(&mut self, _: &KeepAll, _window: &mut Window, cx: &mut Context<Self>) {
let telemetry = ActionLogTelemetry::from(self.thread.read(cx));
let action_log = self.thread.read(cx).action_log().clone();
action_log.update(cx, |action_log, cx| {
action_log.keep_all_edits(Some(telemetry), cx)
});
self.thread
.action_log(cx)
.update(cx, |action_log, cx| action_log.keep_all_edits(cx))
}
}

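Calls such as `thread.project(cx)`, `thread.action_log(cx)`, and `thread.title(cx)` above go through accessor methods on `AgentDiffThread` whose definitions are not part of this excerpt; presumably they match on the variant and read the underlying entity. A hypothetical, simplified sketch of that delegation (plain references instead of gpui's `read(cx)`, names invented for illustration):

```rust
// Hypothetical delegation layer; the real methods take a gpui context
// and read the wrapped entity, which is elided here.
struct ActionLog;

struct AcpThread {
    title: String,
    action_log: ActionLog,
}

enum DiffThread {
    Acp(AcpThread),
}

impl DiffThread {
    fn title(&self) -> &str {
        match self {
            DiffThread::Acp(thread) => &thread.title,
        }
    }

    fn action_log(&self) -> &ActionLog {
        match self {
            DiffThread::Acp(thread) => &thread.action_log,
        }
    }
}

fn main() {
    let thread = DiffThread::Acp(AcpThread {
        title: "Review agent edits".into(),
        action_log: ActionLog,
    });
    // Callers such as `update_title` and `keep_all` stay agnostic of the
    // concrete thread type behind the enum.
    assert_eq!(thread.title(), "Review agent edits");
    let _log = thread.action_log();
}
```
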
fn keep_edits_in_selection(
editor: &mut Editor,
buffer_snapshot: &MultiBufferSnapshot,
thread: &Entity<AcpThread>,
thread: &AgentDiffThread,
window: &mut Window,
cx: &mut Context<Editor>,
) {
@@ -304,7 +372,7 @@ fn keep_edits_in_selection(
fn reject_edits_in_selection(
editor: &mut Editor,
buffer_snapshot: &MultiBufferSnapshot,
thread: &Entity<AcpThread>,
thread: &AgentDiffThread,
window: &mut Window,
cx: &mut Context<Editor>,
) {
@@ -318,7 +386,7 @@ fn reject_edits_in_selection(
fn keep_edits_in_ranges(
editor: &mut Editor,
buffer_snapshot: &MultiBufferSnapshot,
thread: &Entity<AcpThread>,
thread: &AgentDiffThread,
ranges: Vec<Range<editor::Anchor>>,
window: &mut Window,
cx: &mut Context<Editor>,
@@ -333,15 +401,8 @@ fn keep_edits_in_ranges(
for hunk in &diff_hunks_in_ranges {
let buffer = multibuffer.read(cx).buffer(hunk.buffer_id);
if let Some(buffer) = buffer {
let action_log = thread.read(cx).action_log().clone();
let telemetry = ActionLogTelemetry::from(thread.read(cx));
action_log.update(cx, |action_log, cx| {
action_log.keep_edits_in_range(
buffer,
hunk.buffer_range.clone(),
Some(telemetry),
cx,
)
thread.action_log(cx).update(cx, |action_log, cx| {
action_log.keep_edits_in_range(buffer, hunk.buffer_range.clone(), cx)
});
}
}
@@ -350,7 +411,7 @@ fn keep_edits_in_ranges(
fn reject_edits_in_ranges(
editor: &mut Editor,
buffer_snapshot: &MultiBufferSnapshot,
thread: &Entity<AcpThread>,
thread: &AgentDiffThread,
ranges: Vec<Range<editor::Anchor>>,
window: &mut Window,
cx: &mut Context<Editor>,
@@ -374,12 +435,11 @@ fn reject_edits_in_ranges(
}
}

let action_log = thread.read(cx).action_log().clone();
let telemetry = ActionLogTelemetry::from(thread.read(cx));
for (buffer, ranges) in ranges_by_buffer {
action_log
thread
.action_log(cx)
.update(cx, |action_log, cx| {
action_log.reject_edits_in_ranges(buffer, ranges, Some(telemetry.clone()), cx)
action_log.reject_edits_in_ranges(buffer, ranges, cx)
})
.detach_and_log_err(cx);
}
@@ -479,7 +539,7 @@ impl Item for AgentDiffPane {
}

fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
let title = self.thread.read(cx).title();
let title = self.thread.title(cx);
Label::new(format!("Review: {}", title))
.color(if params.selected {
Color::Default
@@ -660,7 +720,7 @@ impl Render for AgentDiffPane {
}
}

fn diff_hunk_controls(thread: &Entity<AcpThread>) -> editor::RenderDiffHunkControlsFn {
fn diff_hunk_controls(thread: &AgentDiffThread) -> editor::RenderDiffHunkControlsFn {
let thread = thread.clone();

Arc::new(
@@ -687,7 +747,7 @@ fn render_diff_hunk_controls(
hunk_range: Range<editor::Anchor>,
is_created_file: bool,
line_height: Pixels,
thread: &Entity<AcpThread>,
thread: &AgentDiffThread,
editor: &Entity<Editor>,
cx: &mut App,
) -> AnyElement {
@@ -910,7 +970,9 @@ impl AgentDiffToolbar {
None => ToolbarItemLocation::Hidden,
Some(AgentDiffToolbarItem::Pane(_)) => ToolbarItemLocation::PrimaryRight,
Some(AgentDiffToolbarItem::Editor { state, .. }) => match state {
EditorState::Reviewing => ToolbarItemLocation::PrimaryRight,
EditorState::Generating | EditorState::Reviewing => {
ToolbarItemLocation::PrimaryRight
}
EditorState::Idle => ToolbarItemLocation::Hidden,
},
}
@@ -988,6 +1050,7 @@ impl Render for AgentDiffToolbar {

let content = match state {
EditorState::Idle => return Empty.into_any(),
EditorState::Generating => vec![spinner_icon],
EditorState::Reviewing => vec![
h_flex()
.child(
@@ -1101,11 +1164,8 @@ impl Render for AgentDiffToolbar {
return Empty.into_any();
};

let has_pending_edit_tool_use = agent_diff
.read(cx)
.thread
.read(cx)
.has_pending_edit_tool_calls();
let has_pending_edit_tool_use =
agent_diff.read(cx).thread.has_pending_edit_tool_uses(cx);

if has_pending_edit_tool_use {
return div().px_2().child(spinner_icon).into_any();
@@ -1162,10 +1222,11 @@ pub struct AgentDiff {
pub enum EditorState {
Idle,
Reviewing,
Generating,
}

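With the new `Generating` variant, the toolbar stays in the primary-right slot while the agent is still producing edits and hides only when idle; the or-pattern in the match above carries that. A compact, runnable restatement of the mapping (enum and function names local to this sketch):

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum EditorState {
    Idle,
    Reviewing,
    Generating,
}

#[derive(PartialEq, Eq, Debug)]
enum ToolbarLocation {
    Hidden,
    PrimaryRight,
}

// Mirrors the match in the diff: Generating is treated like Reviewing for
// placement, and only Idle hides the toolbar.
fn toolbar_location(state: EditorState) -> ToolbarLocation {
    match state {
        EditorState::Generating | EditorState::Reviewing => ToolbarLocation::PrimaryRight,
        EditorState::Idle => ToolbarLocation::Hidden,
    }
}

fn main() {
    assert_eq!(toolbar_location(EditorState::Generating), ToolbarLocation::PrimaryRight);
    assert_eq!(toolbar_location(EditorState::Reviewing), ToolbarLocation::PrimaryRight);
    assert_eq!(toolbar_location(EditorState::Idle), ToolbarLocation::Hidden);
}
```
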
struct WorkspaceThread {
thread: WeakEntity<AcpThread>,
thread: WeakAgentDiffThread,
_thread_subscriptions: (Subscription, Subscription),
singleton_editors: HashMap<WeakEntity<Buffer>, HashMap<WeakEntity<Editor>, Subscription>>,
_settings_subscription: Subscription,
@@ -1190,23 +1251,23 @@ impl AgentDiff {

pub fn set_active_thread(
workspace: &WeakEntity<Workspace>,
thread: Entity<AcpThread>,
thread: impl Into<AgentDiffThread>,
window: &mut Window,
cx: &mut App,
) {
Self::global(cx).update(cx, |this, cx| {
this.register_active_thread_impl(workspace, thread, window, cx);
this.register_active_thread_impl(workspace, thread.into(), window, cx);
});
}

fn register_active_thread_impl(
&mut self,
workspace: &WeakEntity<Workspace>,
thread: Entity<AcpThread>,
thread: AgentDiffThread,
window: &mut Window,
cx: &mut Context<Self>,
) {
let action_log = thread.read(cx).action_log().clone();
let action_log = thread.action_log(cx);

let action_log_subscription = cx.observe_in(&action_log, window, {
let workspace = workspace.clone();
@@ -1215,12 +1276,14 @@ impl AgentDiff {
}
});

let thread_subscription = cx.subscribe_in(&thread, window, {
let workspace = workspace.clone();
move |this, thread, event, window, cx| {
this.handle_acp_thread_event(&workspace, thread, event, window, cx)
}
});
let thread_subscription = match &thread {
AgentDiffThread::AcpThread(thread) => cx.subscribe_in(thread, window, {
let workspace = workspace.clone();
move |this, thread, event, window, cx| {
this.handle_acp_thread_event(&workspace, thread, event, window, cx)
}
}),
};

if let Some(workspace_thread) = self.workspace_threads.get_mut(workspace) {
// replace thread and action log subscription, but keep editors
@@ -1297,7 +1360,7 @@ impl AgentDiff {

fn register_review_action<T: Action>(
workspace: &mut Workspace,
review: impl Fn(&Entity<Editor>, &Entity<AcpThread>, &mut Window, &mut App) -> PostReviewState
review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState
+ 'static,
this: &Entity<AgentDiff>,
) {
@@ -1457,7 +1520,7 @@ impl AgentDiff {
return;
};

let action_log = thread.read(cx).action_log();
let action_log = thread.action_log(cx);
let changed_buffers = action_log.read(cx).changed_buffers(cx);

let mut unaffected = self.reviewing_editors.clone();
@@ -1482,11 +1545,15 @@ impl AgentDiff {
multibuffer.add_diff(diff_handle.clone(), cx);
});

let reviewing_state = EditorState::Reviewing;
let new_state = if thread.is_generating(cx) {
EditorState::Generating
} else {
EditorState::Reviewing
};

let previous_state = self
.reviewing_editors
.insert(weak_editor.clone(), reviewing_state.clone());
.insert(weak_editor.clone(), new_state.clone());

if previous_state.is_none() {
editor.update(cx, |editor, cx| {
@@ -1499,9 +1566,7 @@ impl AgentDiff {
unaffected.remove(weak_editor);
}

if reviewing_state == EditorState::Reviewing
&& previous_state != Some(reviewing_state)
{
if new_state == EditorState::Reviewing && previous_state != Some(new_state) {
// Jump to first hunk when we enter review mode
editor.update(cx, |editor, cx| {
let snapshot = multibuffer.read(cx).snapshot(cx);
@@ -1576,7 +1641,7 @@ impl AgentDiff {

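The block above replaces the fixed `EditorState::Reviewing` with a state derived from `thread.is_generating(cx)`, and only scrolls to the first hunk when an editor newly enters review mode. A sketch of that transition check over a plain map of per-editor states (stand-in types; the real code keys by `WeakEntity<Editor>`):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq)]
enum EditorState {
    Reviewing,
    Generating,
}

// Returns true when this editor just transitioned into review mode, which is
// the moment the diff jumps to the first hunk.
fn entered_review(
    states: &mut HashMap<u32, EditorState>,
    editor_id: u32,
    is_generating: bool,
) -> bool {
    let new_state = if is_generating {
        EditorState::Generating
    } else {
        EditorState::Reviewing
    };
    let previous_state = states.insert(editor_id, new_state);
    new_state == EditorState::Reviewing && previous_state != Some(new_state)
}

fn main() {
    let mut states = HashMap::new();
    assert!(entered_review(&mut states, 1, false)); // first review: jump
    assert!(!entered_review(&mut states, 1, false)); // still reviewing: no jump
    assert!(!entered_review(&mut states, 1, true)); // generating again: no jump
    assert!(entered_review(&mut states, 1, false)); // back to reviewing: jump
}
```
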
fn keep_all(
editor: &Entity<Editor>,
thread: &Entity<AcpThread>,
thread: &AgentDiffThread,
window: &mut Window,
cx: &mut App,
) -> PostReviewState {
@@ -1596,7 +1661,7 @@ impl AgentDiff {

fn reject_all(
editor: &Entity<Editor>,
thread: &Entity<AcpThread>,
thread: &AgentDiffThread,
window: &mut Window,
cx: &mut App,
) -> PostReviewState {
@@ -1616,7 +1681,7 @@ impl AgentDiff {

fn keep(
editor: &Entity<Editor>,
thread: &Entity<AcpThread>,
thread: &AgentDiffThread,
window: &mut Window,
cx: &mut App,
) -> PostReviewState {
@@ -1629,7 +1694,7 @@ impl AgentDiff {

fn reject(
editor: &Entity<Editor>,
thread: &Entity<AcpThread>,
thread: &AgentDiffThread,
window: &mut Window,
cx: &mut App,
) -> PostReviewState {
@@ -1652,7 +1717,7 @@ impl AgentDiff {
fn review_in_active_editor(
&mut self,
workspace: &mut Workspace,
review: impl Fn(&Entity<Editor>, &Entity<AcpThread>, &mut Window, &mut App) -> PostReviewState,
review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState,
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<Task<Result<()>>> {
@@ -1674,7 +1739,7 @@ impl AgentDiff {
if let PostReviewState::AllReviewed = review(&editor, &thread, window, cx)
&& let Some(curr_buffer) = editor.read(cx).buffer().read(cx).as_singleton()
{
let changed_buffers = thread.read(cx).action_log().read(cx).changed_buffers(cx);
let changed_buffers = thread.action_log(cx).read(cx).changed_buffers(cx);

let mut keys = changed_buffers.keys().cycle();
keys.find(|k| *k == &curr_buffer);
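
The `keys().cycle()` / `find` pair above positions an iterator on the buffer that was just reviewed so the next changed buffer, wrapping past the end, can be opened. A stand-alone sketch of that wrap-around lookup; `next_after` is a hypothetical helper, not a function from the diff:

```rust
// Wrap-around lookup over the changed-buffer keys: find the entry after
// `current`, cycling back to the front when `current` is last.
fn next_after<'a>(items: &'a [&'a str], current: &str) -> Option<&'a str> {
    // Guard first: `find` on a cycled iterator never terminates if the
    // element is missing.
    if !items.contains(&current) {
        return None;
    }
    let mut keys = items.iter().cycle();
    keys.find(|k| **k == current); // position the cursor on the current item
    keys.next().copied() // the following element, wrapped around
}

fn main() {
    let changed = ["a.rs", "b.rs", "c.rs"];
    assert_eq!(next_after(&changed, "b.rs"), Some("c.rs"));
    assert_eq!(next_after(&changed, "c.rs"), Some("a.rs")); // wraps around
    assert_eq!(next_after(&changed, "d.rs"), None);
}
```
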
@@ -1717,11 +1782,12 @@ mod tests {
use super::*;
use crate::Keep;
use acp_thread::AgentConnection as _;
use agent_settings::AgentSettings;
use editor::EditorSettings;
use gpui::{TestAppContext, UpdateGlobal, VisualTestContext};
use project::{FakeFs, Project};
use serde_json::json;
use settings::SettingsStore;
use settings::{Settings, SettingsStore};
use std::{path::Path, rc::Rc};
use util::path;

@@ -1730,8 +1796,13 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
Project::init_settings(cx);
AgentSettings::register(cx);
prompt_store::init(cx);
workspace::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
EditorSettings::register(cx);
language_model::init_settings(cx);
});

@@ -1758,7 +1829,8 @@ mod tests {
.await
.unwrap();

let action_log = cx.read(|cx| thread.read(cx).action_log().clone());
let thread = AgentDiffThread::AcpThread(thread);
let action_log = cx.read(|cx| thread.action_log(cx));

let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -1884,8 +1956,13 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
Project::init_settings(cx);
AgentSettings::register(cx);
prompt_store::init(cx);
workspace::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
EditorSettings::register(cx);
language_model::init_settings(cx);
workspace::register_project_item::<Editor>(cx);
});
@@ -1941,6 +2018,7 @@ mod tests {
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());

// Set the active thread
let thread = AgentDiffThread::AcpThread(thread);
cx.update(|window, cx| {
AgentDiff::set_active_thread(&workspace.downgrade(), thread.clone(), window, cx)
});

@@ -47,7 +47,6 @@ impl AgentModelSelector {
}
}
},
true, // Use popover styles for picker
window,
cx,
)

@@ -16,9 +16,11 @@ use serde::{Deserialize, Serialize};
use settings::{
DefaultAgentView as DefaultView, LanguageModelProviderSetting, LanguageModelSelection,
};

use zed_actions::OpenBrowser;
use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent};

use crate::acp::{AcpThreadHistory, ThreadHistoryEvent};
use crate::context_store::ContextStore;
use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
use crate::{
AddContextServer, AgentDiffPane, DeleteRecentlyOpenThread, Follow, InlineAssistant,
@@ -31,14 +33,9 @@ use crate::{
text_thread_editor::{AgentPanelDelegate, TextThreadEditor, make_lsp_adapter_delegate},
ui::{AgentOnboardingModal, EndTrialUpsell},
};
use crate::{
ExpandMessageEditor,
acp::{AcpThreadHistory, ThreadHistoryEvent},
};
use crate::{
ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, placeholder_command,
};
use crate::{ManageProfiles, context_store::ContextStore};
use agent_settings::AgentSettings;
use ai_onboarding::AgentPanelOnboarding;
use anyhow::{Result, anyhow};
@@ -109,12 +106,6 @@ pub fn init(cx: &mut App) {
}
},
)
.register_action(|workspace, _: &ExpandMessageEditor, window, cx| {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
workspace.focus_panel::<AgentPanel>(window, cx);
panel.update(cx, |panel, cx| panel.expand_message_editor(window, cx));
}
})
.register_action(|workspace, _: &OpenHistory, window, cx| {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
workspace.focus_panel::<AgentPanel>(window, cx);
@@ -738,25 +729,6 @@ impl AgentPanel {
&self.context_server_registry
}

pub fn is_hidden(workspace: &Entity<Workspace>, cx: &App) -> bool {
let workspace_read = workspace.read(cx);

workspace_read
.panel::<AgentPanel>(cx)
.map(|panel| {
let panel_id = Entity::entity_id(&panel);

let is_visible = workspace_read.all_docks().iter().any(|dock| {
dock.read(cx)
.visible_panel()
.is_some_and(|visible_panel| visible_panel.panel_id() == panel_id)
});

!is_visible
})
.unwrap_or(true)
}

fn active_thread_view(&self) -> Option<&Entity<AcpThreadView>> {
match &self.active_view {
ActiveView::ExternalAgentThread { thread_view, .. } => Some(thread_view),
@@ -953,15 +925,6 @@ impl AgentPanel {
.detach_and_log_err(cx);
}

fn expand_message_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) {
if let Some(thread_view) = self.active_thread_view() {
thread_view.update(cx, |view, cx| {
view.expand_message_editor(&ExpandMessageEditor, window, cx);
view.focus_handle(cx).focus(window);
});
}
}

fn open_history(&mut self, window: &mut Window, cx: &mut Context<Self>) {
if matches!(self.active_view, ActiveView::History) {
if let Some(previous_view) = self.previous_view.take() {
@@ -1780,9 +1743,10 @@ impl AgentPanel {
}),
)
.action("Add Custom Server…", Box::new(AddContextServer))
.separator()
.separator();

menu = menu
.action("Rules", Box::new(OpenRulesLibrary::default()))
.action("Profiles", Box::new(ManageProfiles::default()))
.action("Settings", Box::new(OpenSettings))
.separator()
.action(full_screen_label, Box::new(ToggleZoom));
@@ -1880,12 +1844,7 @@ impl AgentPanel {
{
let focus_handle = focus_handle.clone();
move |_window, cx| {
Tooltip::for_action_in(
"New Thread…",
&ToggleNewThreadMenu,
&focus_handle,
cx,
)
Tooltip::for_action_in("New…", &ToggleNewThreadMenu, &focus_handle, cx)
}
},
)
@@ -1983,7 +1942,7 @@ impl AgentPanel {
.separator()
.header("External Agents")
.item(
ContextMenuEntry::new("New Claude Code")
ContextMenuEntry::new("New Claude Code Thread")
.icon(IconName::AiClaude)
.disabled(is_via_collab)
.icon_color(Color::Muted)
@@ -2009,7 +1968,7 @@ impl AgentPanel {
}),
)
.item(
ContextMenuEntry::new("New Codex CLI")
ContextMenuEntry::new("New Codex Thread")
.icon(IconName::AiOpenAi)
.disabled(is_via_collab)
.icon_color(Color::Muted)
@@ -2035,7 +1994,7 @@ impl AgentPanel {
}),
)
.item(
ContextMenuEntry::new("New Gemini CLI")
ContextMenuEntry::new("New Gemini CLI Thread")
.icon(IconName::AiGemini)
.icon_color(Color::Muted)
.disabled(is_via_collab)
@@ -2079,9 +2038,9 @@ impl AgentPanel {
for agent_name in agent_names {
let icon_path = agent_server_store_read.agent_icon(&agent_name);
let mut entry =
ContextMenuEntry::new(format!("New {}", agent_name));
ContextMenuEntry::new(format!("New {} Thread", agent_name));
if let Some(icon_path) = icon_path {
entry = entry.custom_icon_svg(icon_path);
entry = entry.custom_icon_path(icon_path);
} else {
entry = entry.icon(IconName::Terminal);
}
@@ -2131,20 +2090,12 @@ impl AgentPanel {
menu
})
.separator()
.item(
ContextMenuEntry::new("Add More Agents")
.icon(IconName::Plus)
.icon_color(Color::Muted)
.handler({
move |window, cx| {
window.dispatch_action(Box::new(zed_actions::Extensions {
category_filter: Some(
zed_actions::ExtensionCategoryFilter::AgentServers,
),
id: None,
}), cx)
}
}),
.link(
"Add Other Agents",
OpenBrowser {
url: zed_urls::external_agents_docs(cx),
}
.boxed_clone(),
)
}))
}
@@ -2158,7 +2109,7 @@ impl AgentPanel {
.when_some(selected_agent_custom_icon, |this, icon_path| {
let label = selected_agent_label.clone();
this.px(DynamicSpacing::Base02.rems(cx))
.child(Icon::from_external_svg(icon_path).color(Color::Muted))
.child(Icon::from_path(icon_path).color(Color::Muted))
.tooltip(move |_window, cx| {
Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)
})

@@ -12,6 +12,7 @@ mod context_strip;
mod inline_assistant;
mod inline_prompt_editor;
mod language_model_selector;
mod message_editor;
mod profile_selector;
mod slash_command;
mod slash_command_picker;
@@ -247,6 +248,8 @@ pub fn init(
is_eval: bool,
cx: &mut App,
) {
AgentSettings::register(cx);

assistant_text_thread::init(client.clone(), cx);
rules_library::init(cx);
if !is_eval {

@@ -1082,7 +1082,10 @@ mod tests {
};
use gpui::TestAppContext;
use indoc::indoc;
use language::{Buffer, Language, LanguageConfig, LanguageMatcher, Point, tree_sitter_rust};
use language::{
Buffer, Language, LanguageConfig, LanguageMatcher, Point, language_settings,
tree_sitter_rust,
};
use language_model::{LanguageModelRegistry, TokenUsage};
use rand::prelude::*;
use settings::SettingsStore;
@@ -1462,6 +1465,8 @@ mod tests {
fn init_test(cx: &mut TestAppContext) {
cx.update(LanguageModelRegistry::test);
cx.set_global(cx.update(SettingsStore::test));
cx.update(Project::init_settings);
cx.update(language_settings::init);
}

fn simulate_response_stream(

@@ -620,18 +620,8 @@ impl TextThreadContextHandle {

impl Display for TextThreadContext {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "<text_thread title=\"")?;
for c in self.title.chars() {
match c {
'&' => write!(f, "&amp;")?,
'<' => write!(f, "&lt;")?,
'>' => write!(f, "&gt;")?,
'"' => write!(f, "&quot;")?,
'\'' => write!(f, "&apos;")?,
_ => write!(f, "{}", c)?,
}
}
writeln!(f, "\">")?;
// TODO: escape title?
writeln!(f, "<text_thread title=\"{}\">", self.title)?;
write!(f, "{}", self.text.trim())?;
write!(f, "\n</text_thread>")
}
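
The removed lines escaped XML-significant characters in the title one by one before interpolating it into the `<text_thread>` tag; the replacement writes the raw title and leaves a `// TODO: escape title?`. For reference, a self-contained version of the escaping the old code performed (the rendered diff had decoded the entity strings, so `&amp;` and friends are reconstructed here), with a usage example showing why a quote in the title matters:

```rust
use std::fmt::{self, Display, Formatter};

struct TextThread {
    title: String,
    text: String,
}

impl Display for TextThread {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "<text_thread title=\"")?;
        // Escape characters that would otherwise terminate or corrupt the
        // attribute value.
        for c in self.title.chars() {
            match c {
                '&' => write!(f, "&amp;")?,
                '<' => write!(f, "&lt;")?,
                '>' => write!(f, "&gt;")?,
                '"' => write!(f, "&quot;")?,
                '\'' => write!(f, "&apos;")?,
                _ => write!(f, "{}", c)?,
            }
        }
        writeln!(f, "\">")?;
        write!(f, "{}", self.text.trim())?;
        write!(f, "\n</text_thread>")
    }
}

fn main() {
    let thread = TextThread {
        title: r#"Fix "<flaky>" test & retry"#.into(),
        text: "details".into(),
    };
    // Prints: <text_thread title="Fix &quot;&lt;flaky&gt;&quot; test &amp; retry">
    println!("{}", thread);
}
```
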
@@ -1075,6 +1065,8 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
Project::init_settings(cx);
});
}