Compare commits
316 Commits
linux-ci...diff-perf-
| Author | SHA1 | Date |
|---|---|---|
|  | dfeb67f93c |  |
|  | 0f5a63a9b0 |  |
|  | c8ada5b1ae |  |
|  | 27a18843d4 |  |
|  | 2bc1d60c52 |  |
|  | 17933f1222 |  |
|  | cd87307289 |  |
|  | 11b29d693f |  |
|  | c061698229 |  |
|  | b4f7af066e |  |
|  | c83621fa1f |  |
|  | 0da52d6774 |  |
|  | 60ee0dd19b |  |
|  | 9fc4abd8de |  |
|  | 2ead8c42fb |  |
|  | 0a4b1ac696 |  |
|  | f9fb855990 |  |
|  | b587a62ac3 |  |
|  | 1b2e38bb33 |  |
|  | 4339c772e4 |  |
|  | ba7ea71c00 |  |
|  | cd04450273 |  |
|  | 769a8a650e |  |
|  | 94ff4aa4b2 |  |
|  | 1e1480405a |  |
|  | cdd7d4b2fb |  |
|  | 4fd2b3f374 |  |
|  | 43a7f96462 |  |
|  | 2a2e04bb5c |  |
|  | 9bf212bd1e |  |
|  | 38cd16aad9 |  |
|  | d8655f0656 |  |
|  | 91d631c229 |  |
|  | 054d2e1524 |  |
|  | 982f2418f4 |  |
|  | 9f580464f0 |  |
|  | fc3e503cfe |  |
|  | 52c49b86b2 |  |
|  | 07b707153d |  |
|  | 75cef88ea2 |  |
|  | 46b39f0077 |  |
|  | fb410ab3ae |  |
|  | 1b93242351 |  |
|  | fb6e41d51e |  |
|  | 0ec31db398 |  |
|  | a262ca1cd5 |  |
|  | 6a38d699dc |  |
|  | 95feefc1cf |  |
|  | a827f25d00 |  |
|  | cc6208b17f |  |
|  | 8d15ec7f99 |  |
|  | ca5a4dcffa |  |
|  | 3e7b8efb98 |  |
|  | 4002b32ad4 |  |
|  | 3ef8163357 |  |
|  | b9524837bb |  |
|  | e5660d25f1 |  |
|  | 57adf42492 |  |
|  | 4cdcb0c15e |  |
|  | 9113a20b8b |  |
|  | 1631cec15a |  |
|  | 5e41ce17e3 |  |
|  | 5ed458497e |  |
|  | 9ecf257502 |  |
|  | 2eeb02305c |  |
|  | c2b3e60f6d |  |
|  | d075a56ee7 |  |
|  | 8217e57a2d |  |
|  | 08daedd014 |  |
|  | 4da5675920 |  |
|  | 5fc54986c7 |  |
|  | cb5055aaec |  |
|  | 454d649b6e |  |
|  | 222767e69b |  |
|  | d7b7fa3ee2 |  |
|  | 7cfce60570 |  |
|  | 45b78482f5 |  |
|  | 71f1f3728d |  |
|  | 8b560cd8aa |  |
|  | 38e1e3f498 |  |
|  | a6b177d806 |  |
|  | 73366bef62 |  |
|  | 48bd253358 |  |
|  | 2131d88e48 |  |
|  | 42149df0f2 |  |
|  | 379bdb227a |  |
|  | 04e53bff3d |  |
|  | 28f30fc851 |  |
|  | f8b414c22c |  |
|  | 50504793e6 |  |
|  | b625263989 |  |
|  | c8f9db2e24 |  |
|  | bc3c88e737 |  |
|  | 3a058138c1 |  |
|  | f2b539598e |  |
|  | dc503e9975 |  |
|  | 73b75a7765 |  |
|  | deacd3e922 |  |
|  | f7153bbe8a |  |
|  | 4e7ba8e680 |  |
|  | 9909b59bd0 |  |
|  | 00ff89f00f |  |
|  | 12fe12b5ac |  |
|  | a9bc890497 |  |
|  | d887e2050f |  |
|  | d5421ba1a8 |  |
|  | 548cdfde3a |  |
|  | 2408f767f4 |  |
|  | df15d2d2fe |  |
|  | 07dcb8f2bb |  |
|  | 06bdb28517 |  |
|  | d6b58bb948 |  |
|  | 03e0581ee8 |  |
|  | 1552e13799 |  |
|  | ade0f1342c |  |
|  | 04f7b08ab9 |  |
|  | ecbdffc84f |  |
|  | aa61f25795 |  |
|  | d406409b72 |  |
|  | bf79592465 |  |
|  | d3d7199507 |  |
|  | 743a9cf258 |  |
|  | a05358f47f |  |
|  | 3a4aba1df2 |  |
|  | 12d71b37bb |  |
|  | 34e0c97dbc |  |
|  | cf31b736f7 |  |
|  | 1cb512f336 |  |
|  | 4e6a562efe |  |
|  | c1dea842ff |  |
|  | c42d54af17 |  |
|  | f3a5ebc315 |  |
|  | f73d6fe4ce |  |
|  | c6d61870e2 |  |
|  | 1f938c08d2 |  |
|  | f2ce06c7b0 |  |
|  | 7c29c6d7a6 |  |
|  | eab06eb1d9 |  |
|  | c2537fad43 |  |
|  | 977856407e |  |
|  | 7070038c92 |  |
|  | b059c1fce7 |  |
|  | 03c6d6285c |  |
|  | 60c546196a |  |
|  | 8aa2158418 |  |
|  | 5ae0768ce4 |  |
|  | 44e5a962e6 |  |
|  | 3944234bab |  |
|  | ac3b232dda |  |
|  | 743180342a |  |
|  | 3825ce523e |  |
|  | b4cf7e440e |  |
|  | bdb2d6c8de |  |
|  | 0c73252c9d |  |
|  | c7aa805398 |  |
|  | 94ba24dadd |  |
|  | 046b43f135 |  |
|  | 426040f08f |  |
|  | 785b5ade6e |  |
|  | 344f63c6ca |  |
|  | e30d5998e4 |  |
|  | 277ae27ca2 |  |
|  | 64fdc1d5b6 |  |
|  | 992448b560 |  |
|  | 802b0e4968 |  |
|  | b8cdd38efb |  |
|  | 87f9ba380f |  |
|  | 12dae07108 |  |
|  | cf0f442869 |  |
|  | de9c4127a5 |  |
|  | e7089fe45c |  |
|  | 901b6ffd28 |  |
|  | edc380db80 |  |
|  | 33adfa443e |  |
|  | 9e5438906a |  |
|  | fbe2907919 |  |
|  | 02f5a514ce |  |
|  | 4bd4d76276 |  |
|  | 7a7e820030 |  |
|  | 0e45500158 |  |
|  | 16c399876c |  |
|  | 3583e129d1 |  |
|  | 75b1da0f65 |  |
|  | 207a202477 |  |
|  | 0871c539ee |  |
|  | b92664c52d |  |
|  | 19099e808c |  |
|  | f29ac79bbd |  |
|  | 797ac5ead4 |  |
|  | 37c6cd43e0 |  |
|  | 01a1b9b2c1 |  |
|  | d44437d543 |  |
|  | 6be029ff17 |  |
|  | d59ecf790f |  |
|  | bde7e55adb |  |
|  | b7d31fabc5 |  |
|  | 1a223e23fb |  |
|  | f2c03d0d0a |  |
|  | 6fa823417f |  |
|  | 8725a2d166 |  |
|  | f9c97d29c8 |  |
|  | 5192233b59 |  |
|  | d0d7b9cdcd |  |
|  | 8b051d6cc3 |  |
|  | 16d84a31ec |  |
|  | 4adff4aa8a |  |
|  | 7de3c67b1d |  |
|  | 60bd417d8b |  |
|  | d31194dcf8 |  |
|  | 4cc6d6a398 |  |
|  | b9eafb80fd |  |
|  | 5e7927f628 |  |
|  | b75736568b |  |
|  | 360074effb |  |
|  | b1922b7156 |  |
|  | 54fd7ea699 |  |
|  | 8564602c3b |  |
|  | e604ef3af9 |  |
|  | 1f5101d9fd |  |
|  | 37540d1ff7 |  |
|  | d9d24582bb |  |
|  | a4a2acfaa5 |  |
|  | 55554a8653 |  |
|  | d00ad02b05 |  |
|  | 233a1eb46e |  |
|  | c656101862 |  |
|  | 3248a05406 |  |
|  | baaf87aa23 |  |
|  | 8991f58b97 |  |
|  | 2579f86bcd |  |
|  | 5423fafc83 |  |
|  | 8a01e48339 |  |
|  | 1b43217c05 |  |
|  | 1b6cde7032 |  |
|  | bd0bcdb0ed |  |
|  | 2b5699117f |  |
|  | 0857ddadc5 |  |
|  | 3e3618b3ff |  |
|  | 2163580b16 |  |
|  | 4778d61bdd |  |
|  | 46c5d515bf |  |
|  | 73bd12ebbe |  |
|  | cc829e7fdb |  |
|  | fdf5bf7e6a |  |
|  | c94536a2d6 |  |
|  | 9db474051b |  |
|  | 1d0bb5a7a6 |  |
|  | 6823847978 |  |
|  | 3a7bdf43f5 |  |
|  | d5e297147f |  |
|  | 1c4923e1c8 |  |
|  | ee80ba6693 |  |
|  | fd306c97f4 |  |
|  | b3483a157c |  |
|  | ac66e912d5 |  |
|  | 5e37a7b78c |  |
|  | 00278f43bb |  |
|  | ac3d2a338b |  |
|  | 58f07ff709 |  |
|  | db0f7a8b23 |  |
|  | f5ad4c8bd9 |  |
|  | 172984978f |  |
|  | ba26ca4aee |  |
|  | 1ae8e0c53a |  |
|  | a70f80df95 |  |
|  | 821a4880bd |  |
|  | 7f17d4b61d |  |
|  | ebf4a23b18 |  |
|  | 370d4ce200 |  |
|  | 2284131bfc |  |
|  | 2f7045f724 |  |
|  | 5d359ea2f2 |  |
|  | 72c6a74505 |  |
|  | 941033e373 |  |
|  | 83884ca36f |  |
|  | f503c65924 |  |
|  | c1cd371786 |  |
|  | 7cb2d83608 |  |
|  | ae3abf50d8 |  |
|  | edf2ec7d4c |  |
|  | 2919e1976a |  |
|  | fd3ca0303f |  |
|  | 61e4a1d16a |  |
|  | 2471ae451c |  |
|  | d6b31d8932 |  |
|  | 95ad7a6cae |  |
|  | 54a7da364c |  |
|  | 003a39740a |  |
|  | 33ec545d1f |  |
|  | b7cc597d28 |  |
|  | ef306245e0 |  |
|  | 615d1d7ab4 |  |
|  | 45983e11e9 |  |
|  | 06e1db54a7 |  |
|  | 8e09256c8c |  |
|  | 79ef10bfc3 |  |
|  | 986ca19516 |  |
|  | 42d8d77938 |  |
|  | e4c90be58a |  |
|  | 7433d85458 |  |
|  | 1dffdea27c |  |
|  | 1f40a3c70e |  |
|  | 92c31278ee |  |
|  | a7c5b8d78b |  |
|  | d1b28e6431 |  |
|  | ecf179df0c |  |
|  | e54c9da2a8 |  |
|  | bcbc6a330e |  |
|  | f213f4bcc8 |  |
|  | 0ee6ca1767 |  |
|  | 762082b15a |  |
|  | fcd690d04c |  |
|  | 8de4b360e8 |  |
|  | 7644e797fe |  |
|  | 4fb91135d1 |  |
|  | f45a9b351d |  |
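To reproduce a commit listing like this locally, a plain git range over the two compared refs is enough; the branch names below are placeholders, since the head ref name is truncated in this view.

```bash
# List the commits reachable from the head branch but not the base branch.
# BASE and HEAD are placeholders for the two refs compared above.
git fetch origin
git log --oneline origin/BASE..origin/HEAD
```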
6  .github/ISSUE_TEMPLATE/11_crash_report.yml  (vendored)

@@ -35,10 +35,8 @@ body:
    attributes:
      label: If applicable, attach your `Zed.log` file to this issue.
      description: |
        macOS: `~/Library/Logs/Zed/Zed.log`
        Windows: `C:\Users\YOU\AppData\Local\Zed\logs\Zed.log`
        Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
        If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
        From the command palette, run `zed: open log` to see the last 1000 lines.
        Or run `zed: reveal log in file manager` to reveal the log file itself.
      value: |
        <details><summary>Zed.log</summary>

39  .github/workflows/cherry_pick.yml  (vendored, new file)

@@ -0,0 +1,39 @@
# Generated from xtask::workflows::cherry_pick
# Rebuild with `cargo xtask workflows`.
name: cherry_pick
on:
  workflow_dispatch:
    inputs:
      commit:
        description: commit
        required: true
        type: string
      branch:
        description: branch
        required: true
        type: string
      channel:
        description: channel
        required: true
        type: string
jobs:
  run_cherry_pick:
    runs-on: namespace-profile-2x4-ubuntu-2404
    steps:
      - name: steps::checkout_repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          clean: false
      - id: get-app-token
        name: cherry_pick::run_cherry_pick::authenticate_as_zippy
        uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1
        with:
          app-id: ${{ secrets.ZED_ZIPPY_APP_ID }}
          private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
      - name: cherry_pick::run_cherry_pick::cherry_pick
        run: ./script/cherry-pick ${{ inputs.branch }} ${{ inputs.commit }} ${{ inputs.channel }}
        shell: bash -euxo pipefail {0}
        env:
          GIT_COMMITTER_NAME: Zed Zippy
          GIT_COMMITTER_EMAIL: hi@zed.dev
          GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }}
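Since the workflow above is `workflow_dispatch`-only, it has to be triggered manually. A minimal sketch with the GitHub CLI follows; the input values are placeholders, not values from this compare.

```bash
# Dispatch the cherry_pick workflow with its three required inputs.
# The commit SHA, target branch, and channel are example values only.
gh workflow run cherry_pick.yml \
  -f commit=abc1234 \
  -f branch=v0.160.x \
  -f channel=preview
```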
869  .github/workflows/ci.yml  (vendored)

@@ -1,869 +0,0 @@
name: CI

on:
  push:
    branches:
      - main
      - "v[0-9]+.[0-9]+.x"
    tags:
      - "v*"

  pull_request:
    branches:
      - "**"

concurrency:
  # Allow only one workflow per any non-`main` branch.
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
  cancel-in-progress: true

env:
  CARGO_TERM_COLOR: always
  CARGO_INCREMENTAL: 0
  RUST_BACKTRACE: 1
  DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
  DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
  ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}

jobs:
  job_spec:
    name: Decide which jobs to run
    if: github.repository_owner == 'zed-industries'
    outputs:
      run_tests: ${{ steps.filter.outputs.run_tests }}
      run_license: ${{ steps.filter.outputs.run_license }}
      run_docs: ${{ steps.filter.outputs.run_docs }}
      run_nix: ${{ steps.filter.outputs.run_nix }}
      run_actionlint: ${{ steps.filter.outputs.run_actionlint }}
    runs-on:
      - namespace-profile-2x4-ubuntu-2404
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          # 350 is arbitrary; ~10days of history on main (5secs); full history is ~25secs
          fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
      - name: Fetch git history and generate output filters
        id: filter
        run: |
          if [ -z "$GITHUB_BASE_REF" ]; then
            echo "Not in a PR context (i.e., push to main/stable/preview)"
            COMPARE_REV="$(git rev-parse HEAD~1)"
          else
            echo "In a PR context comparing to pull_request.base.ref"
            git fetch origin "$GITHUB_BASE_REF" --depth=350
            COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
          fi
          CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

          # Specify anything which should potentially skip full test suite in this regex:
          # - docs/
          # - script/update_top_ranking_issues/
          # - .github/ISSUE_TEMPLATE/
          # - .github/workflows/ (except .github/workflows/ci.yml)
          SKIP_REGEX='^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'

          echo "$CHANGED_FILES" | grep -qvP "$SKIP_REGEX" && \
            echo "run_tests=true" >> "$GITHUB_OUTPUT" || \
            echo "run_tests=false" >> "$GITHUB_OUTPUT"

          echo "$CHANGED_FILES" | grep -qP '^docs/' && \
            echo "run_docs=true" >> "$GITHUB_OUTPUT" || \
            echo "run_docs=false" >> "$GITHUB_OUTPUT"

          echo "$CHANGED_FILES" | grep -qP '^\.github/(workflows/|actions/|actionlint.yml)' && \
            echo "run_actionlint=true" >> "$GITHUB_OUTPUT" || \
            echo "run_actionlint=false" >> "$GITHUB_OUTPUT"

          echo "$CHANGED_FILES" | grep -qP '^(Cargo.lock|script/.*licenses)' && \
            echo "run_license=true" >> "$GITHUB_OUTPUT" || \
            echo "run_license=false" >> "$GITHUB_OUTPUT"

          echo "$CHANGED_FILES" | grep -qP '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' && \
            echo "$GITHUB_REF_NAME" | grep -qvP '^v[0-9]+\.[0-9]+\.[0-9x](-pre)?$' && \
            echo "run_nix=true" >> "$GITHUB_OUTPUT" || \
            echo "run_nix=false" >> "$GITHUB_OUTPUT"

  migration_checks:
    name: Check Postgres and Protobuf migrations, mergability
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    timeout-minutes: 60
    runs-on:
      - self-mini-macos
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false
          fetch-depth: 0 # fetch full history

      - name: Remove untracked files
        run: git clean -df

      - name: Find modified migrations
        shell: bash -euxo pipefail {0}
        run: |
          export SQUAWK_GITHUB_TOKEN=${{ github.token }}
          . ./script/squawk

      - name: Ensure fresh merge
        shell: bash -euxo pipefail {0}
        run: |
          if [ -z "$GITHUB_BASE_REF" ];
          then
            echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
          else
            git checkout -B temp
            git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
            echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
          fi

      - uses: bufbuild/buf-setup-action@v1
        with:
          version: v1.29.0
      - uses: bufbuild/buf-breaking-action@v1
        with:
          input: "crates/proto/proto/"
          against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"

  style:
    timeout-minutes: 60
    name: Check formatting and spelling
    needs: [job_spec]
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - namespace-profile-4x8-ubuntu-2204
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

      - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
        with:
          version: 9

      - name: Prettier Check on /docs
        working-directory: ./docs
        run: |
          pnpm dlx "prettier@${PRETTIER_VERSION}" . --check || {
            echo "To fix, run from the root of the Zed repo:"
            echo "  cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
            false
          }
        env:
          PRETTIER_VERSION: 3.5.0

      - name: Prettier Check on default.json
        run: |
          pnpm dlx "prettier@${PRETTIER_VERSION}" assets/settings/default.json --check || {
            echo "To fix, run from the root of the Zed repo:"
            echo "  pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --write"
            false
          }
        env:
          PRETTIER_VERSION: 3.5.0

      # To support writing comments that they will certainly be revisited.
      - name: Check for todo! and FIXME comments
        run: script/check-todos

      - name: Check modifier use in keymaps
        run: script/check-keymaps

      - name: Run style checks
        uses: ./.github/actions/check_style

      - name: Check for typos
        uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1 # v1.38.1
        with:
          config: ./typos.toml

  check_docs:
    timeout-minutes: 60
    name: Check docs
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      (needs.job_spec.outputs.run_tests == 'true' || needs.job_spec.outputs.run_docs == 'true')
    runs-on:
      - namespace-profile-8x16-ubuntu-2204
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Build docs
        uses: ./.github/actions/build_docs

  actionlint:
    runs-on: namespace-profile-2x4-ubuntu-2404
    if: github.repository_owner == 'zed-industries' && needs.job_spec.outputs.run_actionlint == 'true'
    needs: [job_spec]
    steps:
      - uses: actions/checkout@v4
      - name: Download actionlint
        id: get_actionlint
        run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
        shell: bash
      - name: Check workflow files
        run: ${{ steps.get_actionlint.outputs.executable }} -color
        shell: bash

  macos_tests:
    timeout-minutes: 60
    name: (macOS) Run Clippy and tests
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - self-mini-macos
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Check that Cargo.lock is up to date
        run: |
          cargo update --locked --workspace

      - name: cargo clippy
        run: ./script/clippy

      - name: Install cargo-machete
        uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 # v2
        with:
          command: install
          args: cargo-machete@0.7.0

      - name: Check unused dependencies
        uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 # v2
        with:
          command: machete

      - name: Check licenses
        run: |
          script/check-licenses
          if [[ "${{ needs.job_spec.outputs.run_license }}" == "true" ]]; then
            script/generate-licenses /tmp/zed_licenses_output
          fi

      - name: Check for new vulnerable dependencies
        if: github.event_name == 'pull_request'
        uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8 # v4
        with:
          license-check: false

      - name: Run tests
        uses: ./.github/actions/run_tests

      - name: Build collab
        run: cargo build -p collab

      - name: Build other binaries and features
        run: |
          cargo build --workspace --bins --all-features
          cargo check -p gpui --features "macos-blade"
          cargo check -p workspace
          cargo build -p remote_server
          cargo check -p gpui --examples

      # Since the macOS runners are stateful, so we need to remove the config file to prevent potential bug.
      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo

  linux_tests:
    timeout-minutes: 60
    name: (Linux) Run Clippy and tests
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - namespace-profile-16x32-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          # cache-provider: "buildjet"

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: cargo clippy
        run: ./script/clippy

      - name: Run tests
        uses: ./.github/actions/run_tests

      - name: Build other binaries and features
        run: |
          cargo build -p zed
          cargo check -p workspace
          cargo check -p gpui --examples

      # Even the Linux runner is not stateful, in theory there is no need to do this cleanup.
      # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
      # to clean up the config file, I’ve included the cleanup code here as a precaution.
      # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution.
      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo

  doctests:
    # Nextest currently doesn't support doctests, so run them separately and in parallel.
    timeout-minutes: 60
    name: (Linux) Run doctests
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - namespace-profile-16x32-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          # cache-provider: "buildjet"

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Run doctests
        run: cargo test --workspace --doc --no-fail-fast

      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo

  build_remote_server:
    timeout-minutes: 60
    name: (Linux) Build Remote Server
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - namespace-profile-16x32-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          # cache-provider: "buildjet"

      - name: Install Clang & Mold
        run: ./script/remote-server && ./script/install-mold 2.34.0

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Build Remote Server
        run: cargo build -p remote_server

      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo

  windows_tests:
    timeout-minutes: 60
    name: (Windows) Run Clippy and tests
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on: [self-32vcpu-windows-2022]
    steps:
      - name: Environment Setup
        run: |
          $RunnerDir = Split-Path -Parent $env:RUNNER_WORKSPACE
          Write-Output `
            "RUSTUP_HOME=$RunnerDir\.rustup" `
            "CARGO_HOME=$RunnerDir\.cargo" `
            "PATH=$RunnerDir\.cargo\bin;$env:PATH" `
            >> $env:GITHUB_ENV

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Configure CI
        run: |
          New-Item -ItemType Directory -Path "./../.cargo" -Force
          Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"

      - name: cargo clippy
        run: |
          .\script\clippy.ps1

      - name: Run tests
        uses: ./.github/actions/run_tests_windows

      - name: Build Zed
        run: cargo build

      - name: Limit target directory size
        run: ./script/clear-target-dir-if-larger-than.ps1 250

      - name: Clean CI config file
        if: always()
        run: Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue

  tests_pass:
    name: Tests Pass
    runs-on: namespace-profile-2x4-ubuntu-2404
    needs:
      - job_spec
      - style
      - check_docs
      - actionlint
      - migration_checks
      # run_tests: If adding required tests, add them here and to script below.
      - linux_tests
      - build_remote_server
      - macos_tests
      - windows_tests
    if: |
      github.repository_owner == 'zed-industries' &&
      always()
    steps:
      - name: Check all tests passed
        run: |
          # Check dependent jobs...
          RET_CODE=0
          # Always check style
          [[ "${{ needs.style.result }}" != 'success' ]] && { RET_CODE=1; echo "style tests failed"; }

          if [[ "${{ needs.job_spec.outputs.run_docs }}" == "true" ]]; then
            [[ "${{ needs.check_docs.result }}" != 'success' ]] && { RET_CODE=1; echo "docs checks failed"; }
          fi

          if [[ "${{ needs.job_spec.outputs.run_actionlint }}" == "true" ]]; then
            [[ "${{ needs.actionlint.result }}" != 'success' ]] && { RET_CODE=1; echo "actionlint checks failed"; }
          fi

          # Only check test jobs if they were supposed to run
          if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then
            [[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
            [[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
            [[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
            [[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
            # This check is intentionally disabled. See: https://github.com/zed-industries/zed/pull/28431
            # [[ "${{ needs.migration_checks.result }}" != 'success' ]] && { RET_CODE=1; echo "Migration Checks failed"; }
          fi
          if [[ "$RET_CODE" -eq 0 ]]; then
            echo "All tests passed successfully!"
          fi
          exit $RET_CODE

  bundle-mac:
    timeout-minutes: 120
    name: Create a macOS bundle
    runs-on:
      - self-mini-macos
    if: |
      ( startsWith(github.ref, 'refs/tags/v')
        || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
    needs: [macos_tests]
    env:
      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
      MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
      APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
      APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
      APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
    steps:
      - name: Install Node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
        with:
          node-version: "18"

      - name: Setup Sentry CLI
        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
        with:
          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          # We need to fetch more than one commit so that `script/draft-release-notes`
          # is able to diff between the current and previous tag.
          #
          # 25 was chosen arbitrarily.
          fetch-depth: 25
          clean: false
          ref: ${{ github.ref }}

      - name: Limit target directory size
        run: script/clear-target-dir-if-larger-than 100

      - name: Determine version and release channel
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel

      - name: Draft release notes
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        run: |
          mkdir -p target/
          # Ignore any errors that occur while drafting release notes to not fail the build.
          script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true
          script/create-draft-release target/release-notes.md
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Create macOS app bundle
        run: script/bundle-mac

      - name: Rename binaries
        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        run: |
          mv target/aarch64-apple-darwin/release/Zed.dmg target/aarch64-apple-darwin/release/Zed-aarch64.dmg
          mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg

      - name: Upload app bundle (aarch64) to workflow run if main branch or specific label
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        with:
          name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
          path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg

      - name: Upload app bundle (x86_64) to workflow run if main branch or specific label
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        with:
          name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
          path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg

      - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        name: Upload app bundle to release
        if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            target/zed-remote-server-macos-x86_64.gz
            target/zed-remote-server-macos-aarch64.gz
            target/aarch64-apple-darwin/release/Zed-aarch64.dmg
            target/x86_64-apple-darwin/release/Zed-x86_64.dmg
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  bundle-linux-x86_x64:
    timeout-minutes: 60
    name: Linux x86_x64 release bundle
    runs-on:
      - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc
    if: |
      ( startsWith(github.ref, 'refs/tags/v')
        || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
    needs: [linux_tests]
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Install Linux dependencies
        run: ./script/linux && ./script/install-mold 2.34.0

      - name: Setup Sentry CLI
        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
        with:
          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}

      - name: Determine version and release channel
        if: startsWith(github.ref, 'refs/tags/v')
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel

      - name: Create Linux .tar.gz bundle
        run: script/bundle-linux

      - name: Upload Artifact to Workflow - zed (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
          path: target/release/zed-*.tar.gz

      - name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.gz
          path: target/zed-remote-server-linux-x86_64.gz

      - name: Upload Artifacts to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            target/zed-remote-server-linux-x86_64.gz
            target/release/zed-linux-x86_64.tar.gz
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  bundle-linux-aarch64: # this runs on ubuntu22.04
    timeout-minutes: 60
    name: Linux arm64 release bundle
    runs-on:
      - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc
    if: |
      startsWith(github.ref, 'refs/tags/v')
      || contains(github.event.pull_request.labels.*.name, 'run-bundling')
    needs: [linux_tests]
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Setup Sentry CLI
        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
        with:
          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}

      - name: Determine version and release channel
        if: startsWith(github.ref, 'refs/tags/v')
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel

      - name: Create and upload Linux .tar.gz bundles
        run: script/bundle-linux

      - name: Upload Artifact to Workflow - zed (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
          path: target/release/zed-*.tar.gz

      - name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.gz
          path: target/zed-remote-server-linux-aarch64.gz

      - name: Upload Artifacts to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            target/zed-remote-server-linux-aarch64.gz
            target/release/zed-linux-aarch64.tar.gz
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  freebsd:
    timeout-minutes: 60
    runs-on: github-8vcpu-ubuntu-2404
    if: |
      false && ( startsWith(github.ref, 'refs/tags/v')
      || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
    needs: [linux_tests]
    name: Build Zed on FreeBSD
    steps:
      - uses: actions/checkout@v4
      - name: Build FreeBSD remote-server
        id: freebsd-build
        uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
        with:
          usesh: true
          release: 13.5
          copyback: true
          prepare: |
            pkg install -y \
              bash curl jq git \
              rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
          run: |
            freebsd-version
            sysctl hw.model
            sysctl hw.ncpu
            sysctl hw.physmem
            sysctl hw.usermem
            git config --global --add safe.directory /home/runner/work/zed/zed
            rustup-init --profile minimal --default-toolchain none -y
            . "$HOME/.cargo/env"
            ./script/bundle-freebsd
            mkdir -p out/
            mv "target/zed-remote-server-freebsd-x86_64.gz" out/
            rm -rf target/
            cargo clean

      - name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-freebsd.gz
          path: out/zed-remote-server-freebsd-x86_64.gz

      - name: Upload Artifacts to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: |
            out/zed-remote-server-freebsd-x86_64.gz
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  nix-build:
    name: Build with Nix
    uses: ./.github/workflows/nix.yml
    needs: [job_spec]
    if: github.repository_owner == 'zed-industries' &&
      (contains(github.event.pull_request.labels.*.name, 'run-nix') ||
      needs.job_spec.outputs.run_nix == 'true')
    secrets: inherit
    with:
      flake-output: debug
      # excludes the final package to only cache dependencies
      cachix-filter: "-zed-editor-[0-9.]*-nightly"

  bundle-windows-x64:
    timeout-minutes: 120
    name: Create a Windows installer
    runs-on: [self-32vcpu-windows-2022]
    if: |
      ( startsWith(github.ref, 'refs/tags/v')
        || contains(github.event.pull_request.labels.*.name, 'run-bundling') )
    needs: [windows_tests]
    env:
      AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
      AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
      AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
      ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
      CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
      ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
      FILE_DIGEST: SHA256
      TIMESTAMP_DIGEST: SHA256
      TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Setup Sentry CLI
        uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
        with:
          token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}

      - name: Determine version and release channel
        working-directory: ${{ env.ZED_WORKSPACE }}
        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel.ps1

      - name: Build Zed installer
        working-directory: ${{ env.ZED_WORKSPACE }}
        run: script/bundle-windows.ps1

      - name: Upload installer (x86_64) to Workflow - zed (run-bundling)
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
          path: ${{ env.SETUP_PATH }}

      - name: Upload Artifacts to release
        uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
        if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
        with:
          draft: true
          prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
          files: ${{ env.SETUP_PATH }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  auto-release-preview:
    name: Auto release preview
    if: |
      false
      && startsWith(github.ref, 'refs/tags/v')
      && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
    needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64]
    runs-on:
      - self-mini-macos
    steps:
      - name: gh release
        run: gh release edit "$GITHUB_REF_NAME" --draft=false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Create Sentry release
        uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3
        env:
          SENTRY_ORG: zed-dev
          SENTRY_PROJECT: zed
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
        with:
          environment: production
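The `job_spec` filter in the removed ci.yml decides `run_tests` by checking whether any changed file falls outside the skip list. A small sketch of that grep logic, run against hypothetical file paths, shows the behaviour:

```bash
SKIP_REGEX='^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'

# Only docs and non-ci workflow changes: every path matches the skip regex,
# so `grep -qvP` finds no non-matching line and run_tests stays false.
printf 'docs/src/vim.md\n.github/workflows/release.yml\n' \
  | grep -qvP "$SKIP_REGEX" && echo "run_tests=true" || echo "run_tests=false"

# Adding a source file produces one non-matching line, flipping run_tests to true.
printf 'docs/src/vim.md\ncrates/zed/src/main.rs\n' \
  | grep -qvP "$SKIP_REGEX" && echo "run_tests=true" || echo "run_tests=false"
```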
78  .github/workflows/compare_perf.yml  (vendored, new file)

@@ -0,0 +1,78 @@
# Generated from xtask::workflows::compare_perf
# Rebuild with `cargo xtask workflows`.
name: compare_perf
on:
  workflow_dispatch:
    inputs:
      head:
        description: head
        required: true
        type: string
      base:
        description: base
        required: true
        type: string
      crate_name:
        description: crate_name
        type: string
        default: ''
jobs:
  run_perf:
    runs-on: namespace-profile-16x32-ubuntu-2204
    steps:
      - name: steps::checkout_repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          clean: false
      - name: steps::setup_cargo_config
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml
        shell: bash -euxo pipefail {0}
      - name: steps::setup_linux
        run: ./script/linux
        shell: bash -euxo pipefail {0}
      - name: steps::install_mold
        run: ./script/install-mold
        shell: bash -euxo pipefail {0}
      - name: compare_perf::run_perf::install_hyperfine
        run: cargo install hyperfine
        shell: bash -euxo pipefail {0}
      - name: steps::git_checkout
        run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }}
        shell: bash -euxo pipefail {0}
      - name: compare_perf::run_perf::cargo_perf_test
        run: |2-

          if [ -n "${{ inputs.crate_name }}" ]; then
            cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.base }};
          else
            cargo perf-test -p vim -- --json=${{ inputs.base }};
          fi
        shell: bash -euxo pipefail {0}
      - name: steps::git_checkout
        run: git fetch origin ${{ inputs.head }} && git checkout ${{ inputs.head }}
        shell: bash -euxo pipefail {0}
      - name: compare_perf::run_perf::cargo_perf_test
        run: |2-

          if [ -n "${{ inputs.crate_name }}" ]; then
            cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.head }};
          else
            cargo perf-test -p vim -- --json=${{ inputs.head }};
          fi
        shell: bash -euxo pipefail {0}
      - name: compare_perf::run_perf::compare_runs
        run: cargo perf-compare --save=results.md ${{ inputs.base }} ${{ inputs.head }}
        shell: bash -euxo pipefail {0}
      - name: '@actions/upload-artifact results.md'
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
        with:
          name: results.md
          path: results.md
          if-no-files-found: error
      - name: steps::cleanup_cargo_config
        if: always()
        run: |
          rm -rf ./../.cargo
        shell: bash -euxo pipefail {0}
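compare_perf is likewise dispatch-only. A hedged example of kicking it off from the GitHub CLI is below; the two SHAs are placeholders, and per the workflow above `crate_name` may be left empty to fall back to the `vim` crate.

```bash
# Run the perf comparison between two commits; crate_name is optional.
# The base and head values here are placeholder SHAs.
gh workflow run compare_perf.yml \
  -f base=1111111 \
  -f head=2222222 \
  -f crate_name=vim
```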
66  .github/workflows/danger.yml  (vendored)

@@ -1,42 +1,40 @@
name: Danger

# Generated from xtask::workflows::danger
# Rebuild with `cargo xtask workflows`.
name: danger
on:
  pull_request:
    branches: [main]
    types:
      - opened
      - synchronize
      - reopened
      - edited

      - opened
      - synchronize
      - reopened
      - edited
    branches:
      - main
jobs:
  danger:
    if: github.repository_owner == 'zed-industries'
    runs-on: namespace-profile-2x4-ubuntu-2404

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

      - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
        with:
          version: 9

      - name: Setup Node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
        with:
          node-version: "20"
          cache: "pnpm"
          cache-dependency-path: "script/danger/pnpm-lock.yaml"

      - run: pnpm install --dir script/danger

      - name: Run Danger
        run: pnpm run --dir script/danger danger ci
        env:
          # This GitHub token is not used, but the value needs to be here to prevent
          # Danger from throwing an error.
          GITHUB_TOKEN: "not_a_real_token"
          # All requests are instead proxied through an instance of
          # https://github.com/maxdeviant/danger-proxy that allows Danger to securely
          # authenticate with GitHub while still being able to run on PRs from forks.
          DANGER_GITHUB_API_BASE_URL: "https://danger-proxy.fly.dev/github"
      - name: steps::checkout_repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          clean: false
      - name: steps::setup_pnpm
        uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
        with:
          version: '9'
      - name: steps::setup_node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
        with:
          node-version: '20'
          cache: pnpm
          cache-dependency-path: script/danger/pnpm-lock.yaml
      - name: danger::danger_job::install_deps
        run: pnpm install --dir script/danger
        shell: bash -euxo pipefail {0}
      - name: danger::danger_job::run
        run: pnpm run --dir script/danger danger ci
        shell: bash -euxo pipefail {0}
        env:
          GITHUB_TOKEN: not_a_real_token
          DANGER_GITHUB_API_BASE_URL: https://danger-proxy.fly.dev/github
2  .github/workflows/deploy_collab.yml  (vendored)

@@ -49,7 +49,7 @@ jobs:

      - name: Limit target directory size
        shell: bash -euxo pipefail {0}
        run: script/clear-target-dir-if-larger-than 100
        run: script/clear-target-dir-if-larger-than 300

      - name: Run tests
        shell: bash -euxo pipefail {0}
71  .github/workflows/eval.yml  (vendored)

@@ -1,71 +0,0 @@
name: Run Agent Eval

on:
  schedule:
    - cron: "0 0 * * *"

  pull_request:
    branches:
      - "**"
    types: [synchronize, reopened, labeled]

  workflow_dispatch:

concurrency:
  # Allow only one workflow per any non-`main` branch.
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
  cancel-in-progress: true

env:
  CARGO_TERM_COLOR: always
  CARGO_INCREMENTAL: 0
  RUST_BACKTRACE: 1
  ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
  ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
  ZED_EVAL_TELEMETRY: 1

jobs:
  run_eval:
    timeout-minutes: 60
    name: Run Agent Eval
    if: >
      github.repository_owner == 'zed-industries' &&
      (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
    runs-on:
      - namespace-profile-16x32-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      - name: Cache dependencies
        uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
        with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
          # cache-provider: "buildjet"

      - name: Install Linux dependencies
        run: ./script/linux

      - name: Configure CI
        run: |
          mkdir -p ./../.cargo
          cp ./.cargo/ci-config.toml ./../.cargo/config.toml

      - name: Compile eval
        run: cargo build --package=eval

      - name: Run eval
        run: cargo run --package=eval -- --repetitions=8 --concurrency=1

      # Even the Linux runner is not stateful, in theory there is no need to do this cleanup.
      # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
      # to clean up the config file, I’ve included the cleanup code here as a precaution.
      # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution.
      - name: Clean CI config file
        if: always()
        run: rm -rf ./../.cargo
69  .github/workflows/nix.yml  (vendored)

@@ -1,69 +0,0 @@
name: "Nix build"

on:
  workflow_call:
    inputs:
      flake-output:
        type: string
        default: "default"
      cachix-filter:
        type: string
        default: ""

jobs:
  nix-build:
    timeout-minutes: 60
    name: (${{ matrix.system.os }}) Nix Build
    continue-on-error: true # TODO: remove when we want this to start blocking CI
    strategy:
      fail-fast: false
      matrix:
        system:
          - os: x86 Linux
            runner: namespace-profile-16x32-ubuntu-2204
            install_nix: true
          - os: arm Mac
            runner: [macOS, ARM64, test]
            install_nix: false
    if: github.repository_owner == 'zed-industries'
    runs-on: ${{ matrix.system.runner }}
    env:
      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
      ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
      GIT_LFS_SKIP_SMUDGE: 1 # breaks the livekit rust sdk examples which we don't actually depend on
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          clean: false

      # on our macs we manually install nix. for some reason the cachix action is running
      # under a non-login /bin/bash shell which doesn't source the proper script to add the
      # nix profile to PATH, so we manually add them here
      - name: Set path
        if: ${{ ! matrix.system.install_nix }}
        run: |
          echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
          echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"

      - uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
        if: ${{ matrix.system.install_nix }}
        with:
          github_access_token: ${{ secrets.GITHUB_TOKEN }}

      - uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
        with:
          name: zed
          authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
          pushFilter: "${{ inputs.cachix-filter }}"
          cachixArgs: "-v"

      - run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config

      - name: Limit /nix/store to 50GB on macs
        if: ${{ ! matrix.system.install_nix }}
        run: |
          if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
            nix-collect-garbage -d || true
          fi
488
.github/workflows/release.yml
vendored
Normal file
488
.github/workflows/release.yml
vendored
Normal file
@@ -0,0 +1,488 @@
|
||||
# Generated from xtask::workflows::release
|
||||
# Rebuild with `cargo xtask workflows`.
|
||||
name: release
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUST_BACKTRACE: '1'
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
jobs:
|
||||
run_tests_mac:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: self-mini-macos
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
run_tests_linux:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-16x32-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 250
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
run_tests_windows:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
New-Item -ItemType Directory -Path "./../.cargo" -Force
|
||||
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
|
||||
shell: pwsh
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy.ps1
|
||||
shell: pwsh
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: pwsh
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 250
|
||||
shell: pwsh
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: pwsh
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
|
||||
shell: pwsh
|
||||
timeout-minutes: 60
|
||||
check_scripts:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_tests::check_scripts::run_shellcheck
|
||||
run: ./script/shellcheck-scripts error
|
||||
shell: bash -euxo pipefail {0}
|
||||
- id: get_actionlint
|
||||
name: run_tests::check_scripts::download_actionlint
|
||||
run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_scripts::run_actionlint
|
||||
run: |
|
||||
${{ steps.get_actionlint.outputs.executable }} -color
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_scripts::check_xtask_workflows
|
||||
run: |
|
||||
cargo xtask workflows
|
||||
if ! git diff --exit-code .github; then
|
||||
echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
|
||||
echo "Please run 'cargo xtask workflows' locally and commit the changes"
|
||||
exit 1
|
||||
fi
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
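Note: the check above only verifies that the committed workflows match the xtask generator; it does not regenerate them. A minimal local sketch of the fix it asks for, assuming a repository checkout with cargo available (the commit message is illustrative):

    cargo xtask workflows            # regenerate .github/workflows/*.yml
    git diff --exit-code .github     # non-zero exit means the generated files changed
    git add .github && git commit -m "Regenerate workflows"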
|
||||
create_draft_release:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 25
|
||||
ref: ${{ github.ref }}
|
||||
- name: script/determine-release-channel
|
||||
run: script/determine-release-channel
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: mkdir -p target/
|
||||
run: mkdir -p target/
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: release::create_draft_release::generate_release_notes
|
||||
run: node --redirect-warnings=/dev/null ./script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: release::create_draft_release::create_release
|
||||
run: script/create-draft-release target/release-notes.md
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
timeout-minutes: 60
|
||||
bundle_linux_aarch64:
|
||||
needs:
|
||||
- run_tests_linux
|
||||
- check_scripts
|
||||
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-aarch64.tar.gz
|
||||
path: target/release/zed-linux-aarch64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-aarch64.gz
|
||||
path: target/zed-remote-server-linux-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_linux_x86_64:
|
||||
needs:
|
||||
- run_tests_linux
|
||||
- check_scripts
|
||||
runs-on: namespace-profile-32x64-ubuntu-2004
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-x86_64.tar.gz
|
||||
path: target/release/zed-linux-x86_64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-x86_64.gz
|
||||
path: target/zed-remote-server-linux-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_mac_aarch64:
|
||||
needs:
|
||||
- run_tests_mac
|
||||
- check_scripts
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_bundling::bundle_mac::bundle_mac
|
||||
run: ./script/bundle-mac aarch64-apple-darwin
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact Zed-aarch64.dmg'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-aarch64.dmg
|
||||
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-macos-aarch64.gz
|
||||
path: target/zed-remote-server-macos-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_mac_x86_64:
|
||||
needs:
|
||||
- run_tests_mac
|
||||
- check_scripts
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_bundling::bundle_mac::bundle_mac
|
||||
run: ./script/bundle-mac x86_64-apple-darwin
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact Zed-x86_64.dmg'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-x86_64.dmg
|
||||
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-macos-x86_64.gz
|
||||
path: target/zed-remote-server-macos-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_windows_aarch64:
|
||||
needs:
|
||||
- run_tests_windows
|
||||
- check_scripts
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: run_bundling::bundle_windows::bundle_windows
|
||||
run: script/bundle-windows.ps1 -Architecture aarch64
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: '@actions/upload-artifact Zed-aarch64.exe'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-aarch64.exe
|
||||
path: target/Zed-aarch64.exe
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_windows_x86_64:
|
||||
needs:
|
||||
- run_tests_windows
|
||||
- check_scripts
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: run_bundling::bundle_windows::bundle_windows
|
||||
run: script/bundle-windows.ps1 -Architecture x86_64
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: '@actions/upload-artifact Zed-x86_64.exe'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-x86_64.exe
|
||||
path: target/Zed-x86_64.exe
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
upload_release_assets:
|
||||
needs:
|
||||
- create_draft_release
|
||||
- bundle_linux_aarch64
|
||||
- bundle_linux_x86_64
|
||||
- bundle_mac_aarch64
|
||||
- bundle_mac_x86_64
|
||||
- bundle_windows_aarch64
|
||||
- bundle_windows_x86_64
|
||||
runs-on: namespace-profile-4x8-ubuntu-2204
|
||||
steps:
|
||||
- name: release::download_workflow_artifacts
|
||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
|
||||
with:
|
||||
path: ./artifacts/
|
||||
- name: ls -lR ./artifacts
|
||||
run: ls -lR ./artifacts
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: release::prep_release_artifacts
|
||||
run: |-
|
||||
mkdir -p release-artifacts/
|
||||
|
||||
mv ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg release-artifacts/Zed-aarch64.dmg
|
||||
mv ./artifacts/Zed-x86_64.dmg/Zed-x86_64.dmg release-artifacts/Zed-x86_64.dmg
|
||||
mv ./artifacts/zed-linux-aarch64.tar.gz/zed-linux-aarch64.tar.gz release-artifacts/zed-linux-aarch64.tar.gz
|
||||
mv ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz release-artifacts/zed-linux-x86_64.tar.gz
|
||||
mv ./artifacts/Zed-x86_64.exe/Zed-x86_64.exe release-artifacts/Zed-x86_64.exe
|
||||
mv ./artifacts/Zed-aarch64.exe/Zed-aarch64.exe release-artifacts/Zed-aarch64.exe
|
||||
mv ./artifacts/zed-remote-server-macos-aarch64.gz/zed-remote-server-macos-aarch64.gz release-artifacts/zed-remote-server-macos-aarch64.gz
|
||||
mv ./artifacts/zed-remote-server-macos-x86_64.gz/zed-remote-server-macos-x86_64.gz release-artifacts/zed-remote-server-macos-x86_64.gz
|
||||
mv ./artifacts/zed-remote-server-linux-aarch64.gz/zed-remote-server-linux-aarch64.gz release-artifacts/zed-remote-server-linux-aarch64.gz
|
||||
mv ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz release-artifacts/zed-remote-server-linux-x86_64.gz
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/*
|
||||
run: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/*
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
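Note: because release::download_workflow_artifacts runs actions/download-artifact with only a path and no artifact name, each artifact is downloaded into its own subdirectory named after the artifact, which is why prep_release_artifacts moves ./artifacts/<name>/<name> into a flat release-artifacts/ directory before uploading. A hypothetical, more generic flattening of that same layout (illustrative only, not the script used here):

    mkdir -p release-artifacts/
    # each downloaded artifact sits in ./artifacts/<artifact-name>/
    for dir in ./artifacts/*/; do
      mv "$dir"* release-artifacts/
    done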
|
||||
auto_release_preview:
|
||||
needs:
|
||||
- upload_release_assets
|
||||
if: startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false
|
||||
run: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: release::create_sentry_release
|
||||
uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
|
||||
with:
|
||||
environment: production
|
||||
env:
|
||||
SENTRY_ORG: zed-dev
|
||||
SENTRY_PROJECT: zed
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
|
||||
cancel-in-progress: true
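Note: the concurrency settings above appear intended so that pushes to main never cancel each other, while any other ref keeps only the newest run: on main the group key includes the commit SHA, elsewhere it collapses to the fixed suffix 'anysha'. Roughly, the group evaluates to:

    <workflow>-main-<commit sha>     # main: unique per push, nothing is cancelled
    <workflow>-<ref name>-anysha     # other refs: shared group, newer run cancels older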
|
||||
752
.github/workflows/release_nightly.yml
vendored

@@ -1,256 +1,276 @@
|
||||
name: Release Nightly
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
|
||||
- cron: "0 7 * * *"
|
||||
push:
|
||||
tags:
|
||||
- "nightly"
|
||||
|
||||
# Generated from xtask::workflows::release_nightly
|
||||
# Rebuild with `cargo xtask workflows`.
|
||||
name: release_nightly
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUST_BACKTRACE: 1
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
|
||||
RUST_BACKTRACE: '1'
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- nightly
|
||||
schedule:
|
||||
- cron: 0 7 * * *
|
||||
jobs:
|
||||
style:
|
||||
timeout-minutes: 60
|
||||
name: Check formatting and Clippy lints
|
||||
check_style:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- self-hosted
|
||||
- macOS
|
||||
runs-on: self-mini-macos
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Run style checks
|
||||
uses: ./.github/actions/check_style
|
||||
|
||||
- name: Run clippy
|
||||
run: ./script/clippy
|
||||
|
||||
tests:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
- name: steps::cargo_fmt
|
||||
run: cargo fmt --all -- --check
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
name: Run tests
|
||||
run_tests_windows:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- self-hosted
|
||||
- macOS
|
||||
needs: style
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Run tests
|
||||
uses: ./.github/actions/run_tests
|
||||
|
||||
windows-tests:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
New-Item -ItemType Directory -Path "./../.cargo" -Force
|
||||
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
|
||||
shell: pwsh
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy.ps1
|
||||
shell: pwsh
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: pwsh
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 250
|
||||
shell: pwsh
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: pwsh
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
|
||||
shell: pwsh
|
||||
timeout-minutes: 60
|
||||
name: Run tests on Windows
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: [self-32vcpu-windows-2022]
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Configure CI
|
||||
run: |
|
||||
New-Item -ItemType Directory -Path "./../.cargo" -Force
|
||||
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
|
||||
|
||||
- name: Run tests
|
||||
uses: ./.github/actions/run_tests_windows
|
||||
|
||||
- name: Limit target directory size
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 1024
|
||||
|
||||
- name: Clean CI config file
|
||||
if: always()
|
||||
run: Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
|
||||
|
||||
bundle-mac:
|
||||
timeout-minutes: 60
|
||||
name: Create a macOS bundle
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- self-mini-macos
|
||||
needs: tests
|
||||
bundle_linux_aarch64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
set -eu
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-aarch64.tar.gz
|
||||
path: target/release/zed-linux-aarch64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-aarch64.gz
|
||||
path: target/zed-remote-server-linux-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_linux_x86_64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: namespace-profile-32x64-ubuntu-2004
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
set -eu
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-x86_64.tar.gz
|
||||
path: target/release/zed-linux-x86_64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-x86_64.gz
|
||||
path: target/zed-remote-server-linux-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_mac_aarch64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
with:
|
||||
node-version: "18"
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Set release channel to nightly
|
||||
run: |
|
||||
set -eu
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
|
||||
- name: Setup Sentry CLI
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
|
||||
with:
|
||||
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
- name: Create macOS app bundle
|
||||
run: script/bundle-mac
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
run: script/upload-nightly macos
|
||||
|
||||
bundle-linux-x86:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
set -eu
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_bundling::bundle_mac::bundle_mac
|
||||
run: ./script/bundle-mac aarch64-apple-darwin
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact Zed-aarch64.dmg'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-aarch64.dmg
|
||||
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-macos-aarch64.gz
|
||||
path: target/zed-remote-server-macos-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
name: Create a Linux *.tar.gz bundle for x86
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc
|
||||
needs: tests
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Add Rust to the PATH
|
||||
run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
|
||||
|
||||
- name: Install Linux dependencies
|
||||
run: ./script/linux && ./script/install-mold 2.34.0
|
||||
|
||||
- name: Setup Sentry CLI
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
|
||||
with:
|
||||
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
- name: Limit target directory size
|
||||
run: script/clear-target-dir-if-larger-than 100
|
||||
|
||||
- name: Set release channel to nightly
|
||||
run: |
|
||||
set -euo pipefail
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
|
||||
- name: Create Linux .tar.gz bundle
|
||||
run: script/bundle-linux
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
run: script/upload-nightly linux-targz
|
||||
|
||||
bundle-linux-arm:
|
||||
timeout-minutes: 60
|
||||
name: Create a Linux *.tar.gz bundle for ARM
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc
|
||||
needs: tests
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Install Linux dependencies
|
||||
run: ./script/linux
|
||||
|
||||
- name: Setup Sentry CLI
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
|
||||
with:
|
||||
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
- name: Limit target directory size
|
||||
run: script/clear-target-dir-if-larger-than 100
|
||||
|
||||
- name: Set release channel to nightly
|
||||
run: |
|
||||
set -euo pipefail
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
|
||||
- name: Create Linux .tar.gz bundle
|
||||
run: script/bundle-linux
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
run: script/upload-nightly linux-targz
|
||||
|
||||
freebsd:
|
||||
timeout-minutes: 60
|
||||
if: false && github.repository_owner == 'zed-industries'
|
||||
runs-on: github-8vcpu-ubuntu-2404
|
||||
needs: tests
|
||||
name: Build Zed on FreeBSD
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build FreeBSD remote-server
|
||||
id: freebsd-build
|
||||
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
|
||||
with:
|
||||
# envs: "MYTOKEN MYTOKEN2"
|
||||
usesh: true
|
||||
release: 13.5
|
||||
copyback: true
|
||||
prepare: |
|
||||
pkg install -y \
|
||||
bash curl jq git \
|
||||
rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
|
||||
run: |
|
||||
freebsd-version
|
||||
sysctl hw.model
|
||||
sysctl hw.ncpu
|
||||
sysctl hw.physmem
|
||||
sysctl hw.usermem
|
||||
git config --global --add safe.directory /home/runner/work/zed/zed
|
||||
rustup-init --profile minimal --default-toolchain none -y
|
||||
. "$HOME/.cargo/env"
|
||||
./script/bundle-freebsd
|
||||
mkdir -p out/
|
||||
mv "target/zed-remote-server-freebsd-x86_64.gz" out/
|
||||
rm -rf target/
|
||||
cargo clean
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
run: script/upload-nightly freebsd
|
||||
|
||||
bundle-nix:
|
||||
name: Build and cache Nix package
|
||||
needs: tests
|
||||
secrets: inherit
|
||||
uses: ./.github/workflows/nix.yml
|
||||
|
||||
bundle-windows-x64:
|
||||
timeout-minutes: 60
|
||||
name: Create a Windows installer
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: [self-32vcpu-windows-2022]
|
||||
needs: windows-tests
|
||||
bundle_mac_x86_64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
set -eu
|
||||
version=$(git rev-parse --short HEAD)
|
||||
echo "Publishing version: ${version} on release channel nightly"
|
||||
echo "nightly" > crates/zed/RELEASE_CHANNEL
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_bundling::bundle_mac::bundle_mac
|
||||
run: ./script/bundle-mac x86_64-apple-darwin
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact Zed-x86_64.dmg'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-x86_64.dmg
|
||||
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-macos-x86_64.gz
|
||||
path: target/zed-remote-server-macos-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_windows_aarch64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
@@ -259,65 +279,211 @@ jobs:
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
|
||||
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Set release channel to nightly
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$version = git rev-parse --short HEAD
|
||||
Write-Host "Publishing version: $version on release channel nightly"
|
||||
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
|
||||
|
||||
- name: Setup Sentry CLI
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
|
||||
with:
|
||||
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
|
||||
|
||||
- name: Build Zed installer
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
run: script/bundle-windows.ps1
|
||||
|
||||
- name: Upload Zed Nightly
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
run: script/upload-nightly.ps1 windows
|
||||
|
||||
update-nightly-tag:
|
||||
name: Update nightly tag
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$version = git rev-parse --short HEAD
|
||||
Write-Host "Publishing version: $version on release channel nightly"
|
||||
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: run_bundling::bundle_windows::bundle_windows
|
||||
run: script/bundle-windows.ps1 -Architecture aarch64
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: '@actions/upload-artifact Zed-aarch64.exe'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-aarch64.exe
|
||||
path: target/Zed-aarch64.exe
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_windows_x86_64:
|
||||
needs:
|
||||
- bundle-mac
|
||||
- bundle-linux-x86
|
||||
- bundle-linux-arm
|
||||
- bundle-windows-x64
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: run_bundling::set_release_channel_to_nightly
|
||||
run: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$version = git rev-parse --short HEAD
|
||||
Write-Host "Publishing version: $version on release channel nightly"
|
||||
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: run_bundling::bundle_windows::bundle_windows
|
||||
run: script/bundle-windows.ps1 -Architecture x86_64
|
||||
shell: pwsh
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
- name: '@actions/upload-artifact Zed-x86_64.exe'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: Zed-x86_64.exe
|
||||
path: target/Zed-x86_64.exe
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
build_nix_linux_x86_64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-32x64-ubuntu-2004
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
GIT_LFS_SKIP_SMUDGE: '1'
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: nix_build::build_nix::install_nix
|
||||
uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
|
||||
with:
|
||||
github_access_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: nix_build::build_nix::cachix_action
|
||||
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
|
||||
with:
|
||||
name: zed
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
cachixArgs: -v
|
||||
- name: nix_build::build_nix::build
|
||||
run: nix build .#default -L --accept-flake-config
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
continue-on-error: true
|
||||
build_nix_mac_aarch64:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
GIT_LFS_SKIP_SMUDGE: '1'
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: nix_build::build_nix::set_path
|
||||
run: |
|
||||
echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
|
||||
echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: nix_build::build_nix::cachix_action
|
||||
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
|
||||
with:
|
||||
name: zed
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
cachixArgs: -v
|
||||
- name: nix_build::build_nix::build
|
||||
run: nix build .#default -L --accept-flake-config
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: nix_build::build_nix::limit_store
|
||||
run: |-
|
||||
if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
|
||||
nix-collect-garbage -d || true
|
||||
fi
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
continue-on-error: true
|
||||
update_nightly_tag:
|
||||
needs:
|
||||
- bundle_linux_aarch64
|
||||
- bundle_linux_x86_64
|
||||
- bundle_mac_aarch64
|
||||
- bundle_mac_x86_64
|
||||
- bundle_windows_aarch64
|
||||
- bundle_windows_x86_64
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: namespace-profile-4x8-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
fetch-depth: 0
|
||||
- name: release::download_workflow_artifacts
|
||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
|
||||
with:
|
||||
path: ./artifacts/
|
||||
- name: ls -lR ./artifacts
|
||||
run: ls -lR ./artifacts
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: release::prep_release_artifacts
|
||||
run: |-
|
||||
mkdir -p release-artifacts/
|
||||
|
||||
- name: Update nightly tag
|
||||
run: |
|
||||
if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
|
||||
echo "Nightly tag already points to current commit. Skipping tagging."
|
||||
exit 0
|
||||
fi
|
||||
git config user.name github-actions
|
||||
git config user.email github-actions@github.com
|
||||
git tag -f nightly
|
||||
git push origin nightly --force
|
||||
|
||||
- name: Create Sentry release
|
||||
uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3
|
||||
env:
|
||||
SENTRY_ORG: zed-dev
|
||||
SENTRY_PROJECT: zed
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
with:
|
||||
environment: production
|
||||
mv ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg release-artifacts/Zed-aarch64.dmg
|
||||
mv ./artifacts/Zed-x86_64.dmg/Zed-x86_64.dmg release-artifacts/Zed-x86_64.dmg
|
||||
mv ./artifacts/zed-linux-aarch64.tar.gz/zed-linux-aarch64.tar.gz release-artifacts/zed-linux-aarch64.tar.gz
|
||||
mv ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz release-artifacts/zed-linux-x86_64.tar.gz
|
||||
mv ./artifacts/Zed-x86_64.exe/Zed-x86_64.exe release-artifacts/Zed-x86_64.exe
|
||||
mv ./artifacts/Zed-aarch64.exe/Zed-aarch64.exe release-artifacts/Zed-aarch64.exe
|
||||
mv ./artifacts/zed-remote-server-macos-aarch64.gz/zed-remote-server-macos-aarch64.gz release-artifacts/zed-remote-server-macos-aarch64.gz
|
||||
mv ./artifacts/zed-remote-server-macos-x86_64.gz/zed-remote-server-macos-x86_64.gz release-artifacts/zed-remote-server-macos-x86_64.gz
|
||||
mv ./artifacts/zed-remote-server-linux-aarch64.gz/zed-remote-server-linux-aarch64.gz release-artifacts/zed-remote-server-linux-aarch64.gz
|
||||
mv ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz release-artifacts/zed-remote-server-linux-x86_64.gz
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/upload-nightly
|
||||
run: ./script/upload-nightly
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
- name: release_nightly::update_nightly_tag_job::update_nightly_tag
|
||||
run: |
|
||||
if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
|
||||
echo "Nightly tag already points to current commit. Skipping tagging."
|
||||
exit 0
|
||||
fi
|
||||
git config user.name github-actions
|
||||
git config user.email github-actions@github.com
|
||||
git tag -f nightly
|
||||
git push origin nightly --force
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: release::create_sentry_release
|
||||
uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
|
||||
with:
|
||||
environment: production
|
||||
env:
|
||||
SENTRY_ORG: zed-dev
|
||||
SENTRY_PROJECT: zed
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
timeout-minutes: 60
|
||||
|
||||
62
.github/workflows/run_agent_evals.yml
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
# Generated from xtask::workflows::run_agent_evals
|
||||
# Rebuild with `cargo xtask workflows`.
|
||||
name: run_agent_evals
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
CARGO_INCREMENTAL: '0'
|
||||
RUST_BACKTRACE: '1'
|
||||
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_EVAL_TELEMETRY: '1'
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- synchronize
|
||||
- reopened
|
||||
- labeled
|
||||
branches:
|
||||
- '**'
|
||||
schedule:
|
||||
- cron: 0 0 * * *
|
||||
workflow_dispatch: {}
|
||||
jobs:
|
||||
agent_evals:
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
(github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
|
||||
runs-on: namespace-profile-16x32-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: cargo build --package=eval
|
||||
run: cargo build --package=eval
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_agent_evals::agent_evals::run_eval
|
||||
run: cargo run --package=eval -- --repetitions=8 --concurrency=1
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
run: |
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
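Note: as gated above, pull-request runs of the agent evals are skipped unless the PR carries the run-eval label; scheduled and manually dispatched runs always proceed. One way to trigger an eval run from the GitHub CLI, assuming permission to edit labels (the PR number is a placeholder):

    gh pr edit 12345 --add-label run-eval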
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
|
||||
cancel-in-progress: true
|
||||
263
.github/workflows/run_bundling.yml
vendored
Normal file
@@ -0,0 +1,263 @@
|
||||
# Generated from xtask::workflows::run_bundling
|
||||
# Rebuild with `cargo xtask workflows`.
|
||||
name: run_bundling
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUST_BACKTRACE: '1'
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- labeled
|
||||
- synchronize
|
||||
jobs:
|
||||
bundle_linux_aarch64:
|
||||
if: |-
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
|
||||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
|
||||
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-aarch64.tar.gz
|
||||
path: target/release/zed-linux-aarch64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-aarch64.gz
|
||||
path: target/zed-remote-server-linux-aarch64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_linux_x86_64:
|
||||
if: |-
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
|
||||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
|
||||
runs-on: namespace-profile-32x64-ubuntu-2004
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_sentry
|
||||
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
|
||||
with:
|
||||
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-linux-x86_64.tar.gz
|
||||
path: target/release/zed-linux-x86_64.tar.gz
|
||||
if-no-files-found: error
|
||||
- name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
|
||||
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
|
||||
with:
|
||||
name: zed-remote-server-linux-x86_64.gz
|
||||
path: target/zed-remote-server-linux-x86_64.gz
|
||||
if-no-files-found: error
|
||||
timeout-minutes: 60
|
||||
bundle_mac_aarch64:
|
||||
if: |-
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
|
||||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
clean: false
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
node-version: '20'
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac::bundle_mac
run: ./script/bundle-mac aarch64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed-aarch64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-macos-aarch64.gz
path: target/zed-remote-server-macos-aarch64.gz
if-no-files-found: error
timeout-minutes: 60
bundle_mac_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-mini-macos
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac::bundle_mac
run: ./script/bundle-mac x86_64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed-x86_64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-x86_64.dmg
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-macos-x86_64.gz
path: target/zed-remote-server-macos-x86_64.gz
if-no-files-found: error
timeout-minutes: 60
bundle_windows_aarch64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-32vcpu-windows-2022
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows::bundle_windows
run: script/bundle-windows.ps1 -Architecture aarch64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed-aarch64.exe'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-aarch64.exe
path: target/Zed-aarch64.exe
if-no-files-found: error
timeout-minutes: 60
bundle_windows_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-32vcpu-windows-2022
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows::bundle_windows
run: script/bundle-windows.ps1 -Architecture x86_64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed-x86_64.exe'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-x86_64.exe
path: target/Zed-x86_64.exe
if-no-files-found: error
timeout-minutes: 60
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true
569
.github/workflows/run_tests.yml
vendored
Normal file
@@ -0,0 +1,569 @@
# Generated from xtask::workflows::run_tests
# Rebuild with `cargo xtask workflows`.
name: run_tests
env:
CARGO_TERM_COLOR: always
RUST_BACKTRACE: '1'
CARGO_INCREMENTAL: '0'
on:
pull_request:
branches:
- '**'
push:
branches:
- main
- v[0-9]+.[0-9]+.x
jobs:
orchestrate:
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
- id: filter
name: filter
run: |
if [ -z "$GITHUB_BASE_REF" ]; then
echo "Not in a PR context (i.e., push to main/stable/preview)"
COMPARE_REV="$(git rev-parse HEAD~1)"
else
echo "In a PR context comparing to pull_request.base.ref"
git fetch origin "$GITHUB_BASE_REF" --depth=350
COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
fi
CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

check_pattern() {
local output_name="$1"
local pattern="$2"
local grep_arg="$3"

echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
echo "${output_name}=false" >> "$GITHUB_OUTPUT"
}

check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/' -qP
check_pattern "run_docs" '^docs/' -qP
check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP
check_pattern "run_nix" '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' -qP
check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP
shell: bash -euxo pipefail {0}
outputs:
run_action_checks: ${{ steps.filter.outputs.run_action_checks }}
run_docs: ${{ steps.filter.outputs.run_docs }}
run_licenses: ${{ steps.filter.outputs.run_licenses }}
run_nix: ${{ steps.filter.outputs.run_nix }}
run_tests: ${{ steps.filter.outputs.run_tests }}
check_style:
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-4x8-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_pnpm
uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
with:
version: '9'
- name: ./script/prettier
run: ./script/prettier
shell: bash -euxo pipefail {0}
- name: ./script/check-todos
run: ./script/check-todos
shell: bash -euxo pipefail {0}
- name: ./script/check-keymaps
run: ./script/check-keymaps
shell: bash -euxo pipefail {0}
- name: run_tests::check_style::check_for_typos
uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1
with:
config: ./typos.toml
- name: steps::cargo_fmt
run: cargo fmt --all -- --check
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_windows:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: self-32vcpu-windows-2022
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
New-Item -ItemType Directory -Path "./../.cargo" -Force
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
shell: pwsh
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::clippy
run: ./script/clippy.ps1
shell: pwsh
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: pwsh
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than.ps1 250
shell: pwsh
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: pwsh
- name: steps::cleanup_cargo_config
if: always()
run: |
Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
shell: pwsh
timeout-minutes: 60
run_tests_linux:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::clippy
run: ./script/clippy
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
shell: bash -euxo pipefail {0}
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_mac:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::clippy
run: ./script/clippy
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
doctests:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- id: run_doctests
name: run_tests::doctests::run_doctests
run: |
cargo test --workspace --doc --no-fail-fast
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
check_workspace_binaries:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: namespace-profile-8x16-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: cargo build -p collab
run: cargo build -p collab
shell: bash -euxo pipefail {0}
- name: cargo build --workspace --bins --examples
run: cargo build --workspace --bins --examples
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
check_postgres_and_protobuf_migrations:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files
run: git clean -df
shell: bash -euxo pipefail {0}
- name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge
run: |
if [ -z "$GITHUB_BASE_REF" ];
then
echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
else
git checkout -B temp
git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
fi
shell: bash -euxo pipefail {0}
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action
uses: bufbuild/buf-setup-action@v1
with:
version: v1.29.0
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
uses: bufbuild/buf-breaking-action@v1
with:
input: crates/proto/proto/
against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
timeout-minutes: 60
check_dependencies:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: run_tests::check_dependencies::install_cargo_machete
uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
with:
command: install
args: cargo-machete@0.7.0
- name: run_tests::check_dependencies::run_cargo_machete
uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
with:
command: machete
- name: run_tests::check_dependencies::check_cargo_lock
run: cargo update --locked --workspace
shell: bash -euxo pipefail {0}
- name: run_tests::check_dependencies::check_vulnerable_dependencies
if: github.event_name == 'pull_request'
uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8
with:
license-check: false
timeout-minutes: 60
check_docs:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_docs == 'true'
runs-on: namespace-profile-8x16-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: run_tests::check_docs::lychee_link_check
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
with:
args: --no-progress --exclude '^http' './docs/src/**/*'
fail: true
jobSummary: false
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: run_tests::check_docs::install_mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08
with:
mdbook-version: 0.4.37
- name: run_tests::check_docs::build_docs
run: |
mkdir -p target/deploy
mdbook build ./docs --dest-dir=../target/deploy/docs/
shell: bash -euxo pipefail {0}
- name: run_tests::check_docs::lychee_link_check
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
with:
args: --no-progress --exclude '^http' 'target/deploy/docs'
fail: true
jobSummary: false
timeout-minutes: 60
check_licenses:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_licenses == 'true'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: ./script/check-licenses
run: ./script/check-licenses
shell: bash -euxo pipefail {0}
- name: ./script/generate-licenses
run: ./script/generate-licenses
shell: bash -euxo pipefail {0}
check_scripts:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_action_checks == 'true'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: run_tests::check_scripts::run_shellcheck
run: ./script/shellcheck-scripts error
shell: bash -euxo pipefail {0}
- id: get_actionlint
name: run_tests::check_scripts::download_actionlint
run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
shell: bash -euxo pipefail {0}
- name: run_tests::check_scripts::run_actionlint
run: |
${{ steps.get_actionlint.outputs.executable }} -color
shell: bash -euxo pipefail {0}
- name: run_tests::check_scripts::check_xtask_workflows
run: |
cargo xtask workflows
if ! git diff --exit-code .github; then
echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
echo "Please run 'cargo xtask workflows' locally and commit the changes"
exit 1
fi
shell: bash -euxo pipefail {0}
timeout-minutes: 60
build_nix_linux_x86_64:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_nix == 'true'
runs-on: namespace-profile-32x64-ubuntu-2004
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: '1'
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: nix_build::build_nix::install_nix
uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
- name: nix_build::build_nix::cachix_action
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
with:
name: zed
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
cachixArgs: -v
pushFilter: -zed-editor-[0-9.]*-nightly
- name: nix_build::build_nix::build
run: nix build .#debug -L --accept-flake-config
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
build_nix_mac_aarch64:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_nix == 'true'
runs-on: self-mini-macos
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: '1'
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: nix_build::build_nix::set_path
run: |
echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
shell: bash -euxo pipefail {0}
- name: nix_build::build_nix::cachix_action
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
with:
name: zed
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
cachixArgs: -v
pushFilter: -zed-editor-[0-9.]*-nightly
- name: nix_build::build_nix::build
run: nix build .#debug -L --accept-flake-config
shell: bash -euxo pipefail {0}
- name: nix_build::build_nix::limit_store
run: |-
if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
nix-collect-garbage -d || true
fi
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
tests_pass:
needs:
- orchestrate
- check_style
- run_tests_windows
- run_tests_linux
- run_tests_mac
- doctests
- check_workspace_binaries
- check_postgres_and_protobuf_migrations
- check_dependencies
- check_docs
- check_licenses
- check_scripts
- build_nix_linux_x86_64
- build_nix_mac_aarch64
if: github.repository_owner == 'zed-industries' && always()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: run_tests::tests_pass
run: |
set +x
EXIT_CODE=0

check_result() {
echo "* $1: $2"
if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
}

check_result "orchestrate" "${{ needs.orchestrate.result }}"
check_result "check_style" "${{ needs.check_style.result }}"
check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}"
check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}"
check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
check_result "doctests" "${{ needs.doctests.result }}"
check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
check_result "check_postgres_and_protobuf_migrations" "${{ needs.check_postgres_and_protobuf_migrations.result }}"
check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
check_result "check_docs" "${{ needs.check_docs.result }}"
check_result "check_licenses" "${{ needs.check_licenses.result }}"
check_result "check_scripts" "${{ needs.check_scripts.result }}"
check_result "build_nix_linux_x86_64" "${{ needs.build_nix_linux_x86_64.result }}"
check_result "build_nix_mac_aarch64" "${{ needs.build_nix_mac_aarch64.result }}"

exit $EXIT_CODE
shell: bash -euxo pipefail {0}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
63
.github/workflows/run_unit_evals.yml
vendored
Normal file
@@ -0,0 +1,63 @@
# Generated from xtask::workflows::run_agent_evals
# Rebuild with `cargo xtask workflows`.
name: run_agent_evals
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
on:
schedule:
- cron: 47 1 * * 2
workflow_dispatch: {}
jobs:
unit_evals:
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
shell: bash -euxo pipefail {0}
- name: ./script/run-unit-evals
run: ./script/run-unit-evals
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
- name: run_agent_evals::unit_evals::send_failure_to_slack
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:
method: chat.postMessage
token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
payload: |
channel: C04UDRNNJFQ
text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
21
.github/workflows/script_checks.yml
vendored
@@ -1,21 +0,0 @@
name: Script

on:
pull_request:
paths:
- "script/**"
push:
branches:
- main

jobs:
shellcheck:
name: "ShellCheck Scripts"
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Shellcheck ./scripts
run: |
./script/shellcheck-scripts error
86
.github/workflows/unit_evals.yml
vendored
@@ -1,86 +0,0 @@
name: Run Unit Evals

on:
schedule:
# GitHub might drop jobs at busy times, so we choose a random time in the middle of the night.
- cron: "47 1 * * 2"
workflow_dispatch:

concurrency:
# Allow only one workflow per any non-`main` branch.
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: 1
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}

jobs:
unit_evals:
if: github.repository_owner == 'zed-industries'
timeout-minutes: 60
name: Run unit evals
runs-on:
- namespace-profile-16x32-ubuntu-2204
steps:
- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"

- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false

- name: Cache dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
# cache-provider: "buildjet"

- name: Install Linux dependencies
run: ./script/linux

- name: Configure CI
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml

- name: Install Rust
shell: bash -euxo pipefail {0}
run: |
cargo install cargo-nextest --locked

- name: Install Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with:
node-version: "18"

- name: Limit target directory size
shell: bash -euxo pipefail {0}
run: script/clear-target-dir-if-larger-than 100

- name: Run unit evals
shell: bash -euxo pipefail {0}
run: cargo nextest run --workspace --no-fail-fast --features eval --no-capture -E 'test(::eval_)'
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}

- name: Send failure message to Slack channel if needed
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:
method: chat.postMessage
token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
payload: |
channel: C04UDRNNJFQ
text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"

# Even the Linux runner is not stateful, in theory there is no need to do this cleanup.
# But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
# to clean up the config file, I’ve included the cleanup code here as a precaution.
# While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution.
- name: Clean CI config file
if: always()
run: rm -rf ./../.cargo
195
Cargo.lock
generated
@@ -210,9 +210,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "agent-client-protocol"
|
||||
version = "0.5.0"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f655394a107cd601bd2e5375c2d909ea83adc65678a0e0e8d77613d3c848a7d"
|
||||
checksum = "525705e39c11cd73f7bc784e3681a9386aa30c8d0630808d3dc2237eb4f9cb1b"
|
||||
dependencies = [
|
||||
"agent-client-protocol-schema",
|
||||
"anyhow",
|
||||
@@ -228,9 +228,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "agent-client-protocol-schema"
|
||||
version = "0.4.11"
|
||||
version = "0.6.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "61be4454304d7df1a5b44c4ae55e707ffe72eac4dfb1ef8762510ce8d8f6d924"
|
||||
checksum = "ecf16c18fea41282d6bbadd1549a06be6836bddb1893f44a6235f340fa24e2af"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"derive_more 2.0.1",
|
||||
@@ -266,6 +266,7 @@ dependencies = [
|
||||
"log",
|
||||
"nix 0.29.0",
|
||||
"project",
|
||||
"release_channel",
|
||||
"reqwest_client",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -1338,6 +1339,7 @@ dependencies = [
|
||||
"settings",
|
||||
"smol",
|
||||
"tempfile",
|
||||
"util",
|
||||
"which 6.0.3",
|
||||
"workspace",
|
||||
]
|
||||
@@ -1349,6 +1351,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"log",
|
||||
"simplelog",
|
||||
"tempfile",
|
||||
"windows 0.61.3",
|
||||
"winresource",
|
||||
]
|
||||
@@ -3073,6 +3076,7 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"paths",
|
||||
"plist",
|
||||
"rayon",
|
||||
"release_channel",
|
||||
"serde",
|
||||
"tempfile",
|
||||
@@ -4526,13 +4530,15 @@ dependencies = [
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"json_dotpath",
|
||||
"language",
|
||||
"log",
|
||||
"node_runtime",
|
||||
"paths",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shlex",
|
||||
"settings",
|
||||
"smol",
|
||||
"task",
|
||||
"util",
|
||||
@@ -4758,7 +4764,6 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"shlex",
|
||||
"sysinfo 0.37.2",
|
||||
"task",
|
||||
"tasks_ui",
|
||||
@@ -4902,6 +4907,18 @@ dependencies = [
|
||||
"syn 2.0.106",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derive_setters"
|
||||
version = "0.1.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae5c625eda104c228c06ecaf988d1c60e542176bd7a490e60eeda3493244c0c9"
|
||||
dependencies = [
|
||||
"darling 0.20.11",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.106",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deunicode"
|
||||
version = "1.6.2"
|
||||
@@ -4920,6 +4937,7 @@ dependencies = [
|
||||
"editor",
|
||||
"gpui",
|
||||
"indoc",
|
||||
"itertools 0.14.0",
|
||||
"language",
|
||||
"log",
|
||||
"lsp",
|
||||
@@ -5820,8 +5838,6 @@ name = "extension"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-compression",
|
||||
"async-tar",
|
||||
"async-trait",
|
||||
"collections",
|
||||
"dap",
|
||||
@@ -6942,6 +6958,33 @@ dependencies = [
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gh-workflow"
|
||||
version = "0.8.0"
|
||||
source = "git+https://github.com/zed-industries/gh-workflow?rev=3eaa84abca0778eb54272f45a312cb24f9a0b435#3eaa84abca0778eb54272f45a312cb24f9a0b435"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"derive_more 2.0.1",
|
||||
"derive_setters",
|
||||
"gh-workflow-macros",
|
||||
"indexmap 2.11.4",
|
||||
"merge",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_yaml",
|
||||
"strum_macros 0.27.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gh-workflow-macros"
|
||||
version = "0.8.0"
|
||||
source = "git+https://github.com/zed-industries/gh-workflow?rev=3eaa84abca0778eb54272f45a312cb24f9a0b435#3eaa84abca0778eb54272f45a312cb24f9a0b435"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"quote",
|
||||
"syn 2.0.106",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gif"
|
||||
version = "0.13.3"
|
||||
@@ -7035,6 +7078,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"settings",
|
||||
"url",
|
||||
"urlencoding",
|
||||
"util",
|
||||
]
|
||||
|
||||
@@ -7071,9 +7115,10 @@ dependencies = [
|
||||
"notifications",
|
||||
"panel",
|
||||
"picker",
|
||||
"postage",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"recent_projects",
|
||||
"remote",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -7225,6 +7270,7 @@ dependencies = [
|
||||
"async-task",
|
||||
"backtrace",
|
||||
"bindgen 0.71.1",
|
||||
"bitflags 2.9.4",
|
||||
"blade-graphics",
|
||||
"blade-macros",
|
||||
"blade-util",
|
||||
@@ -7304,6 +7350,7 @@ dependencies = [
|
||||
"wayland-cursor",
|
||||
"wayland-protocols 0.31.2",
|
||||
"wayland-protocols-plasma",
|
||||
"wayland-protocols-wlr",
|
||||
"windows 0.61.3",
|
||||
"windows-core 0.61.2",
|
||||
"windows-numerics",
|
||||
@@ -9455,7 +9502,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "lsp-types"
|
||||
version = "0.95.1"
|
||||
source = "git+https://github.com/zed-industries/lsp-types?rev=0874f8742fe55b4dc94308c1e3c0069710d8eeaf#0874f8742fe55b4dc94308c1e3c0069710d8eeaf"
|
||||
source = "git+https://github.com/zed-industries/lsp-types?rev=b71ab4eeb27d9758be8092020a46fe33fbca4e33#b71ab4eeb27d9758be8092020a46fe33fbca4e33"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"serde",
|
||||
@@ -9812,6 +9859,28 @@ dependencies = [
|
||||
"gpui",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "merge"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "10bbef93abb1da61525bbc45eeaff6473a41907d19f8f9aa5168d214e10693e9"
|
||||
dependencies = [
|
||||
"merge_derive",
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "merge_derive"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "209d075476da2e63b4b29e72a2ef627b840589588e71400a25e3565c4f849d07"
|
||||
dependencies = [
|
||||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "metal"
|
||||
version = "0.29.0"
|
||||
@@ -9838,7 +9907,7 @@ dependencies = [
|
||||
"pretty_assertions",
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"settings_json",
|
||||
"streaming-iterator",
|
||||
"tree-sitter",
|
||||
"tree-sitter-json",
|
||||
@@ -12801,6 +12870,30 @@ dependencies = [
|
||||
"toml_edit 0.23.7",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
|
||||
dependencies = [
|
||||
"proc-macro-error-attr",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error-attr"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error-attr2"
|
||||
version = "2.0.0"
|
||||
@@ -12927,7 +13020,6 @@ dependencies = [
|
||||
"settings",
|
||||
"sha2",
|
||||
"shellexpand 2.1.2",
|
||||
"shlex",
|
||||
"smallvec",
|
||||
"smol",
|
||||
"snippet",
|
||||
@@ -13040,7 +13132,6 @@ dependencies = [
|
||||
"paths",
|
||||
"rope",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"text",
|
||||
"util",
|
||||
"uuid",
|
||||
@@ -13837,10 +13928,10 @@ dependencies = [
|
||||
"prost 0.9.0",
|
||||
"release_channel",
|
||||
"rpc",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"shlex",
|
||||
"smol",
|
||||
"tempfile",
|
||||
"thiserror 2.0.17",
|
||||
@@ -13894,6 +13985,7 @@ dependencies = [
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"proto",
|
||||
"rayon",
|
||||
"release_channel",
|
||||
"remote",
|
||||
"reqwest_client",
|
||||
@@ -14227,8 +14319,6 @@ dependencies = [
|
||||
"log",
|
||||
"rand 0.9.2",
|
||||
"rayon",
|
||||
"regex",
|
||||
"smallvec",
|
||||
"sum_tree",
|
||||
"unicode-segmentation",
|
||||
"util",
|
||||
@@ -15226,6 +15316,19 @@ dependencies = [
|
||||
"syn 2.0.106",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_yaml"
|
||||
version = "0.9.34+deprecated"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
|
||||
dependencies = [
|
||||
"indexmap 2.11.4",
|
||||
"itoa",
|
||||
"ryu",
|
||||
"serde",
|
||||
"unsafe-libyaml",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serial2"
|
||||
version = "0.2.33"
|
||||
@@ -15262,6 +15365,7 @@ dependencies = [
|
||||
"indoc",
|
||||
"inventory",
|
||||
"log",
|
||||
"migrator",
|
||||
"paths",
|
||||
"pretty_assertions",
|
||||
"release_channel",
|
||||
@@ -15270,17 +15374,31 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"serde_path_to_error",
|
||||
"serde_repr",
|
||||
"serde_with",
|
||||
"settings_json",
|
||||
"settings_macros",
|
||||
"smallvec",
|
||||
"strum 0.27.2",
|
||||
"unindent",
|
||||
"util",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "settings_json"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"pretty_assertions",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"serde_path_to_error",
|
||||
"tree-sitter",
|
||||
"tree-sitter-json",
|
||||
"unindent",
|
||||
"util",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -15342,6 +15460,7 @@ dependencies = [
|
||||
"session",
|
||||
"settings",
|
||||
"strum 0.27.2",
|
||||
"telemetry",
|
||||
"theme",
|
||||
"title_bar",
|
||||
"ui",
|
||||
@@ -16383,7 +16502,7 @@ dependencies = [
|
||||
"editor",
|
||||
"file_icons",
|
||||
"gpui",
|
||||
"multi_buffer",
|
||||
"language",
|
||||
"ui",
|
||||
"workspace",
|
||||
]
|
||||
@@ -17035,6 +17154,7 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"postage",
|
||||
"rand 0.9.2",
|
||||
"regex",
|
||||
"rope",
|
||||
"smallvec",
|
||||
"sum_tree",
|
||||
@@ -17327,6 +17447,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"auto_update",
|
||||
"call",
|
||||
"channel",
|
||||
"chrono",
|
||||
"client",
|
||||
"cloud_llm_client",
|
||||
@@ -17928,7 +18049,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "tree-sitter-gomod"
|
||||
version = "1.1.1"
|
||||
source = "git+https://github.com/camdencheek/tree-sitter-go-mod?rev=6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c#6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c"
|
||||
source = "git+https://github.com/camdencheek/tree-sitter-go-mod?rev=2e886870578eeba1927a2dc4bd2e2b3f598c5f9a#2e886870578eeba1927a2dc4bd2e2b3f598c5f9a"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter-language",
|
||||
@@ -18397,6 +18518,12 @@ version = "0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3"
|
||||
|
||||
[[package]]
|
||||
name = "unsafe-libyaml"
|
||||
version = "0.2.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
|
||||
|
||||
[[package]]
|
||||
name = "untrusted"
|
||||
version = "0.9.0"
|
||||
@@ -18663,6 +18790,7 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"settings_ui",
|
||||
"task",
|
||||
"text",
|
||||
"theme",
|
||||
@@ -19370,6 +19498,19 @@ dependencies = [
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-protocols-wlr"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "efd94963ed43cf9938a090ca4f7da58eb55325ec8200c3848963e98dc25b78ec"
|
||||
dependencies = [
|
||||
"bitflags 2.9.4",
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-protocols 0.32.9",
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-scanner"
|
||||
version = "0.31.7"
|
||||
@@ -20803,9 +20944,12 @@ name = "xtask"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"backtrace",
|
||||
"cargo_metadata",
|
||||
"cargo_toml",
|
||||
"clap",
|
||||
"gh-workflow",
|
||||
"indexmap 2.11.4",
|
||||
"indoc",
|
||||
"toml 0.8.23",
|
||||
"toml_edit 0.22.27",
|
||||
@@ -20991,7 +21135,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.211.0"
|
||||
version = "0.212.0"
|
||||
dependencies = [
|
||||
"acp_tools",
|
||||
"activity_indicator",
|
||||
@@ -21083,6 +21227,7 @@ dependencies = [
|
||||
"project_symbols",
|
||||
"prompt_store",
|
||||
"proto",
|
||||
"rayon",
|
||||
"recent_projects",
|
||||
"release_channel",
|
||||
"remote",
|
||||
@@ -21518,6 +21663,7 @@ dependencies = [
|
||||
"clock",
|
||||
"cloud_llm_client",
|
||||
"cloud_zeta2_prompt",
|
||||
"collections",
|
||||
"edit_prediction",
|
||||
"edit_prediction_context",
|
||||
"feature_flags",
|
||||
@@ -21531,6 +21677,7 @@ dependencies = [
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"release_channel",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
@@ -21545,6 +21692,7 @@ dependencies = [
|
||||
name = "zeta2_tools"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
"clap",
|
||||
"client",
|
||||
@@ -21562,6 +21710,7 @@ dependencies = [
|
||||
"ordered-float 2.10.1",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"regex-syntax",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
@@ -21593,6 +21742,7 @@ dependencies = [
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"gpui_tokio",
|
||||
"indoc",
|
||||
"language",
|
||||
"language_extension",
|
||||
"language_model",
|
||||
@@ -21603,8 +21753,10 @@ dependencies = [
|
||||
"ordered-float 2.10.1",
|
||||
"paths",
|
||||
"polars",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"prompt_store",
|
||||
"pulldown-cmark 0.12.2",
|
||||
"release_channel",
|
||||
"reqwest_client",
|
||||
"serde",
|
||||
@@ -21614,6 +21766,7 @@ dependencies = [
|
||||
"smol",
|
||||
"soa-rs",
|
||||
"terminal_view",
|
||||
"toml 0.8.23",
|
||||
"util",
|
||||
"watch",
|
||||
"zeta",
|
||||
|
||||
13
Cargo.toml
@@ -148,6 +148,7 @@ members = [
"crates/semantic_version",
"crates/session",
"crates/settings",
"crates/settings_json",
"crates/settings_macros",
"crates/settings_profile_selector",
"crates/settings_ui",
@@ -380,6 +381,7 @@ search = { path = "crates/search" }
semantic_version = { path = "crates/semantic_version" }
session = { path = "crates/session" }
settings = { path = "crates/settings" }
settings_json = { path = "crates/settings_json" }
settings_macros = { path = "crates/settings_macros" }
settings_ui = { path = "crates/settings_ui" }
snippet = { path = "crates/snippet" }
@@ -438,7 +440,7 @@ zlog_settings = { path = "crates/zlog_settings" }
# External crates
#

agent-client-protocol = { version = "0.5.0", features = ["unstable"] }
agent-client-protocol = { version = "0.7.0", features = ["unstable"] }
aho-corasick = "1.1"
alacritty_terminal = "0.25.1-rc1"
any_vec = "0.14"
@@ -506,6 +508,7 @@ fork = "0.2.0"
futures = "0.3"
futures-batch = "0.6.1"
futures-lite = "1.13"
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "3eaa84abca0778eb54272f45a312cb24f9a0b435" }
git2 = { version = "0.20.1", default-features = false }
globset = "0.4"
handlebars = "4.3"
@@ -534,7 +537,7 @@ libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "0874f8742fe55b4dc94308c1e3c0069710d8eeaf" }
lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "b71ab4eeb27d9758be8092020a46fe33fbca4e33" }
mach2 = "0.5"
markup5ever_rcdom = "0.3.0"
metal = "0.29"
@@ -677,7 +680,7 @@ tree-sitter-elixir = "0.3"
tree-sitter-embedded-template = "0.23.0"
tree-sitter-gitcommit = { git = "https://github.com/zed-industries/tree-sitter-git-commit", rev = "88309716a69dd13ab83443721ba6e0b491d37ee9" }
tree-sitter-go = "0.23"
tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "6efb59652d30e0e9cd5f3b3a669afd6f1a926d3c", package = "tree-sitter-gomod" }
tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "2e886870578eeba1927a2dc4bd2e2b3f598c5f9a", package = "tree-sitter-gomod" }
tree-sitter-gowork = { git = "https://github.com/zed-industries/tree-sitter-go-work", rev = "acb0617bf7f4fda02c6217676cc64acb89536dc7" }
tree-sitter-heex = { git = "https://github.com/zed-industries/tree-sitter-heex", rev = "1dd45142fbb05562e35b2040c6129c9bca346592" }
tree-sitter-html = "0.23"
@@ -775,6 +778,8 @@ windows-capture = { git = "https://github.com/zed-industries/windows-capture.git

[profile.dev]
split-debuginfo = "unpacked"
# https://github.com/rust-lang/cargo/issues/16104
incremental = false
codegen-units = 16

# mirror configuration for crates compiled for the build platform
@@ -834,7 +839,7 @@ ui_input = { codegen-units = 1 }
zed_actions = { codegen-units = 1 }

[profile.release]
debug = "limited"
debug = "full"
lto = "thin"
codegen-units = 1

@@ -1,2 +0,0 @@
app: postgrest crates/collab/postgrest_app.conf
llm: postgrest crates/collab/postgrest_llm.conf
@@ -1,2 +1 @@
postgrest_llm: postgrest crates/collab/postgrest_llm.conf
website: cd ../zed.dev; npm run dev -- --port=3000
117
REVIEWERS.conl
Normal file
@@ -0,0 +1,117 @@
; This file contains a list of people who're interested in reviewing pull requests
; to certain parts of the code-base.
;
; This is mostly used internally for PR assignment, and may change over time.
;
; If you have permission to merge PRs (mostly equivalent to "do you work at Zed Industries"),
; we strongly encourage you to put your name in the "all" bucket, but you can also add yourself
; to other areas too.

<all>
= @cole-miller
= @ConradIrwin
= @danilo-leal
= @dinocosta
= @HactarCE
= @kubkon
= @maxdeviant
= @p1n3appl3
= @probably-neb
= @smitbarmase
= @SomeoneToIgnore
= @Veykril

ai
= @benbrandt
= @bennetbo
= @danilo-leal
= @rtfeldman

audio
= @dvdsk

crashes
= @p1n3appl3
= @Veykril

debugger
= @Anthony-Eid
= @kubkon
= @osiewicz

design
= @danilo-leal

docs
= @probably-neb

extension
= @kubkon

git
= @cole-miller
= @danilo-leal

gpui
= @Anthony-Eid
= @cameron1024
= @mikayla-maki
= @probably-neb

helix
= @kubkon

languages
= @osiewicz
= @probably-neb
= @smitbarmase
= @SomeoneToIgnore
= @Veykril

linux
= @cole-miller
= @dvdsk
= @p1n3appl3
= @probably-neb
= @smitbarmase

lsp
= @osiewicz
= @smitbarmase
= @SomeoneToIgnore
= @Veykril

multi_buffer
= @Veykril
= @SomeoneToIgnore

pickers
= @dvdsk
= @p1n3appl3
= @SomeoneToIgnore

project_panel
= @smitbarmase

settings_ui
= @Anthony-Eid
= @danilo-leal
= @probably-neb

tasks
= @SomeoneToIgnore
= @Veykril

terminal
= @kubkon
= @Veykril

vim
= @ConradIrwin
= @dinocosta
= @p1n3appl3
= @probably-neb

windows
= @localcc
= @reflectronic
4
assets/icons/chevron_down_up.svg
Normal file
@@ -0,0 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M11.3335 13.3333L8.00017 10L4.66685 13.3333" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11.3335 2.66669L8.00017 6.00002L4.66685 2.66669" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
5
assets/icons/link.svg
Normal file
@@ -0,0 +1,5 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6.2 11H5C4.20435 11 3.44129 10.6839 2.87868 10.1213C2.31607 9.55871 2 8.79565 2 8C2 7.20435 2.31607 6.44129 2.87868 5.87868C3.44129 5.31607 4.20435 5 5 5H6.2" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M9.80005 5H11C11.7957 5 12.5588 5.31607 13.1214 5.87868C13.684 6.44129 14 7.20435 14 8C14 8.79565 13.684 9.55871 13.1214 10.1213C12.5588 10.6839 11.7957 11 11 11H9.80005" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5.6001 8H10.4001" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
@@ -43,7 +43,8 @@
"f11": "zed::ToggleFullScreen",
"ctrl-alt-z": "edit_prediction::RateCompletions",
"ctrl-alt-shift-i": "edit_prediction::ToggleMenu",
"ctrl-alt-l": "lsp_tool::ToggleMenu"
"ctrl-alt-l": "lsp_tool::ToggleMenu",
"ctrl-alt-.": "project_panel::ToggleHideHidden"
}
},
{
@@ -235,12 +236,13 @@
"ctrl-alt-n": "agent::NewTextThread",
"ctrl-shift-h": "agent::OpenHistory",
"ctrl-alt-c": "agent::OpenSettings",
"ctrl-alt-p": "agent::OpenRulesLibrary",
"ctrl-alt-p": "agent::ManageProfiles",
"ctrl-alt-l": "agent::OpenRulesLibrary",
"ctrl-i": "agent::ToggleProfileSelector",
"ctrl-alt-/": "agent::ToggleModelSelector",
"ctrl-shift-a": "agent::ToggleContextPicker",
"ctrl-shift-j": "agent::ToggleNavigationMenu",
"ctrl-shift-i": "agent::ToggleOptionsMenu",
"ctrl-alt-i": "agent::ToggleOptionsMenu",
"ctrl-alt-shift-n": "agent::ToggleNewThreadMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
"ctrl->": "agent::AddSelectionToThread",
@@ -407,6 +409,7 @@
"bindings": {
"escape": "project_search::ToggleFocus",
"shift-find": "search::FocusSearch",
"shift-enter": "project_search::ToggleAllSearchResults",
"ctrl-shift-f": "search::FocusSearch",
"ctrl-shift-h": "search::ToggleReplace",
"alt-ctrl-g": "search::ToggleRegex",
@@ -479,6 +482,7 @@
"alt-w": "search::ToggleWholeWord",
"alt-find": "project_search::ToggleFilters",
"alt-ctrl-f": "project_search::ToggleFilters",
"shift-enter": "project_search::ToggleAllSearchResults",
"ctrl-alt-shift-r": "search::ToggleRegex",
"ctrl-alt-shift-x": "search::ToggleRegex",
"alt-r": "search::ToggleRegex",
@@ -609,7 +613,7 @@
"ctrl-alt-b": "workspace::ToggleRightDock",
"ctrl-b": "workspace::ToggleLeftDock",
"ctrl-j": "workspace::ToggleBottomDock",
"ctrl-alt-y": "workspace::CloseAllDocks",
"ctrl-alt-y": "workspace::ToggleAllDocks",
"ctrl-alt-0": "workspace::ResetActiveDockSize",
// For 0px parameter, uses UI font size value.
"ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
@@ -731,6 +735,14 @@
"tab": "editor::ComposeCompletion"
}
},
{
"context": "Editor && in_snippet",
"use_key_equivalents": true,
"bindings": {
"alt-right": "editor::NextSnippetTabstop",
"alt-left": "editor::PreviousSnippetTabstop"
}
},
// Bindings for accepting edit predictions
//
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
@@ -933,6 +945,7 @@
"ctrl-g ctrl-g": "git::Fetch",
"ctrl-g up": "git::Push",
"ctrl-g down": "git::Pull",
"ctrl-g shift-down": "git::PullRebase",
"ctrl-g shift-up": "git::ForcePush",
"ctrl-g d": "git::Diff",
"ctrl-g backspace": "git::RestoreTrackedFiles",
@@ -1012,7 +1025,8 @@
"context": "CollabPanel",
"bindings": {
"alt-up": "collab_panel::MoveChannelUp",
"alt-down": "collab_panel::MoveChannelDown"
"alt-down": "collab_panel::MoveChannelDown",
"alt-enter": "collab_panel::OpenSelectedChannelNotes"
}
},
{
@@ -1126,7 +1140,8 @@
"ctrl-shift-space": "terminal::ToggleViMode",
"ctrl-shift-r": "terminal::RerunTask",
"ctrl-alt-r": "terminal::RerunTask",
"alt-t": "terminal::RerunTask"
"alt-t": "terminal::RerunTask",
"ctrl-shift-5": "pane::SplitRight"
}
},
{
@@ -1242,6 +1257,14 @@
"ctrl-shift-enter": "workspace::OpenWithSystem"
}
},
{
"context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)",
"use_key_equivalents": true,
"bindings": {
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
"ctrl-space": "git::WorktreeFromDefault"
}
},
{
"context": "SettingsWindow",
"use_key_equivalents": true,
@@ -1298,5 +1321,12 @@
"ctrl-enter up": "dev::Zeta2RatePredictionPositive",
"ctrl-enter down": "dev::Zeta2RatePredictionNegative"
}
},
{
"context": "Zeta2Context > Editor",
"bindings": {
"alt-left": "dev::Zeta2ContextGoBack",
"alt-right": "dev::Zeta2ContextGoForward"
}
}
]

@@ -49,7 +49,8 @@
"ctrl-cmd-f": "zed::ToggleFullScreen",
"ctrl-cmd-z": "edit_prediction::RateCompletions",
"ctrl-cmd-i": "edit_prediction::ToggleMenu",
"ctrl-cmd-l": "lsp_tool::ToggleMenu"
"ctrl-cmd-l": "lsp_tool::ToggleMenu",
"cmd-alt-.": "project_panel::ToggleHideHidden"
}
},
{
@@ -274,12 +275,13 @@
"cmd-alt-n": "agent::NewTextThread",
"cmd-shift-h": "agent::OpenHistory",
"cmd-alt-c": "agent::OpenSettings",
"cmd-alt-p": "agent::OpenRulesLibrary",
"cmd-alt-l": "agent::OpenRulesLibrary",
"cmd-alt-p": "agent::ManageProfiles",
"cmd-i": "agent::ToggleProfileSelector",
"cmd-alt-/": "agent::ToggleModelSelector",
"cmd-shift-a": "agent::ToggleContextPicker",
"cmd-shift-j": "agent::ToggleNavigationMenu",
"cmd-shift-i": "agent::ToggleOptionsMenu",
"cmd-alt-m": "agent::ToggleOptionsMenu",
"cmd-alt-shift-n": "agent::ToggleNewThreadMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
"cmd->": "agent::AddSelectionToThread",
@@ -468,6 +470,7 @@
"bindings": {
"escape": "project_search::ToggleFocus",
"cmd-shift-j": "project_search::ToggleFilters",
"shift-enter": "project_search::ToggleAllSearchResults",
"cmd-shift-f": "search::FocusSearch",
"cmd-shift-h": "search::ToggleReplace",
"alt-cmd-g": "search::ToggleRegex",
@@ -496,6 +499,7 @@
"bindings": {
"escape": "project_search::ToggleFocus",
"cmd-shift-j": "project_search::ToggleFilters",
"shift-enter": "project_search::ToggleAllSearchResults",
"cmd-shift-h": "search::ToggleReplace",
"alt-cmd-g": "search::ToggleRegex",
||||
"alt-cmd-x": "search::ToggleRegex"
|
||||
@@ -679,7 +683,7 @@
|
||||
"cmd-alt-b": "workspace::ToggleRightDock",
|
||||
"cmd-r": "workspace::ToggleRightDock",
|
||||
"cmd-j": "workspace::ToggleBottomDock",
|
||||
"alt-cmd-y": "workspace::CloseAllDocks",
|
||||
"alt-cmd-y": "workspace::ToggleAllDocks",
|
||||
// For 0px parameter, uses UI font size value.
|
||||
"ctrl-alt-0": "workspace::ResetActiveDockSize",
|
||||
"ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
|
||||
@@ -801,6 +805,14 @@
|
||||
"tab": "editor::ComposeCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"alt-right": "editor::NextSnippetTabstop",
|
||||
"alt-left": "editor::PreviousSnippetTabstop"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && edit_prediction",
|
||||
"bindings": {
|
||||
@@ -1026,6 +1038,7 @@
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-down": "git::PullRebase",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
@@ -1077,7 +1090,8 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"alt-up": "collab_panel::MoveChannelUp",
|
||||
"alt-down": "collab_panel::MoveChannelDown"
|
||||
"alt-down": "collab_panel::MoveChannelDown",
|
||||
"alt-enter": "collab_panel::OpenSelectedChannelNotes"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1209,6 +1223,7 @@
|
||||
"ctrl-alt-down": "pane::SplitDown",
|
||||
"ctrl-alt-left": "pane::SplitLeft",
|
||||
"ctrl-alt-right": "pane::SplitRight",
|
||||
"cmd-d": "pane::SplitRight",
|
||||
"cmd-alt-r": "terminal::RerunTask"
|
||||
}
|
||||
},
|
||||
@@ -1347,6 +1362,14 @@
|
||||
"ctrl-shift-enter": "workspace::OpenWithSystem"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
|
||||
"ctrl-space": "git::WorktreeFromDefault"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
@@ -1404,5 +1427,12 @@
|
||||
"cmd-enter up": "dev::Zeta2RatePredictionPositive",
|
||||
"cmd-enter down": "dev::Zeta2RatePredictionNegative"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Zeta2Context > Editor",
|
||||
"bindings": {
|
||||
"alt-left": "dev::Zeta2ContextGoBack",
|
||||
"alt-right": "dev::Zeta2ContextGoForward"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -41,7 +41,8 @@
|
||||
"shift-f11": "debugger::StepOut",
|
||||
"f11": "zed::ToggleFullScreen",
|
||||
"ctrl-shift-i": "edit_prediction::ToggleMenu",
|
||||
"shift-alt-l": "lsp_tool::ToggleMenu"
|
||||
"shift-alt-l": "lsp_tool::ToggleMenu",
|
||||
"ctrl-alt-.": "project_panel::ToggleHideHidden"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -236,12 +237,13 @@
|
||||
"shift-alt-n": "agent::NewTextThread",
|
||||
"ctrl-shift-h": "agent::OpenHistory",
|
||||
"shift-alt-c": "agent::OpenSettings",
|
||||
"shift-alt-p": "agent::OpenRulesLibrary",
|
||||
"shift-alt-l": "agent::OpenRulesLibrary",
|
||||
"shift-alt-p": "agent::ManageProfiles",
|
||||
"ctrl-i": "agent::ToggleProfileSelector",
|
||||
"shift-alt-/": "agent::ToggleModelSelector",
|
||||
"ctrl-shift-a": "agent::ToggleContextPicker",
|
||||
"ctrl-shift-j": "agent::ToggleNavigationMenu",
|
||||
"ctrl-shift-i": "agent::ToggleOptionsMenu",
|
||||
"ctrl-alt-i": "agent::ToggleOptionsMenu",
|
||||
// "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"ctrl-shift-.": "agent::AddSelectionToThread",
|
||||
@@ -488,6 +490,7 @@
|
||||
"alt-c": "search::ToggleCaseSensitive",
|
||||
"alt-w": "search::ToggleWholeWord",
|
||||
"alt-f": "project_search::ToggleFilters",
|
||||
"shift-enter": "project_search::ToggleAllSearchResults",
|
||||
"alt-r": "search::ToggleRegex",
|
||||
// "ctrl-shift-alt-x": "search::ToggleRegex",
|
||||
"ctrl-k shift-enter": "pane::TogglePinTab"
|
||||
@@ -614,7 +617,7 @@
|
||||
"ctrl-alt-b": "workspace::ToggleRightDock",
|
||||
"ctrl-b": "workspace::ToggleLeftDock",
|
||||
"ctrl-j": "workspace::ToggleBottomDock",
|
||||
"ctrl-shift-y": "workspace::CloseAllDocks",
|
||||
"ctrl-shift-y": "workspace::ToggleAllDocks",
|
||||
"alt-r": "workspace::ResetActiveDockSize",
|
||||
// For 0px parameter, uses UI font size value.
|
||||
"shift-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
|
||||
@@ -736,6 +739,14 @@
|
||||
"tab": "editor::ComposeCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"alt-right": "editor::NextSnippetTabstop",
|
||||
"alt-left": "editor::PreviousSnippetTabstop"
|
||||
}
|
||||
},
|
||||
// Bindings for accepting edit predictions
|
||||
//
|
||||
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
|
||||
@@ -943,6 +954,7 @@
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-down": "git::PullRebase",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
@@ -1030,7 +1042,8 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"alt-up": "collab_panel::MoveChannelUp",
|
||||
"alt-down": "collab_panel::MoveChannelDown"
|
||||
"alt-down": "collab_panel::MoveChannelDown",
|
||||
"alt-enter": "collab_panel::OpenSelectedChannelNotes"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1152,7 +1165,8 @@
|
||||
"ctrl-shift-space": "terminal::ToggleViMode",
|
||||
"ctrl-shift-r": "terminal::RerunTask",
|
||||
"ctrl-alt-r": "terminal::RerunTask",
|
||||
"alt-t": "terminal::RerunTask"
|
||||
"alt-t": "terminal::RerunTask",
|
||||
"ctrl-shift-5": "pane::SplitRight"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1270,6 +1284,14 @@
|
||||
"shift-alt-a": "onboarding::OpenAccount"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
|
||||
"ctrl-space": "git::WorktreeFromDefault"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
@@ -1327,5 +1349,12 @@
|
||||
"ctrl-enter up": "dev::Zeta2RatePredictionPositive",
|
||||
"ctrl-enter down": "dev::Zeta2RatePredictionNegative"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Zeta2Context > Editor",
|
||||
"bindings": {
|
||||
"alt-left": "dev::Zeta2ContextGoBack",
|
||||
"alt-right": "dev::Zeta2ContextGoForward"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -91,7 +91,7 @@
|
||||
{
|
||||
"context": "Workspace",
|
||||
"bindings": {
|
||||
"ctrl-shift-f12": "workspace::CloseAllDocks",
|
||||
"ctrl-shift-f12": "workspace::ToggleAllDocks",
|
||||
"ctrl-shift-r": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"alt-shift-f10": "task::Spawn",
|
||||
"ctrl-e": "file_finder::Toggle",
|
||||
|
||||
@@ -93,7 +93,7 @@
|
||||
{
|
||||
"context": "Workspace",
|
||||
"bindings": {
|
||||
"cmd-shift-f12": "workspace::CloseAllDocks",
|
||||
"cmd-shift-f12": "workspace::ToggleAllDocks",
|
||||
"cmd-shift-r": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"ctrl-alt-r": "task::Spawn",
|
||||
"cmd-e": "file_finder::Toggle",
|
||||
|
||||
@@ -220,6 +220,8 @@
|
||||
"[ {": ["vim::UnmatchedBackward", { "char": "{" }],
|
||||
"] )": ["vim::UnmatchedForward", { "char": ")" }],
|
||||
"[ (": ["vim::UnmatchedBackward", { "char": "(" }],
|
||||
"[ r": "vim::GoToPreviousReference",
|
||||
"] r": "vim::GoToNextReference",
|
||||
// tree-sitter related commands
|
||||
"[ x": "vim::SelectLargerSyntaxNode",
|
||||
"] x": "vim::SelectSmallerSyntaxNode"
|
||||
@@ -419,6 +421,12 @@
|
||||
"ctrl-[": "editor::Cancel"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_mode == helix_select && !menu",
|
||||
"bindings": {
|
||||
"escape": "vim::SwitchToHelixNormalMode"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "(vim_mode == helix_normal || vim_mode == helix_select) && !menu",
|
||||
"bindings": {
|
||||
@@ -432,7 +440,7 @@
|
||||
"shift-t": ["vim::PushFindBackward", { "after": true, "multiline": true }],
|
||||
"shift-f": ["vim::PushFindBackward", { "after": false, "multiline": true }],
|
||||
"alt-.": "vim::RepeatFind",
|
||||
|
||||
|
||||
// Changes
|
||||
"shift-r": "editor::Paste",
|
||||
"`": "vim::ConvertToLowerCase",
|
||||
@@ -442,14 +450,14 @@
|
||||
"ctrl-r": "vim::Redo",
|
||||
"y": "vim::HelixYank",
|
||||
"p": "vim::HelixPaste",
|
||||
"shift-p": ["vim::HelixPaste", { "before": true }],
|
||||
"shift-p": ["vim::HelixPaste", { "before": true }],
|
||||
">": "vim::Indent",
|
||||
"<": "vim::Outdent",
|
||||
"=": "vim::AutoIndent",
|
||||
"d": "vim::HelixDelete",
|
||||
"c": "vim::HelixSubstitute",
|
||||
"alt-c": "vim::HelixSubstituteNoYank",
|
||||
|
||||
|
||||
// Selection manipulation
|
||||
"s": "vim::HelixSelectRegex",
|
||||
"alt-s": ["editor::SplitSelectionIntoLines", { "keep_selections": true }],
|
||||
@@ -466,7 +474,7 @@
|
||||
"alt-i": "editor::SelectSmallerSyntaxNode",
|
||||
"alt-p": "editor::SelectPreviousSyntaxNode",
|
||||
"alt-n": "editor::SelectNextSyntaxNode",
|
||||
|
||||
|
||||
// Goto mode
|
||||
"g e": "vim::EndOfDocument",
|
||||
"g h": "vim::StartOfLine",
|
||||
@@ -477,11 +485,11 @@
|
||||
"g b": "vim::WindowBottom",
|
||||
"g r": "editor::FindAllReferences", // zed specific
|
||||
"g n": "pane::ActivateNextItem",
|
||||
"shift-l": "pane::ActivateNextItem",
|
||||
"shift-l": "pane::ActivateNextItem",
|
||||
"g p": "pane::ActivatePreviousItem",
|
||||
"shift-h": "pane::ActivatePreviousItem",
|
||||
"g .": "vim::HelixGotoLastModification", // go to last modification
|
||||
|
||||
|
||||
// Window mode
|
||||
"space w h": "workspace::ActivatePaneLeft",
|
||||
"space w l": "workspace::ActivatePaneRight",
|
||||
@@ -512,7 +520,7 @@
|
||||
"]": ["vim::PushHelixNext", { "around": true }],
|
||||
"[": ["vim::PushHelixPrevious", { "around": true }],
|
||||
"g q": "vim::PushRewrap",
|
||||
"g w": "vim::PushRewrap",
|
||||
"g w": "vim::PushRewrap"
|
||||
// "tab": "pane::ActivateNextItem",
|
||||
// "shift-tab": "pane::ActivatePrevItem",
|
||||
}
|
||||
@@ -1017,5 +1025,16 @@
|
||||
// and Windows.
|
||||
"alt-l": "editor::AcceptEditPrediction"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow > NavigationMenu && !search",
|
||||
"bindings": {
|
||||
"l": "settings_editor::ExpandNavEntry",
|
||||
"h": "settings_editor::CollapseNavEntry",
|
||||
"k": "settings_editor::FocusPreviousNavEntry",
|
||||
"j": "settings_editor::FocusNextNavEntry",
|
||||
"g g": "settings_editor::FocusFirstNavEntry",
|
||||
"shift-g": "settings_editor::FocusLastNavEntry"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1,179 +0,0 @@
|
||||
You are a highly skilled software engineer with extensive knowledge in many programming languages, frameworks, design patterns, and best practices.
|
||||
|
||||
## Communication
|
||||
|
||||
1. Be conversational but professional.
|
||||
2. Refer to the user in the second person and yourself in the first person.
|
||||
3. Format your responses in markdown. Use backticks to format file, directory, function, and class names.
|
||||
4. NEVER lie or make things up.
|
||||
5. Refrain from apologizing all the time when results are unexpected. Instead, just try your best to proceed or explain the circumstances to the user without apologizing.
|
||||
|
||||
{{#if has_tools}}
|
||||
## Tool Use
|
||||
|
||||
1. Make sure to adhere to the tools schema.
|
||||
2. Provide every required argument.
|
||||
3. DO NOT use tools to access items that are already available in the context section.
|
||||
4. Use only the tools that are currently available.
|
||||
5. DO NOT use a tool that is not available just because it appears in the conversation. This means the user turned it off.
|
||||
6. NEVER run commands that don't terminate on their own such as web servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers.
|
||||
7. Avoid HTML entity escaping - use plain characters instead.
|
||||
|
||||
## Searching and Reading
|
||||
|
||||
If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions.
|
||||
|
||||
{{! TODO: If there are files, we should mention it but otherwise omit that fact }}
|
||||
If appropriate, use tool calls to explore the current project, which contains the following root directories:
|
||||
|
||||
{{#each worktrees}}
|
||||
- `{{abs_path}}`
|
||||
{{/each}}
|
||||
|
||||
- Bias towards not asking the user for help if you can find the answer yourself.
|
||||
- When providing paths to tools, the path should always start with the name of a project root directory listed above.
|
||||
- Before you read or edit a file, you must first find the full path. DO NOT ever guess a file path!
|
||||
{{# if (has_tool 'grep') }}
|
||||
- When looking for symbols in the project, prefer the `grep` tool.
|
||||
- As you learn about the structure of the project, use that information to scope `grep` searches to targeted subtrees of the project.
|
||||
- The user might specify a partial file path. If you don't know the full path, use `find_path` (not `grep`) before you read the file.
|
||||
{{/if}}
|
||||
{{else}}
|
||||
You are being tasked with providing a response, but you have no ability to use tools or to read or write any aspect of the user's system (other than any context the user might have provided to you).
|
||||
|
||||
As such, if you need the user to perform any actions for you, you must request them explicitly. Bias towards giving a response to the best of your ability, and then making requests for the user to take action (e.g. to give you more context) only optionally.
|
||||
|
||||
The one exception to this is if the user references something you don't know about - for example, the name of a source code file, function, type, or other piece of code that you have no awareness of. In this case, you MUST NOT MAKE SOMETHING UP, or assume you know what that thing is or how it works. Instead, you must ask the user for clarification rather than giving a response.
|
||||
{{/if}}
|
||||
|
||||
## Code Block Formatting
|
||||
|
||||
Whenever you mention a code block, you MUST use ONLY use the following format:
|
||||
```path/to/Something.blah#L123-456
|
||||
(code goes here)
|
||||
```
|
||||
The `#L123-456` means the line number range 123 through 456, and the path/to/Something.blah
|
||||
is a path in the project. (If there is no valid path in the project, then you can use
|
||||
/dev/null/path.extension for its path.) This is the ONLY valid way to format code blocks, because the Markdown parser
|
||||
does not understand the more common ```language syntax, or bare ``` blocks. It only
|
||||
understands this path-based syntax, and if the path is missing, then it will error and you will have to do it over again.
|
||||
Just to be really clear about this, if you ever find yourself writing three backticks followed by a language name, STOP!
|
||||
You have made a mistake. You can only ever put paths after triple backticks!
|
||||
<example>
|
||||
Based on all the information I've gathered, here's a summary of how this system works:
|
||||
1. The README file is loaded into the system.
|
||||
2. The system finds the first two headers, including everything in between. In this case, that would be:
|
||||
```path/to/README.md#L8-12
|
||||
# First Header
|
||||
This is the info under the first header.
|
||||
## Sub-header
|
||||
```
|
||||
3. Then the system finds the last header in the README:
|
||||
```path/to/README.md#L27-29
|
||||
## Last Header
|
||||
This is the last header in the README.
|
||||
```
|
||||
4. Finally, it passes this information on to the next process.
|
||||
</example>
|
||||
<example>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
```/dev/null/example.md#L1-3
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</example>
|
||||
Here are examples of ways you must never render code blocks:
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
```
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
This example is unacceptable because it does not include the path.
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
```markdown
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
This example is unacceptable because it has the language instead of the path.
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
</bad_example_do_not_do_this>
|
||||
This example is unacceptable because it uses indentation to mark the code block
|
||||
instead of backticks with a path.
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
```markdown
|
||||
/dev/null/example.md#L1-3
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
This example is unacceptable because the path is in the wrong place. The path must be directly after the opening backticks.
|
||||
|
||||
{{#if has_tools}}
|
||||
## Fixing Diagnostics
|
||||
|
||||
1. Make 1-2 attempts at fixing diagnostics, then defer to the user.
|
||||
2. Never simplify code you've written just to solve diagnostics. Complete, mostly correct code is more valuable than perfect code that doesn't solve the problem.
|
||||
|
||||
## Debugging
|
||||
|
||||
When debugging, only make code changes if you are certain that you can solve the problem.
|
||||
Otherwise, follow debugging best practices:
|
||||
1. Address the root cause instead of the symptoms.
|
||||
2. Add descriptive logging statements and error messages to track variable and code state.
|
||||
3. Add test functions and statements to isolate the problem.
|
||||
|
||||
{{/if}}
|
||||
## Calling External APIs
|
||||
|
||||
1. Unless explicitly requested by the user, use the best suited external APIs and packages to solve the task. There is no need to ask the user for permission.
|
||||
2. When selecting which version of an API or package to use, choose one that is compatible with the user's dependency management file(s). If no such file exists or if the package is not present, use the latest version that is in your training data.
|
||||
3. If an external API requires an API Key, be sure to point this out to the user. Adhere to best security practices (e.g. DO NOT hardcode an API key in a place where it can be exposed)
|
||||
|
||||
## System Information
|
||||
|
||||
Operating System: {{os}}
|
||||
Default Shell: {{shell}}
|
||||
|
||||
{{#if (or has_rules has_user_rules)}}
|
||||
## User's Custom Instructions
|
||||
|
||||
The following additional instructions are provided by the user, and should be followed to the best of your ability{{#if has_tools}} without interfering with the tool use guidelines{{/if}}.
|
||||
|
||||
{{#if has_rules}}
|
||||
There are project rules that apply to these root directories:
|
||||
{{#each worktrees}}
|
||||
{{#if rules_file}}
|
||||
`{{root_name}}/{{rules_file.path_in_worktree}}`:
|
||||
``````
|
||||
{{{rules_file.text}}}
|
||||
``````
|
||||
{{/if}}
|
||||
{{/each}}
|
||||
{{/if}}
|
||||
|
||||
{{#if has_user_rules}}
|
||||
The user has specified the following rules that should be applied:
|
||||
{{#each user_rules}}
|
||||
|
||||
{{#if title}}
|
||||
Rules title: {{title}}
|
||||
{{/if}}
|
||||
``````
|
||||
{{contents}}
|
||||
``````
|
||||
{{/each}}
|
||||
{{/if}}
|
||||
{{/if}}
|
||||
@@ -255,6 +255,19 @@
|
||||
// Whether to display inline and alongside documentation for items in the
|
||||
// completions menu
|
||||
"show_completion_documentation": true,
|
||||
// When to show the scrollbar in the completion menu.
|
||||
// This setting can take four values:
|
||||
//
|
||||
// 1. Show the scrollbar if there's important information or
|
||||
// follow the system's configured behavior
|
||||
// "auto"
|
||||
// 2. Match the system's configured behavior:
|
||||
// "system"
|
||||
// 3. Always show the scrollbar:
|
||||
// "always"
|
||||
// 4. Never show the scrollbar:
|
||||
// "never" (default)
|
||||
"completion_menu_scrollbar": "never",
|
||||
// Show method signatures in the editor, when inside parentheses.
|
||||
"auto_signature_help": false,
|
||||
// Whether to show the signature help after completion or a bracket pair inserted.
|
||||
@@ -592,7 +605,7 @@
|
||||
// to both the horizontal and vertical delta values while scrolling. Fast scrolling
|
||||
// happens when a user holds the alt or option key while scrolling.
|
||||
"fast_scroll_sensitivity": 4.0,
|
||||
"relative_line_numbers": false,
|
||||
"relative_line_numbers": "disabled",
|
||||
// If 'search_wrap' is disabled, search result do not wrap around the end of the file.
|
||||
"search_wrap": true,
|
||||
// Search options to enable by default when opening new project and buffer searches.
|
||||
@@ -602,7 +615,9 @@
|
||||
"whole_word": false,
|
||||
"case_sensitive": false,
|
||||
"include_ignored": false,
|
||||
"regex": false
|
||||
"regex": false,
|
||||
// Whether to center the cursor on each search match when navigating.
|
||||
"center_on_match": false
|
||||
},
|
||||
// When to populate a new search's query based on the text under the cursor.
|
||||
// This setting can take the following three values:
|
||||
@@ -1232,6 +1247,9 @@
|
||||
// that are overly broad can slow down Zed's file scanning. `file_scan_exclusions` takes
|
||||
// precedence over these inclusions.
|
||||
"file_scan_inclusions": [".env*"],
|
||||
// Globs to match files that will be considered "hidden". These files can be hidden from the
|
||||
// project panel by toggling the "hide_hidden" setting.
|
||||
"hidden_files": ["**/.*"],
|
||||
// Git gutter behavior configuration.
|
||||
"git": {
|
||||
// Control whether the git gutter is shown. May take 2 values:
|
||||
@@ -1329,7 +1347,7 @@
|
||||
"model": null,
|
||||
"max_tokens": null
|
||||
},
|
||||
// Whether edit predictions are enabled when editing text threads.
|
||||
// Whether edit predictions are enabled when editing text threads in the agent panel.
|
||||
// This setting has no effect if globally disabled.
|
||||
"enabled_in_text_threads": true
|
||||
},
|
||||
@@ -1700,6 +1718,7 @@
|
||||
"preferred_line_length": 72
|
||||
},
|
||||
"Go": {
|
||||
"hard_tabs": true,
|
||||
"code_actions_on_format": {
|
||||
"source.organizeImports": true
|
||||
},
|
||||
@@ -1718,6 +1737,9 @@
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"HTML+ERB": {
|
||||
"language_servers": ["herb", "!ruby-lsp", "..."]
|
||||
},
|
||||
"Java": {
|
||||
"prettier": {
|
||||
"allowed": true,
|
||||
@@ -1740,6 +1762,9 @@
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"JS+ERB": {
|
||||
"language_servers": ["!ruby-lsp", "..."]
|
||||
},
|
||||
"Kotlin": {
|
||||
"language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."]
|
||||
},
|
||||
@@ -1754,6 +1779,7 @@
|
||||
"Markdown": {
|
||||
"format_on_save": "off",
|
||||
"use_on_type_format": false,
|
||||
"remove_trailing_whitespace_on_save": false,
|
||||
"allow_rewrap": "anywhere",
|
||||
"soft_wrap": "editor_width",
|
||||
"prettier": {
|
||||
@@ -1769,9 +1795,13 @@
|
||||
}
|
||||
},
|
||||
"Plain Text": {
|
||||
"allow_rewrap": "anywhere"
|
||||
"allow_rewrap": "anywhere",
|
||||
"soft_wrap": "editor_width"
|
||||
},
|
||||
"Python": {
|
||||
"code_actions_on_format": {
|
||||
"source.organizeImports.ruff": true
|
||||
},
|
||||
"formatter": {
|
||||
"language_server": {
|
||||
"name": "ruff"
|
||||
@@ -1840,6 +1870,9 @@
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"YAML+ERB": {
|
||||
"language_servers": ["!ruby-lsp", "..."]
|
||||
},
|
||||
"Zig": {
|
||||
"language_servers": ["zls", "..."]
|
||||
}
|
||||
|
||||
@@ -6,8 +6,8 @@
|
||||
{
|
||||
"name": "Gruvbox Dark",
|
||||
"appearance": "dark",
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
"border.focused": "#303a36ff",
|
||||
@@ -412,8 +412,8 @@
|
||||
{
|
||||
"name": "Gruvbox Dark Hard",
|
||||
"appearance": "dark",
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
"border.focused": "#303a36ff",
|
||||
@@ -818,8 +818,8 @@
|
||||
{
|
||||
"name": "Gruvbox Dark Soft",
|
||||
"appearance": "dark",
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
"border.focused": "#303a36ff",
|
||||
@@ -1224,8 +1224,8 @@
|
||||
{
|
||||
"name": "Gruvbox Light",
|
||||
"appearance": "light",
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"border": "#c8b899ff",
|
||||
"border.variant": "#ddcca7ff",
|
||||
"border.focused": "#adc5ccff",
|
||||
@@ -1630,8 +1630,8 @@
|
||||
{
|
||||
"name": "Gruvbox Light Hard",
|
||||
"appearance": "light",
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"border": "#c8b899ff",
|
||||
"border.variant": "#ddcca7ff",
|
||||
"border.focused": "#adc5ccff",
|
||||
@@ -2036,8 +2036,8 @@
|
||||
{
|
||||
"name": "Gruvbox Light Soft",
|
||||
"appearance": "light",
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"border": "#c8b899ff",
|
||||
"border.variant": "#ddcca7ff",
|
||||
"border.focused": "#adc5ccff",
|
||||
|
||||
21 ci/Dockerfile.namespace Normal file
@@ -0,0 +1,21 @@
ARG NAMESPACE_BASE_IMAGE_REF=""

# Your image must build FROM NAMESPACE_BASE_IMAGE_REF
FROM ${NAMESPACE_BASE_IMAGE_REF} AS base

# Remove problematic git-lfs packagecloud source
RUN sudo rm -f /etc/apt/sources.list.d/*git-lfs*.list
# Install git and SSH for cloning private repositories
RUN sudo apt-get update && \
    sudo apt-get install -y git openssh-client

# Clone the Zed repository
RUN git clone https://github.com/zed-industries/zed.git ~/zed

# Run the Linux installation script
WORKDIR /home/runner/zed
RUN ./script/linux

# Clean up unnecessary files to reduce image size
RUN sudo apt-get clean && sudo rm -rf \
    /home/runner/zed

@@ -9,6 +9,9 @@ disallowed-methods = [
{ path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" },
{ path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" },
{ path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" },
{ path = "std::process::Command::stdin", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdin" },
{ path = "std::process::Command::stdout", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdout" },
{ path = "std::process::Command::stderr", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stderr" },
{ path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." },
{ path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." },
]
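The intent behind these lint entries is that anything spawning a child process should go through `smol`, so the wait happens asynchronously instead of blocking the current thread. A minimal sketch of the preferred pattern, assuming only the `smol::process::Command` API (which mirrors `std::process::Command`); it is illustrative, not code from this repository:

```
// Sketch of the smol-based pattern the lint steers toward; not taken from the Zed codebase.
use anyhow::Result;

async fn git_status_porcelain() -> Result<String> {
    // smol::process::Command mirrors std::process::Command, but `output()` returns
    // a future, so waiting on the child does not block the executor thread.
    let output = smol::process::Command::new("git")
        .args(["status", "--porcelain"])
        .output()
        .await?;
    Ok(String::from_utf8(output.stdout)?)
}
```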
|
||||
26 compose.yml
@@ -33,32 +33,6 @@ services:
    volumes:
      - ./livekit.yaml:/livekit.yaml

  postgrest_app:
    image: docker.io/postgrest/postgrest
    container_name: postgrest_app
    ports:
      - 8081:8081
    environment:
      PGRST_DB_URI: postgres://postgres@postgres:5432/zed
    volumes:
      - ./crates/collab/postgrest_app.conf:/etc/postgrest.conf
    command: postgrest /etc/postgrest.conf
    depends_on:
      - postgres

  postgrest_llm:
    image: docker.io/postgrest/postgrest
    container_name: postgrest_llm
    ports:
      - 8082:8082
    environment:
      PGRST_DB_URI: postgres://postgres@postgres:5432/zed_llm
    volumes:
      - ./crates/collab/postgrest_llm.conf:/etc/postgrest.conf
    command: postgrest /etc/postgrest.conf
    depends_on:
      - postgres

  stripe-mock:
    image: docker.io/stripe/stripe-mock:v0.178.0
    ports:
|
||||
@@ -3,7 +3,6 @@ mod diff;
|
||||
mod mention;
|
||||
mod terminal;
|
||||
|
||||
use ::terminal::terminal_settings::TerminalSettings;
|
||||
use agent_settings::AgentSettings;
|
||||
use collections::HashSet;
|
||||
pub use connection::*;
|
||||
@@ -12,7 +11,7 @@ use language::language_settings::FormatOnSave;
|
||||
pub use mention::*;
|
||||
use project::lsp_store::{FormatTrigger, LspFormatTarget};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings as _, SettingsLocation};
|
||||
use settings::Settings as _;
|
||||
use task::{Shell, ShellBuilder};
|
||||
pub use terminal::*;
|
||||
|
||||
@@ -35,7 +34,7 @@ use std::rc::Rc;
|
||||
use std::time::{Duration, Instant};
|
||||
use std::{fmt::Display, mem, path::PathBuf, sync::Arc};
|
||||
use ui::App;
|
||||
use util::{ResultExt, get_default_system_shell_preferring_bash};
|
||||
use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -95,9 +94,14 @@ pub enum AssistantMessageChunk {
|
||||
}
|
||||
|
||||
impl AssistantMessageChunk {
|
||||
pub fn from_str(chunk: &str, language_registry: &Arc<LanguageRegistry>, cx: &mut App) -> Self {
|
||||
pub fn from_str(
|
||||
chunk: &str,
|
||||
language_registry: &Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
cx: &mut App,
|
||||
) -> Self {
|
||||
Self::Message {
|
||||
block: ContentBlock::new(chunk.into(), language_registry, cx),
|
||||
block: ContentBlock::new(chunk.into(), language_registry, path_style, cx),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -186,6 +190,7 @@ impl ToolCall {
|
||||
tool_call: acp::ToolCall,
|
||||
status: ToolCallStatus,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
|
||||
cx: &mut App,
|
||||
) -> Result<Self> {
|
||||
@@ -199,6 +204,7 @@ impl ToolCall {
|
||||
content.push(ToolCallContent::from_acp(
|
||||
item,
|
||||
language_registry.clone(),
|
||||
path_style,
|
||||
terminals,
|
||||
cx,
|
||||
)?);
|
||||
@@ -223,6 +229,7 @@ impl ToolCall {
|
||||
&mut self,
|
||||
fields: acp::ToolCallUpdateFields,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
|
||||
cx: &mut App,
|
||||
) -> Result<()> {
|
||||
@@ -260,12 +267,13 @@ impl ToolCall {
|
||||
|
||||
// Reuse existing content if we can
|
||||
for (old, new) in self.content.iter_mut().zip(content.by_ref()) {
|
||||
old.update_from_acp(new, language_registry.clone(), terminals, cx)?;
|
||||
old.update_from_acp(new, language_registry.clone(), path_style, terminals, cx)?;
|
||||
}
|
||||
for new in content {
|
||||
self.content.push(ToolCallContent::from_acp(
|
||||
new,
|
||||
language_registry.clone(),
|
||||
path_style,
|
||||
terminals,
|
||||
cx,
|
||||
)?)
|
||||
@@ -450,21 +458,23 @@ impl ContentBlock {
|
||||
pub fn new(
|
||||
block: acp::ContentBlock,
|
||||
language_registry: &Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
cx: &mut App,
|
||||
) -> Self {
|
||||
let mut this = Self::Empty;
|
||||
this.append(block, language_registry, cx);
|
||||
this.append(block, language_registry, path_style, cx);
|
||||
this
|
||||
}
|
||||
|
||||
pub fn new_combined(
|
||||
blocks: impl IntoIterator<Item = acp::ContentBlock>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
cx: &mut App,
|
||||
) -> Self {
|
||||
let mut this = Self::Empty;
|
||||
for block in blocks {
|
||||
this.append(block, &language_registry, cx);
|
||||
this.append(block, &language_registry, path_style, cx);
|
||||
}
|
||||
this
|
||||
}
|
||||
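The practical upshot of this hunk is that a `PathStyle` now travels alongside the language registry into every `ContentBlock` constructor. A rough sketch of the new call shape, using only names visible in this diff and not verified against the rest of the crate:

```
// Sketch only: signatures taken from the hunk above; surrounding types come from the crate.
fn block_for_chunk(
    chunk: acp::ContentBlock,
    language_registry: &Arc<LanguageRegistry>,
    path_style: PathStyle,
    cx: &mut App,
) -> ContentBlock {
    // `path_style` is threaded through so file resource links are interpreted with the
    // project's path style (e.g. a remote Windows project) rather than the host's.
    ContentBlock::new(chunk, language_registry, path_style, cx)
}
```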
@@ -473,6 +483,7 @@ impl ContentBlock {
|
||||
&mut self,
|
||||
block: acp::ContentBlock,
|
||||
language_registry: &Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
cx: &mut App,
|
||||
) {
|
||||
if matches!(self, ContentBlock::Empty)
|
||||
@@ -482,7 +493,7 @@ impl ContentBlock {
|
||||
return;
|
||||
}
|
||||
|
||||
let new_content = self.block_string_contents(block);
|
||||
let new_content = self.block_string_contents(block, path_style);
|
||||
|
||||
match self {
|
||||
ContentBlock::Empty => {
|
||||
@@ -492,7 +503,7 @@ impl ContentBlock {
|
||||
markdown.update(cx, |markdown, cx| markdown.append(&new_content, cx));
|
||||
}
|
||||
ContentBlock::ResourceLink { resource_link } => {
|
||||
let existing_content = Self::resource_link_md(&resource_link.uri);
|
||||
let existing_content = Self::resource_link_md(&resource_link.uri, path_style);
|
||||
let combined = format!("{}\n{}", existing_content, new_content);
|
||||
|
||||
*self = Self::create_markdown_block(combined, language_registry, cx);
|
||||
@@ -511,11 +522,11 @@ impl ContentBlock {
|
||||
}
|
||||
}
|
||||
|
||||
fn block_string_contents(&self, block: acp::ContentBlock) -> String {
|
||||
fn block_string_contents(&self, block: acp::ContentBlock, path_style: PathStyle) -> String {
|
||||
match block {
|
||||
acp::ContentBlock::Text(text_content) => text_content.text,
|
||||
acp::ContentBlock::ResourceLink(resource_link) => {
|
||||
Self::resource_link_md(&resource_link.uri)
|
||||
Self::resource_link_md(&resource_link.uri, path_style)
|
||||
}
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
resource:
|
||||
@@ -524,14 +535,14 @@ impl ContentBlock {
|
||||
..
|
||||
}),
|
||||
..
|
||||
}) => Self::resource_link_md(&uri),
|
||||
}) => Self::resource_link_md(&uri, path_style),
|
||||
acp::ContentBlock::Image(image) => Self::image_md(&image),
|
||||
acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => String::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn resource_link_md(uri: &str) -> String {
|
||||
if let Some(uri) = MentionUri::parse(uri).log_err() {
|
||||
fn resource_link_md(uri: &str, path_style: PathStyle) -> String {
|
||||
if let Some(uri) = MentionUri::parse(uri, path_style).log_err() {
|
||||
uri.as_link().to_string()
|
||||
} else {
|
||||
uri.to_string()
|
||||
@@ -577,6 +588,7 @@ impl ToolCallContent {
|
||||
pub fn from_acp(
|
||||
content: acp::ToolCallContent,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
|
||||
cx: &mut App,
|
||||
) -> Result<Self> {
|
||||
@@ -584,6 +596,7 @@ impl ToolCallContent {
|
||||
acp::ToolCallContent::Content { content } => Ok(Self::ContentBlock(ContentBlock::new(
|
||||
content,
|
||||
&language_registry,
|
||||
path_style,
|
||||
cx,
|
||||
))),
|
||||
acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| {
|
||||
@@ -607,6 +620,7 @@ impl ToolCallContent {
|
||||
&mut self,
|
||||
new: acp::ToolCallContent,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
path_style: PathStyle,
|
||||
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
|
||||
cx: &mut App,
|
||||
) -> Result<()> {
|
||||
@@ -622,7 +636,7 @@ impl ToolCallContent {
|
||||
};
|
||||
|
||||
if needs_update {
|
||||
*self = Self::from_acp(new, language_registry, terminals, cx)?;
|
||||
*self = Self::from_acp(new, language_registry, path_style, terminals, cx)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -1105,13 +1119,13 @@ impl AcpThread {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Result<(), acp::Error> {
|
||||
match update {
|
||||
acp::SessionUpdate::UserMessageChunk { content } => {
|
||||
acp::SessionUpdate::UserMessageChunk(acp::ContentChunk { content, .. }) => {
|
||||
self.push_user_content_block(None, content, cx);
|
||||
}
|
||||
acp::SessionUpdate::AgentMessageChunk { content } => {
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk { content, .. }) => {
|
||||
self.push_assistant_content_block(content, false, cx);
|
||||
}
|
||||
acp::SessionUpdate::AgentThoughtChunk { content } => {
|
||||
acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk { content, .. }) => {
|
||||
self.push_assistant_content_block(content, true, cx);
|
||||
}
|
||||
acp::SessionUpdate::ToolCall(tool_call) => {
|
||||
@@ -1123,12 +1137,14 @@ impl AcpThread {
|
||||
acp::SessionUpdate::Plan(plan) => {
|
||||
self.update_plan(plan, cx);
|
||||
}
|
||||
acp::SessionUpdate::AvailableCommandsUpdate { available_commands } => {
|
||||
cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands))
|
||||
}
|
||||
acp::SessionUpdate::CurrentModeUpdate { current_mode_id } => {
|
||||
cx.emit(AcpThreadEvent::ModeUpdated(current_mode_id))
|
||||
}
|
||||
acp::SessionUpdate::AvailableCommandsUpdate(acp::AvailableCommandsUpdate {
|
||||
available_commands,
|
||||
..
|
||||
}) => cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands)),
|
||||
acp::SessionUpdate::CurrentModeUpdate(acp::CurrentModeUpdate {
|
||||
current_mode_id,
|
||||
..
|
||||
}) => cx.emit(AcpThreadEvent::ModeUpdated(current_mode_id)),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -1140,6 +1156,7 @@ impl AcpThread {
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let language_registry = self.project.read(cx).languages().clone();
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let entries_len = self.entries.len();
|
||||
|
||||
if let Some(last_entry) = self.entries.last_mut()
|
||||
@@ -1151,12 +1168,12 @@ impl AcpThread {
|
||||
}) = last_entry
|
||||
{
|
||||
*id = message_id.or(id.take());
|
||||
content.append(chunk.clone(), &language_registry, cx);
|
||||
content.append(chunk.clone(), &language_registry, path_style, cx);
|
||||
chunks.push(chunk);
|
||||
let idx = entries_len - 1;
|
||||
cx.emit(AcpThreadEvent::EntryUpdated(idx));
|
||||
} else {
|
||||
let content = ContentBlock::new(chunk.clone(), &language_registry, cx);
|
||||
let content = ContentBlock::new(chunk.clone(), &language_registry, path_style, cx);
|
||||
self.push_entry(
|
||||
AgentThreadEntry::UserMessage(UserMessage {
|
||||
id: message_id,
|
||||
@@ -1176,6 +1193,7 @@ impl AcpThread {
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let language_registry = self.project.read(cx).languages().clone();
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let entries_len = self.entries.len();
|
||||
if let Some(last_entry) = self.entries.last_mut()
|
||||
&& let AgentThreadEntry::AssistantMessage(AssistantMessage { chunks }) = last_entry
|
||||
@@ -1185,10 +1203,10 @@ impl AcpThread {
|
||||
match (chunks.last_mut(), is_thought) {
|
||||
(Some(AssistantMessageChunk::Message { block }), false)
|
||||
| (Some(AssistantMessageChunk::Thought { block }), true) => {
|
||||
block.append(chunk, &language_registry, cx)
|
||||
block.append(chunk, &language_registry, path_style, cx)
|
||||
}
|
||||
_ => {
|
||||
let block = ContentBlock::new(chunk, &language_registry, cx);
|
||||
let block = ContentBlock::new(chunk, &language_registry, path_style, cx);
|
||||
if is_thought {
|
||||
chunks.push(AssistantMessageChunk::Thought { block })
|
||||
} else {
|
||||
@@ -1197,7 +1215,7 @@ impl AcpThread {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let block = ContentBlock::new(chunk, &language_registry, cx);
|
||||
let block = ContentBlock::new(chunk, &language_registry, path_style, cx);
|
||||
let chunk = if is_thought {
|
||||
AssistantMessageChunk::Thought { block }
|
||||
} else {
|
||||
@@ -1249,6 +1267,7 @@ impl AcpThread {
|
||||
) -> Result<()> {
|
||||
let update = update.into();
|
||||
let languages = self.project.read(cx).languages().clone();
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
|
||||
let ix = match self.index_for_tool_call(update.id()) {
|
||||
Some(ix) => ix,
|
||||
@@ -1265,6 +1284,7 @@ impl AcpThread {
|
||||
meta: None,
|
||||
}),
|
||||
&languages,
|
||||
path_style,
|
||||
cx,
|
||||
))],
|
||||
status: ToolCallStatus::Failed,
|
||||
@@ -1284,7 +1304,7 @@ impl AcpThread {
|
||||
match update {
|
||||
ToolCallUpdate::UpdateFields(update) => {
|
||||
let location_updated = update.fields.locations.is_some();
|
||||
call.update_fields(update.fields, languages, &self.terminals, cx)?;
|
||||
call.update_fields(update.fields, languages, path_style, &self.terminals, cx)?;
|
||||
if location_updated {
|
||||
self.resolve_locations(update.id, cx);
|
||||
}
|
||||
@@ -1323,6 +1343,7 @@ impl AcpThread {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Result<(), acp::Error> {
|
||||
let language_registry = self.project.read(cx).languages().clone();
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let id = update.id.clone();
|
||||
|
||||
if let Some(ix) = self.index_for_tool_call(&id) {
|
||||
@@ -1330,7 +1351,13 @@ impl AcpThread {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
call.update_fields(update.fields, language_registry, &self.terminals, cx)?;
|
||||
call.update_fields(
|
||||
update.fields,
|
||||
language_registry,
|
||||
path_style,
|
||||
&self.terminals,
|
||||
cx,
|
||||
)?;
|
||||
call.status = status;
|
||||
|
||||
cx.emit(AcpThreadEvent::EntryUpdated(ix));
|
||||
@@ -1339,6 +1366,7 @@ impl AcpThread {
|
||||
update.try_into()?,
|
||||
status,
|
||||
language_registry,
|
||||
self.project.read(cx).path_style(cx),
|
||||
&self.terminals,
|
||||
cx,
|
||||
)?;
|
||||
@@ -1618,6 +1646,7 @@ impl AcpThread {
|
||||
let block = ContentBlock::new_combined(
|
||||
message.clone(),
|
||||
self.project.read(cx).languages().clone(),
|
||||
self.project.read(cx).path_style(cx),
|
||||
cx,
|
||||
);
|
||||
let request = acp::PromptRequest {
|
||||
@@ -2111,17 +2140,9 @@ impl AcpThread {
|
||||
) -> Task<Result<Entity<Terminal>>> {
|
||||
let env = match &cwd {
|
||||
Some(dir) => self.project.update(cx, |project, cx| {
|
||||
let worktree = project.find_worktree(dir.as_path(), cx);
|
||||
let shell = TerminalSettings::get(
|
||||
worktree.as_ref().map(|(worktree, path)| SettingsLocation {
|
||||
worktree_id: worktree.read(cx).id(),
|
||||
path: &path,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.shell
|
||||
.clone();
|
||||
project.directory_environment(&shell, dir.as_path().into(), cx)
|
||||
project.environment().update(cx, |env, cx| {
|
||||
env.directory_environment(dir.as_path().into(), cx)
|
||||
})
|
||||
}),
|
||||
None => Task::ready(None).shared(),
|
||||
};
|
||||
@@ -2586,17 +2607,19 @@ mod tests {
|
||||
thread.update(&mut cx, |thread, cx| {
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::AgentThoughtChunk {
|
||||
acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk {
|
||||
content: "Thinking ".into(),
|
||||
},
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::AgentThoughtChunk {
|
||||
acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk {
|
||||
content: "hard!".into(),
|
||||
},
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
@@ -3095,9 +3118,10 @@ mod tests {
|
||||
thread.update(&mut cx, |thread, cx| {
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::AgentMessageChunk {
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: content.text.to_uppercase().into(),
|
||||
},
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
@@ -3454,9 +3478,10 @@ mod tests {
|
||||
thread.update(&mut cx, |thread, cx| {
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::AgentMessageChunk {
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: content.text.to_uppercase().into(),
|
||||
},
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
@@ -7,10 +7,10 @@ use std::{
|
||||
fmt,
|
||||
ops::RangeInclusive,
|
||||
path::{Path, PathBuf},
|
||||
str::FromStr,
|
||||
};
|
||||
use ui::{App, IconName, SharedString};
|
||||
use url::Url;
|
||||
use util::paths::PathStyle;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
|
||||
pub enum MentionUri {
|
||||
@@ -49,7 +49,7 @@ pub enum MentionUri {
|
||||
}
|
||||
|
||||
impl MentionUri {
|
||||
pub fn parse(input: &str) -> Result<Self> {
|
||||
pub fn parse(input: &str, path_style: PathStyle) -> Result<Self> {
|
||||
fn parse_line_range(fragment: &str) -> Result<RangeInclusive<u32>> {
|
||||
let range = fragment
|
||||
.strip_prefix("L")
|
||||
@@ -74,25 +74,34 @@ impl MentionUri {
|
||||
let path = url.path();
|
||||
match url.scheme() {
|
||||
"file" => {
|
||||
let path = url.to_file_path().ok().context("Extracting file path")?;
|
||||
let path = if path_style.is_windows() {
|
||||
path.trim_start_matches("/")
|
||||
} else {
|
||||
path
|
||||
};
|
||||
|
||||
if let Some(fragment) = url.fragment() {
|
||||
let line_range = parse_line_range(fragment)?;
|
||||
if let Some(name) = single_query_param(&url, "symbol")? {
|
||||
Ok(Self::Symbol {
|
||||
name,
|
||||
abs_path: path,
|
||||
abs_path: path.into(),
|
||||
line_range,
|
||||
})
|
||||
} else {
|
||||
Ok(Self::Selection {
|
||||
abs_path: Some(path),
|
||||
abs_path: Some(path.into()),
|
||||
line_range,
|
||||
})
|
||||
}
|
||||
} else if input.ends_with("/") {
|
||||
Ok(Self::Directory { abs_path: path })
|
||||
Ok(Self::Directory {
|
||||
abs_path: path.into(),
|
||||
})
|
||||
} else {
|
||||
Ok(Self::File { abs_path: path })
|
||||
Ok(Self::File {
|
||||
abs_path: path.into(),
|
||||
})
|
||||
}
|
||||
}
|
||||
"zed" => {
|
||||
@@ -213,18 +222,14 @@ impl MentionUri {
|
||||
pub fn to_uri(&self) -> Url {
|
||||
match self {
|
||||
MentionUri::File { abs_path } => {
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path("/agent/file");
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
let mut url = Url::parse("file:///").unwrap();
|
||||
url.set_path(&abs_path.to_string_lossy());
|
||||
url
|
||||
}
|
||||
MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(),
|
||||
MentionUri::Directory { abs_path } => {
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path("/agent/directory");
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
let mut url = Url::parse("file:///").unwrap();
|
||||
url.set_path(&abs_path.to_string_lossy());
|
||||
url
|
||||
}
|
||||
MentionUri::Symbol {
|
||||
@@ -232,10 +237,9 @@ impl MentionUri {
|
||||
name,
|
||||
line_range,
|
||||
} => {
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path(&format!("/agent/symbol/{name}"));
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
let mut url = Url::parse("file:///").unwrap();
|
||||
url.set_path(&abs_path.to_string_lossy());
|
||||
url.query_pairs_mut().append_pair("symbol", name);
|
||||
url.set_fragment(Some(&format!(
|
||||
"L{}:{}",
|
||||
line_range.start() + 1,
|
||||
@@ -247,13 +251,14 @@ impl MentionUri {
|
||||
abs_path,
|
||||
line_range,
|
||||
} => {
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
if let Some(abs_path) = abs_path {
|
||||
url.set_path("/agent/selection");
|
||||
url.query_pairs_mut()
|
||||
.append_pair("path", &abs_path.to_string_lossy());
|
||||
let mut url = if let Some(path) = abs_path {
|
||||
let mut url = Url::parse("file:///").unwrap();
|
||||
url.set_path(&path.to_string_lossy());
|
||||
url
|
||||
} else {
|
||||
let mut url = Url::parse("zed:///").unwrap();
|
||||
url.set_path("/agent/untitled-buffer");
|
||||
url
|
||||
};
|
||||
url.set_fragment(Some(&format!(
|
||||
"L{}:{}",
|
||||
@@ -288,14 +293,6 @@ impl MentionUri {
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for MentionUri {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> anyhow::Result<Self> {
|
||||
Self::parse(s)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MentionLink<'a>(&'a MentionUri);
|
||||
|
||||
impl fmt::Display for MentionLink<'_> {
|
||||
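Because `parse` now requires a `PathStyle`, the blanket `FromStr` impl above is removed and call sites pass the style explicitly. A small sketch of the round trip the updated tests below exercise, assuming the APIs exactly as shown in this diff:

```
use util::paths::PathStyle;

// Sketch only: mirrors test_parse_selection_uri below.
let uri = "file:///path/to/file.rs#L5:15";
let parsed = MentionUri::parse(uri, PathStyle::local()).unwrap();
assert_eq!(parsed.to_uri().to_string(), uri);
```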
@@ -338,93 +335,81 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_parse_file_uri() {
|
||||
let old_uri = uri!("file:///path/to/file.rs");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
let file_uri = uri!("file:///path/to/file.rs");
|
||||
let parsed = MentionUri::parse(file_uri, PathStyle::local()).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::File { abs_path } => {
|
||||
assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/file.rs"));
|
||||
assert_eq!(abs_path, Path::new(path!("/path/to/file.rs")));
|
||||
}
|
||||
_ => panic!("Expected File variant"),
|
||||
}
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/file"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
assert_eq!(parsed.to_uri().to_string(), file_uri);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_directory_uri() {
|
||||
let old_uri = uri!("file:///path/to/dir/");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
let file_uri = uri!("file:///path/to/dir/");
|
||||
let parsed = MentionUri::parse(file_uri, PathStyle::local()).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Directory { abs_path } => {
|
||||
assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/dir/"));
|
||||
assert_eq!(abs_path, Path::new(path!("/path/to/dir/")));
|
||||
}
|
||||
_ => panic!("Expected Directory variant"),
|
||||
}
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/directory"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
assert_eq!(parsed.to_uri().to_string(), file_uri);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_to_directory_uri_without_slash() {
|
||||
let uri = MentionUri::Directory {
|
||||
abs_path: PathBuf::from(path!("/path/to/dir")),
|
||||
abs_path: PathBuf::from(path!("/path/to/dir/")),
|
||||
};
|
||||
let uri_string = uri.to_uri().to_string();
|
||||
assert!(uri_string.starts_with("zed:///agent/directory"));
|
||||
assert_eq!(MentionUri::parse(&uri_string).unwrap(), uri);
|
||||
let expected = uri!("file:///path/to/dir/");
|
||||
assert_eq!(uri.to_uri().to_string(), expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_symbol_uri() {
|
||||
let old_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
|
||||
let parsed = MentionUri::parse(symbol_uri, PathStyle::local()).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Symbol {
|
||||
abs_path: path,
|
||||
name,
|
||||
line_range,
|
||||
} => {
|
||||
assert_eq!(path.to_str().unwrap(), path!("/path/to/file.rs"));
|
||||
assert_eq!(path, Path::new(path!("/path/to/file.rs")));
|
||||
assert_eq!(name, "MySymbol");
|
||||
assert_eq!(line_range.start(), &9);
|
||||
assert_eq!(line_range.end(), &19);
|
||||
}
|
||||
_ => panic!("Expected Symbol variant"),
|
||||
}
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/symbol/MySymbol"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
assert_eq!(parsed.to_uri().to_string(), symbol_uri);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_selection_uri() {
|
||||
let old_uri = uri!("file:///path/to/file.rs#L5:15");
|
||||
let parsed = MentionUri::parse(old_uri).unwrap();
|
||||
let selection_uri = uri!("file:///path/to/file.rs#L5:15");
|
||||
let parsed = MentionUri::parse(selection_uri, PathStyle::local()).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Selection {
|
||||
abs_path: path,
|
||||
line_range,
|
||||
} => {
|
||||
assert_eq!(
|
||||
path.as_ref().unwrap().to_str().unwrap(),
|
||||
path!("/path/to/file.rs")
|
||||
);
|
||||
assert_eq!(path.as_ref().unwrap(), Path::new(path!("/path/to/file.rs")));
|
||||
assert_eq!(line_range.start(), &4);
|
||||
assert_eq!(line_range.end(), &14);
|
||||
}
|
||||
_ => panic!("Expected Selection variant"),
|
||||
}
|
||||
let new_uri = parsed.to_uri().to_string();
|
||||
assert!(new_uri.starts_with("zed:///agent/selection"));
|
||||
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
|
||||
assert_eq!(parsed.to_uri().to_string(), selection_uri);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_untitled_selection_uri() {
|
||||
let selection_uri = uri!("zed:///agent/untitled-buffer#L1:10");
|
||||
let parsed = MentionUri::parse(selection_uri).unwrap();
|
||||
let parsed = MentionUri::parse(selection_uri, PathStyle::local()).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Selection {
|
||||
abs_path: None,
|
||||
@@ -441,7 +426,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_parse_thread_uri() {
|
||||
let thread_uri = "zed:///agent/thread/session123?name=Thread+name";
|
||||
let parsed = MentionUri::parse(thread_uri).unwrap();
|
||||
let parsed = MentionUri::parse(thread_uri, PathStyle::local()).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Thread {
|
||||
id: thread_id,
|
||||
@@ -458,7 +443,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_parse_rule_uri() {
|
||||
let rule_uri = "zed:///agent/rule/d8694ff2-90d5-4b6f-be33-33c1763acd52?name=Some+rule";
|
||||
let parsed = MentionUri::parse(rule_uri).unwrap();
|
||||
let parsed = MentionUri::parse(rule_uri, PathStyle::local()).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Rule { id, name } => {
|
||||
assert_eq!(id.to_string(), "d8694ff2-90d5-4b6f-be33-33c1763acd52");
|
||||
@@ -472,7 +457,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_parse_fetch_http_uri() {
|
||||
let http_uri = "http://example.com/path?query=value#fragment";
|
||||
let parsed = MentionUri::parse(http_uri).unwrap();
|
||||
let parsed = MentionUri::parse(http_uri, PathStyle::local()).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Fetch { url } => {
|
||||
assert_eq!(url.to_string(), http_uri);
|
||||
@@ -485,7 +470,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_parse_fetch_https_uri() {
|
||||
let https_uri = "https://example.com/api/endpoint";
|
||||
let parsed = MentionUri::parse(https_uri).unwrap();
|
||||
let parsed = MentionUri::parse(https_uri, PathStyle::local()).unwrap();
|
||||
match &parsed {
|
||||
MentionUri::Fetch { url } => {
|
||||
assert_eq!(url.to_string(), https_uri);
|
||||
@@ -497,40 +482,55 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_invalid_scheme() {
|
||||
assert!(MentionUri::parse("ftp://example.com").is_err());
|
||||
assert!(MentionUri::parse("ssh://example.com").is_err());
|
||||
assert!(MentionUri::parse("unknown://example.com").is_err());
|
||||
assert!(MentionUri::parse("ftp://example.com", PathStyle::local()).is_err());
|
||||
assert!(MentionUri::parse("ssh://example.com", PathStyle::local()).is_err());
|
||||
assert!(MentionUri::parse("unknown://example.com", PathStyle::local()).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_zed_path() {
|
||||
assert!(MentionUri::parse("zed:///invalid/path").is_err());
|
||||
assert!(MentionUri::parse("zed:///agent/unknown/test").is_err());
|
||||
assert!(MentionUri::parse("zed:///invalid/path", PathStyle::local()).is_err());
|
||||
assert!(MentionUri::parse("zed:///agent/unknown/test", PathStyle::local()).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_line_range_format() {
|
||||
// Missing L prefix
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#10:20")).is_err());
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#10:20"), PathStyle::local()).is_err()
|
||||
);
|
||||
|
||||
// Missing colon separator
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L1020")).is_err());
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#L1020"), PathStyle::local()).is_err()
|
||||
);
|
||||
|
||||
// Invalid numbers
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L10:abc")).is_err());
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#Labc:20")).is_err());
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#L10:abc"), PathStyle::local()).is_err()
|
||||
);
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#Labc:20"), PathStyle::local()).is_err()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_query_parameters() {
|
||||
// Invalid query parameter name
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L10:20?invalid=test")).is_err());
|
||||
assert!(
|
||||
MentionUri::parse(
|
||||
uri!("file:///path/to/file.rs#L10:20?invalid=test"),
|
||||
PathStyle::local()
|
||||
)
|
||||
.is_err()
|
||||
);
|
||||
|
||||
// Too many query parameters
|
||||
assert!(
|
||||
MentionUri::parse(uri!(
|
||||
"file:///path/to/file.rs#L10:20?symbol=test&another=param"
|
||||
))
|
||||
MentionUri::parse(
|
||||
uri!("file:///path/to/file.rs#L10:20?symbol=test&another=param"),
|
||||
PathStyle::local()
|
||||
)
|
||||
.is_err()
|
||||
);
|
||||
}
|
||||
@@ -538,8 +538,14 @@ mod tests {
|
||||
#[test]
|
||||
fn test_zero_based_line_numbers() {
|
||||
// Test that 0-based line numbers are rejected (should be 1-based)
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L0:10")).is_err());
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L1:0")).is_err());
|
||||
assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L0:0")).is_err());
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#L0:10"), PathStyle::local()).is_err()
|
||||
);
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#L1:0"), PathStyle::local()).is_err()
|
||||
);
|
||||
assert!(
|
||||
MentionUri::parse(uri!("file:///path/to/file.rs#L0:0"), PathStyle::local()).is_err()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,10 +5,8 @@ use gpui::{App, AppContext, AsyncApp, Context, Entity, Task};
|
||||
use language::LanguageRegistry;
|
||||
use markdown::Markdown;
|
||||
use project::Project;
|
||||
use settings::{Settings as _, SettingsLocation};
|
||||
use std::{path::PathBuf, process::ExitStatus, sync::Arc, time::Instant};
|
||||
use task::Shell;
|
||||
use terminal::terminal_settings::TerminalSettings;
|
||||
use util::get_default_system_shell_preferring_bash;
|
||||
|
||||
pub struct Terminal {
|
||||
@@ -187,17 +185,9 @@ pub async fn create_terminal_entity(
|
||||
let mut env = if let Some(dir) = &cwd {
|
||||
project
|
||||
.update(cx, |project, cx| {
|
||||
let worktree = project.find_worktree(dir.as_path(), cx);
|
||||
let shell = TerminalSettings::get(
|
||||
worktree.as_ref().map(|(worktree, path)| SettingsLocation {
|
||||
worktree_id: worktree.read(cx).id(),
|
||||
path: &path,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.shell
|
||||
.clone();
|
||||
project.directory_environment(&shell, dir.clone().into(), cx)
|
||||
project.environment().update(cx, |env, cx| {
|
||||
env.directory_environment(dir.clone().into(), cx)
|
||||
})
|
||||
})?
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
|
||||
@@ -19,7 +19,7 @@ use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle};
|
||||
use project::Project;
|
||||
use settings::Settings;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{Tooltip, prelude::*};
|
||||
use ui::{Tooltip, WithScrollbar, prelude::*};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{
|
||||
Item, ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
|
||||
@@ -259,6 +259,15 @@ impl AcpTools {
|
||||
serde_json::to_string_pretty(&messages).ok()
|
||||
}
|
||||
|
||||
fn clear_messages(&mut self, cx: &mut Context<Self>) {
|
||||
if let Some(connection) = self.watched_connection.as_mut() {
|
||||
connection.messages.clear();
|
||||
connection.list_state.reset(0);
|
||||
self.expanded.clear();
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
fn render_message(
|
||||
&mut self,
|
||||
index: usize,
|
||||
@@ -282,17 +291,19 @@ impl AcpTools {
|
||||
let expanded = self.expanded.contains(&index);
|
||||
|
||||
v_flex()
|
||||
.w_full()
|
||||
.px_4()
|
||||
.py_3()
|
||||
.border_color(colors.border)
|
||||
.border_b_1()
|
||||
.gap_2()
|
||||
.items_start()
|
||||
.font_buffer(cx)
|
||||
.text_size(base_size)
|
||||
.id(index)
|
||||
.group("message")
|
||||
.cursor_pointer()
|
||||
.font_buffer(cx)
|
||||
.w_full()
|
||||
.py_3()
|
||||
.pl_4()
|
||||
.pr_5()
|
||||
.gap_2()
|
||||
.items_start()
|
||||
.text_size(base_size)
|
||||
.border_color(colors.border)
|
||||
.border_b_1()
|
||||
.hover(|this| this.bg(colors.element_background.opacity(0.5)))
|
||||
.on_click(cx.listener(move |this, _, _, cx| {
|
||||
if this.expanded.contains(&index) {
|
||||
@@ -314,15 +325,14 @@ impl AcpTools {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.items_center()
|
||||
.flex_shrink_0()
|
||||
.child(match message.direction {
|
||||
acp::StreamMessageDirection::Incoming => {
|
||||
ui::Icon::new(ui::IconName::ArrowDown).color(Color::Error)
|
||||
}
|
||||
acp::StreamMessageDirection::Outgoing => {
|
||||
ui::Icon::new(ui::IconName::ArrowUp).color(Color::Success)
|
||||
}
|
||||
acp::StreamMessageDirection::Incoming => Icon::new(IconName::ArrowDown)
|
||||
.color(Color::Error)
|
||||
.size(IconSize::Small),
|
||||
acp::StreamMessageDirection::Outgoing => Icon::new(IconName::ArrowUp)
|
||||
.color(Color::Success)
|
||||
.size(IconSize::Small),
|
||||
})
|
||||
.child(
|
||||
Label::new(message.name.clone())
|
||||
@@ -492,7 +502,7 @@ impl Focusable for AcpTools {
|
||||
}
|
||||
|
||||
impl Render for AcpTools {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
v_flex()
|
||||
.track_focus(&self.focus_handle)
|
||||
.size_full()
|
||||
@@ -507,13 +517,19 @@ impl Render for AcpTools {
|
||||
.child("No messages recorded yet")
|
||||
.into_any()
|
||||
} else {
|
||||
list(
|
||||
connection.list_state.clone(),
|
||||
cx.processor(Self::render_message),
|
||||
)
|
||||
.with_sizing_behavior(gpui::ListSizingBehavior::Auto)
|
||||
.flex_grow()
|
||||
.into_any()
|
||||
div()
|
||||
.size_full()
|
||||
.flex_grow()
|
||||
.child(
|
||||
list(
|
||||
connection.list_state.clone(),
|
||||
cx.processor(Self::render_message),
|
||||
)
|
||||
.with_sizing_behavior(gpui::ListSizingBehavior::Auto)
|
||||
.size_full(),
|
||||
)
|
||||
.vertical_scrollbar_for(connection.list_state.clone(), window, cx)
|
||||
.into_any()
|
||||
}
|
||||
}
|
||||
None => h_flex()
|
||||
@@ -547,10 +563,16 @@ impl Render for AcpToolsToolbarItemView {
|
||||
};
|
||||
|
||||
let acp_tools = acp_tools.clone();
|
||||
let has_messages = acp_tools
|
||||
.read(cx)
|
||||
.watched_connection
|
||||
.as_ref()
|
||||
.is_some_and(|connection| !connection.messages.is_empty());
|
||||
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
.child({
|
||||
let acp_tools = acp_tools.clone();
|
||||
IconButton::new(
|
||||
"copy_all_messages",
|
||||
if self.just_copied {
|
||||
@@ -565,13 +587,7 @@ impl Render for AcpToolsToolbarItemView {
|
||||
} else {
|
||||
"Copy All Messages"
|
||||
}))
|
||||
.disabled(
|
||||
acp_tools
|
||||
.read(cx)
|
||||
.watched_connection
|
||||
.as_ref()
|
||||
.is_none_or(|connection| connection.messages.is_empty()),
|
||||
)
|
||||
.disabled(!has_messages)
|
||||
.on_click(cx.listener(move |this, _, _window, cx| {
|
||||
if let Some(content) = acp_tools.read(cx).serialize_observed_messages() {
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(content));
|
||||
@@ -586,7 +602,18 @@ impl Render for AcpToolsToolbarItemView {
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
})),
|
||||
}))
|
||||
})
|
||||
.child(
|
||||
IconButton::new("clear_messages", IconName::Trash)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(Tooltip::text("Clear Messages"))
|
||||
.disabled(!has_messages)
|
||||
.on_click(cx.listener(move |_this, _, _window, cx| {
|
||||
acp_tools.update(cx, |acp_tools, cx| {
|
||||
acp_tools.clear_messages(cx);
|
||||
});
|
||||
})),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ use language::{
|
||||
LanguageServerStatusUpdate, ServerHealth,
|
||||
};
|
||||
use project::{
|
||||
LanguageServerProgress, LspStoreEvent, Project, ProjectEnvironmentEvent,
|
||||
LanguageServerProgress, LspStoreEvent, ProgressToken, Project, ProjectEnvironmentEvent,
|
||||
git_store::{GitStoreEvent, Repository},
|
||||
};
|
||||
use smallvec::SmallVec;
|
||||
@@ -61,7 +61,7 @@ struct ServerStatus {
|
||||
|
||||
struct PendingWork<'a> {
|
||||
language_server_id: LanguageServerId,
|
||||
progress_token: &'a str,
|
||||
progress_token: &'a ProgressToken,
|
||||
progress: &'a LanguageServerProgress,
|
||||
}
|
||||
|
||||
@@ -313,9 +313,9 @@ impl ActivityIndicator {
|
||||
let mut pending_work = status
|
||||
.pending_work
|
||||
.iter()
|
||||
.map(|(token, progress)| PendingWork {
|
||||
.map(|(progress_token, progress)| PendingWork {
|
||||
language_server_id: server_id,
|
||||
progress_token: token.as_str(),
|
||||
progress_token,
|
||||
progress,
|
||||
})
|
||||
.collect::<SmallVec<[_; 4]>>();
|
||||
@@ -358,11 +358,7 @@ impl ActivityIndicator {
|
||||
..
|
||||
}) = pending_work.next()
|
||||
{
|
||||
let mut message = progress
|
||||
.title
|
||||
.as_deref()
|
||||
.unwrap_or(progress_token)
|
||||
.to_string();
|
||||
let mut message = progress.title.clone().unwrap_or(progress_token.to_string());
|
||||
|
||||
if let Some(percentage) = progress.percentage {
|
||||
write!(&mut message, " ({}%)", percentage).unwrap();
|
||||
@@ -773,7 +769,7 @@ impl Render for ActivityIndicator {
|
||||
let Some(content) = self.content_to_render(cx) else {
|
||||
return result;
|
||||
};
|
||||
let this = cx.entity().downgrade();
|
||||
let activity_indicator = cx.entity().downgrade();
|
||||
let truncate_content = content.message.len() > MAX_MESSAGE_LEN;
|
||||
result.gap_2().child(
|
||||
PopoverMenu::new("activity-indicator-popover")
|
||||
@@ -815,22 +811,21 @@ impl Render for ActivityIndicator {
|
||||
)
|
||||
.anchor(gpui::Corner::BottomLeft)
|
||||
.menu(move |window, cx| {
|
||||
let strong_this = this.upgrade()?;
|
||||
let strong_this = activity_indicator.upgrade()?;
|
||||
let mut has_work = false;
|
||||
let menu = ContextMenu::build(window, cx, |mut menu, _, cx| {
|
||||
for work in strong_this.read(cx).pending_language_server_work(cx) {
|
||||
has_work = true;
|
||||
let this = this.clone();
|
||||
let activity_indicator = activity_indicator.clone();
|
||||
let mut title = work
|
||||
.progress
|
||||
.title
|
||||
.as_deref()
|
||||
.unwrap_or(work.progress_token)
|
||||
.to_owned();
|
||||
.clone()
|
||||
.unwrap_or(work.progress_token.to_string());
|
||||
|
||||
if work.progress.is_cancellable {
|
||||
let language_server_id = work.language_server_id;
|
||||
let token = work.progress_token.to_string();
|
||||
let token = work.progress_token.clone();
|
||||
let title = SharedString::from(title);
|
||||
menu = menu.custom_entry(
|
||||
move |_, _| {
|
||||
@@ -842,18 +837,23 @@ impl Render for ActivityIndicator {
|
||||
.into_any_element()
|
||||
},
|
||||
move |_, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.project.update(cx, |project, cx| {
|
||||
project.cancel_language_server_work(
|
||||
language_server_id,
|
||||
Some(token.clone()),
|
||||
let token = token.clone();
|
||||
activity_indicator
|
||||
.update(cx, |activity_indicator, cx| {
|
||||
activity_indicator.project.update(
|
||||
cx,
|
||||
|project, cx| {
|
||||
project.cancel_language_server_work(
|
||||
language_server_id,
|
||||
Some(token),
|
||||
cx,
|
||||
);
|
||||
},
|
||||
);
|
||||
});
|
||||
this.context_menu_handle.hide(cx);
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
activity_indicator.context_menu_handle.hide(cx);
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
},
|
||||
);
|
||||
} else {
|
||||
|
||||
@@ -11,7 +11,7 @@ path = "src/agent.rs"
|
||||
[features]
|
||||
test-support = ["db/test-support"]
|
||||
eval = []
|
||||
edit-agent-eval = []
|
||||
unit-eval = []
|
||||
e2e = []
|
||||
|
||||
[lints]
|
||||
|
||||
@@ -1035,12 +1035,13 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
let session_id = params.session_id.clone();
|
||||
log::info!("Received prompt request for session: {}", session_id);
|
||||
log::debug!("Prompt blocks count: {}", params.prompt.len());
|
||||
let path_style = self.0.read(cx).project.read(cx).path_style(cx);
|
||||
|
||||
self.run_turn(session_id, cx, |thread, cx| {
|
||||
self.run_turn(session_id, cx, move |thread, cx| {
|
||||
let content: Vec<UserMessageContent> = params
|
||||
.prompt
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.map(|block| UserMessageContent::from_content_block(block, path_style))
|
||||
.collect::<Vec<_>>();
|
||||
log::debug!("Converted prompt to message: {} chars", content.len());
|
||||
log::debug!("Message id: {:?}", id);
|
||||
|
||||
@@ -13,7 +13,15 @@ const EDITS_END_TAG: &str = "</edits>";
|
||||
const SEARCH_MARKER: &str = "<<<<<<< SEARCH";
|
||||
const SEPARATOR_MARKER: &str = "=======";
|
||||
const REPLACE_MARKER: &str = ">>>>>>> REPLACE";
|
||||
const END_TAGS: [&str; 3] = [OLD_TEXT_END_TAG, NEW_TEXT_END_TAG, EDITS_END_TAG];
|
||||
const SONNET_PARAMETER_INVOKE_1: &str = "</parameter>\n</invoke>";
|
||||
const SONNET_PARAMETER_INVOKE_2: &str = "</parameter></invoke>";
|
||||
const END_TAGS: [&str; 5] = [
|
||||
OLD_TEXT_END_TAG,
|
||||
NEW_TEXT_END_TAG,
|
||||
EDITS_END_TAG,
|
||||
SONNET_PARAMETER_INVOKE_1, // Remove this after switching to streaming tool call
|
||||
SONNET_PARAMETER_INVOKE_2,
|
||||
];
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum EditParserEvent {
|
||||
@@ -547,6 +555,37 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_xml_edits_with_closing_parameter_invoke(mut rng: StdRng) {
|
||||
// This case is a regression with Claude Sonnet 4.5.
|
||||
// Sometimes Sonnet thinks that it's doing a tool call
|
||||
// and closes its response with '</parameter></invoke>'
|
||||
// instead of properly closing </new_text>
|
||||
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
<old_text>some text</old_text><new_text>updated text</parameter></invoke>
|
||||
"},
|
||||
&mut parser,
|
||||
&mut rng
|
||||
),
|
||||
vec![Edit {
|
||||
old_text: "some text".to_string(),
|
||||
new_text: "updated text".to_string(),
|
||||
line_hint: None,
|
||||
},]
|
||||
);
|
||||
assert_eq!(
|
||||
parser.finish(),
|
||||
EditParserMetrics {
|
||||
tags: 2,
|
||||
mismatched_tags: 1
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_xml_nested_tags(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
@@ -1035,6 +1074,11 @@ mod tests {
|
||||
last_ix = chunk_ix;
|
||||
}
|
||||
|
||||
if new_text.is_some() {
|
||||
pending_edit.new_text = new_text.take().unwrap();
|
||||
edits.push(pending_edit);
|
||||
}
|
||||
|
||||
edits
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,7 +31,7 @@ use std::{
|
||||
use util::path;
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
fn eval_extract_handle_command_output() {
|
||||
// Test how well agent generates multiple edit hunks.
|
||||
//
|
||||
@@ -108,7 +108,7 @@ fn eval_extract_handle_command_output() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
fn eval_delete_run_git_blame() {
|
||||
// Model | Pass rate
|
||||
// ----------------------------|----------
|
||||
@@ -171,7 +171,7 @@ fn eval_delete_run_git_blame() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
fn eval_translate_doc_comments() {
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
@@ -234,7 +234,7 @@ fn eval_translate_doc_comments() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
@@ -360,7 +360,7 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
fn eval_disable_cursor_blinking() {
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
@@ -446,7 +446,7 @@ fn eval_disable_cursor_blinking() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
fn eval_from_pixels_constructor() {
|
||||
// Results for 2025-06-13
|
||||
//
|
||||
@@ -656,7 +656,7 @@ fn eval_from_pixels_constructor() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
fn eval_zode() {
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
@@ -763,7 +763,7 @@ fn eval_zode() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
fn eval_add_overwrite_test() {
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
@@ -995,7 +995,7 @@ fn eval_add_overwrite_test() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "edit-agent-eval"), ignore)]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
fn eval_create_empty_file() {
|
||||
// Check that Edit Agent can create a file without writing its
|
||||
// thoughts into it. This issue is not specific to empty files, but
|
||||
@@ -1483,11 +1483,11 @@ impl EditAgentTest {
|
||||
fs.insert_tree("/root", json!({})).await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let agent_model = SelectedModel::from_str(
|
||||
&std::env::var("ZED_AGENT_MODEL").unwrap_or("anthropic/claude-4-sonnet-latest".into()),
|
||||
&std::env::var("ZED_AGENT_MODEL").unwrap_or("anthropic/claude-sonnet-4-latest".into()),
|
||||
)
|
||||
.unwrap();
|
||||
let judge_model = SelectedModel::from_str(
|
||||
&std::env::var("ZED_JUDGE_MODEL").unwrap_or("anthropic/claude-4-sonnet-latest".into()),
|
||||
&std::env::var("ZED_JUDGE_MODEL").unwrap_or("anthropic/claude-sonnet-4-latest".into()),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
@@ -1547,7 +1547,7 @@ impl EditAgentTest {
|
||||
model.provider_id() == selected_model.provider
|
||||
&& model.id() == selected_model.model
|
||||
})
|
||||
.expect("Model not found");
|
||||
.unwrap_or_else(|| panic!("Model {} not found", selected_model.model.0));
|
||||
model
|
||||
})
|
||||
}
|
||||
@@ -1581,6 +1581,7 @@ impl EditAgentTest {
|
||||
let template = crate::SystemPromptTemplate {
|
||||
project: &project_context,
|
||||
available_tools: tool_names,
|
||||
model_name: None,
|
||||
};
|
||||
let templates = Templates::new();
|
||||
template.render(&templates).unwrap()
|
||||
|
||||
@@ -38,6 +38,7 @@ pub struct SystemPromptTemplate<'a> {
|
||||
#[serde(flatten)]
|
||||
pub project: &'a prompt_store::ProjectContext,
|
||||
pub available_tools: Vec<SharedString>,
|
||||
pub model_name: Option<String>,
|
||||
}
|
||||
|
||||
impl Template for SystemPromptTemplate<'_> {
|
||||
@@ -79,9 +80,11 @@ mod tests {
|
||||
let template = SystemPromptTemplate {
|
||||
project: &project,
|
||||
available_tools: vec!["echo".into()],
|
||||
model_name: Some("test-model".to_string()),
|
||||
};
|
||||
let templates = Templates::new();
|
||||
let rendered = template.render(&templates).unwrap();
|
||||
assert!(rendered.contains("## Fixing Diagnostics"));
|
||||
assert!(rendered.contains("test-model"));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -150,6 +150,12 @@ Otherwise, follow debugging best practices:
|
||||
Operating System: {{os}}
|
||||
Default Shell: {{shell}}
|
||||
|
||||
{{#if model_name}}
|
||||
## Model Information
|
||||
|
||||
You are powered by the model named {{model_name}}.
|
||||
|
||||
{{/if}}
|
||||
{{#if (or has_rules has_user_rules)}}
|
||||
## User's Custom Instructions
|
||||
|
||||
|
||||
@@ -160,6 +160,42 @@ async fn test_system_prompt(cx: &mut TestAppContext) {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_system_prompt_without_tools(cx: &mut TestAppContext) {
|
||||
let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
|
||||
let fake_model = model.as_fake();
|
||||
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.send(UserMessageId::new(), ["abc"], cx)
|
||||
})
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
let mut pending_completions = fake_model.pending_completions();
|
||||
assert_eq!(
|
||||
pending_completions.len(),
|
||||
1,
|
||||
"unexpected pending completions: {:?}",
|
||||
pending_completions
|
||||
);
|
||||
|
||||
let pending_completion = pending_completions.pop().unwrap();
|
||||
assert_eq!(pending_completion.messages[0].role, Role::System);
|
||||
|
||||
let system_message = &pending_completion.messages[0];
|
||||
let system_prompt = system_message.content[0].to_str().unwrap();
|
||||
assert!(
|
||||
!system_prompt.contains("## Tool Use"),
|
||||
"unexpected system message: {:?}",
|
||||
system_message
|
||||
);
|
||||
assert!(
|
||||
!system_prompt.contains("## Fixing Diagnostics"),
|
||||
"unexpected system message: {:?}",
|
||||
system_message
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_prompt_caching(cx: &mut TestAppContext) {
|
||||
let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
|
||||
|
||||
@@ -50,7 +50,7 @@ use std::{
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use std::{fmt::Write, path::PathBuf};
|
||||
use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock};
|
||||
use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock, paths::PathStyle};
|
||||
use uuid::Uuid;
|
||||
|
||||
const TOOL_CANCELED_MESSAGE: &str = "Tool canceled by user";
|
||||
@@ -1816,9 +1816,15 @@ impl Thread {
|
||||
log::debug!("Completion intent: {:?}", completion_intent);
|
||||
log::debug!("Completion mode: {:?}", self.completion_mode);
|
||||
|
||||
let messages = self.build_request_messages(cx);
|
||||
let available_tools: Vec<_> = self
|
||||
.running_turn
|
||||
.as_ref()
|
||||
.map(|turn| turn.tools.keys().cloned().collect())
|
||||
.unwrap_or_default();
|
||||
|
||||
log::debug!("Request includes {} tools", available_tools.len());
|
||||
let messages = self.build_request_messages(available_tools, cx);
|
||||
log::debug!("Request will include {} messages", messages.len());
|
||||
log::debug!("Request includes {} tools", tools.len());
|
||||
|
||||
let request = LanguageModelRequest {
|
||||
thread_id: Some(self.id.to_string()),
|
||||
@@ -1909,7 +1915,11 @@ impl Thread {
|
||||
self.running_turn.as_ref()?.tools.get(name).cloned()
|
||||
}
|
||||
|
||||
fn build_request_messages(&self, cx: &App) -> Vec<LanguageModelRequestMessage> {
|
||||
fn build_request_messages(
|
||||
&self,
|
||||
available_tools: Vec<SharedString>,
|
||||
cx: &App,
|
||||
) -> Vec<LanguageModelRequestMessage> {
|
||||
log::trace!(
|
||||
"Building request messages from {} thread messages",
|
||||
self.messages.len()
|
||||
@@ -1917,7 +1927,8 @@ impl Thread {
|
||||
|
||||
let system_prompt = SystemPromptTemplate {
|
||||
project: self.project_context.read(cx),
|
||||
available_tools: self.tools.keys().cloned().collect(),
|
||||
available_tools,
|
||||
model_name: self.model.as_ref().map(|m| m.name().0.to_string()),
|
||||
}
|
||||
.render(&self.templates)
|
||||
.context("failed to build system prompt")
|
||||
@@ -2538,8 +2549,8 @@ impl From<&str> for UserMessageContent {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<acp::ContentBlock> for UserMessageContent {
|
||||
fn from(value: acp::ContentBlock) -> Self {
|
||||
impl UserMessageContent {
|
||||
pub fn from_content_block(value: acp::ContentBlock, path_style: PathStyle) -> Self {
|
||||
match value {
|
||||
acp::ContentBlock::Text(text_content) => Self::Text(text_content.text),
|
||||
acp::ContentBlock::Image(image_content) => Self::Image(convert_image(image_content)),
|
||||
@@ -2548,7 +2559,7 @@ impl From<acp::ContentBlock> for UserMessageContent {
|
||||
Self::Text("[audio]".to_string())
|
||||
}
|
||||
acp::ContentBlock::ResourceLink(resource_link) => {
|
||||
match MentionUri::parse(&resource_link.uri) {
|
||||
match MentionUri::parse(&resource_link.uri, path_style) {
|
||||
Ok(uri) => Self::Mention {
|
||||
uri,
|
||||
content: String::new(),
|
||||
@@ -2561,7 +2572,7 @@ impl From<acp::ContentBlock> for UserMessageContent {
|
||||
}
|
||||
acp::ContentBlock::Resource(resource) => match resource.resource {
|
||||
acp::EmbeddedResourceResource::TextResourceContents(resource) => {
|
||||
match MentionUri::parse(&resource.uri) {
|
||||
match MentionUri::parse(&resource.uri, path_style) {
|
||||
Ok(uri) => Self::Mention {
|
||||
uri,
|
||||
content: resource.text,
|
||||
|
||||
@@ -38,6 +38,7 @@ language_model.workspace = true
|
||||
language_models.workspace = true
|
||||
log.workspace = true
|
||||
project.workspace = true
|
||||
release_channel.workspace = true
|
||||
reqwest_client = { workspace = true, optional = true }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
|
||||
@@ -105,6 +105,14 @@ impl AcpConnection {
|
||||
|
||||
let sessions = Rc::new(RefCell::new(HashMap::default()));
|
||||
|
||||
let (release_channel, version) = cx.update(|cx| {
|
||||
(
|
||||
release_channel::ReleaseChannel::try_global(cx)
|
||||
.map(|release_channel| release_channel.display_name()),
|
||||
release_channel::AppVersion::global(cx).to_string(),
|
||||
)
|
||||
})?;
|
||||
|
||||
let client = ClientDelegate {
|
||||
sessions: sessions.clone(),
|
||||
cx: cx.clone(),
|
||||
@@ -170,8 +178,14 @@ impl AcpConnection {
|
||||
meta: Some(serde_json::json!({
|
||||
// Experimental: Allow for rendering terminal output from the agents
|
||||
"terminal_output": true,
|
||||
"terminal-auth": true,
|
||||
})),
|
||||
},
|
||||
client_info: Some(acp::Implementation {
|
||||
name: "zed".to_owned(),
|
||||
title: release_channel.map(|c| c.to_owned()),
|
||||
version,
|
||||
}),
|
||||
meta: None,
|
||||
})
|
||||
.await?;
|
||||
@@ -700,7 +714,11 @@ impl acp::Client for ClientDelegate {
|
||||
.get(¬ification.session_id)
|
||||
.context("Failed to get session")?;
|
||||
|
||||
if let acp::SessionUpdate::CurrentModeUpdate { current_mode_id } = ¬ification.update {
|
||||
if let acp::SessionUpdate::CurrentModeUpdate(acp::CurrentModeUpdate {
|
||||
current_mode_id,
|
||||
..
|
||||
}) = ¬ification.update
|
||||
{
|
||||
if let Some(session_modes) = &session.session_modes {
|
||||
session_modes.borrow_mut().current_mode_id = current_mode_id.clone();
|
||||
} else {
|
||||
|
||||
@@ -79,8 +79,6 @@ impl AgentServer for Codex {
|
||||
root_dir.as_deref(),
|
||||
extra_env,
|
||||
delegate.status_tx,
|
||||
// For now, report that there are no updates.
|
||||
// (A future PR will use the GitHub Releases API to fetch them.)
|
||||
delegate.new_version_available,
|
||||
&mut cx.to_async(),
|
||||
))
|
||||
|
||||
@@ -253,17 +253,22 @@ impl ContextPickerCompletionProvider {
|
||||
) -> Option<Completion> {
|
||||
let project = workspace.read(cx).project().clone();
|
||||
|
||||
let label = CodeLabel::plain(symbol.name.clone(), None);
|
||||
|
||||
let abs_path = match &symbol.path {
|
||||
SymbolLocation::InProject(project_path) => {
|
||||
project.read(cx).absolute_path(&project_path, cx)?
|
||||
}
|
||||
let (abs_path, file_name) = match &symbol.path {
|
||||
SymbolLocation::InProject(project_path) => (
|
||||
project.read(cx).absolute_path(&project_path, cx)?,
|
||||
project_path.path.file_name()?.to_string().into(),
|
||||
),
|
||||
SymbolLocation::OutsideProject {
|
||||
abs_path,
|
||||
signature: _,
|
||||
} => PathBuf::from(abs_path.as_ref()),
|
||||
} => (
|
||||
PathBuf::from(abs_path.as_ref()),
|
||||
abs_path.file_name().map(|f| f.to_string_lossy())?,
|
||||
),
|
||||
};
|
||||
|
||||
let label = build_symbol_label(&symbol.name, &file_name, symbol.range.start.0.row + 1, cx);
|
||||
|
||||
let uri = MentionUri::Symbol {
|
||||
abs_path,
|
||||
name: symbol.name.clone(),
|
||||
@@ -570,6 +575,7 @@ impl ContextPickerCompletionProvider {
|
||||
.unwrap_or_default();
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let include_root_name = workspace.visible_worktrees(cx).count() > 1;
|
||||
|
||||
if let Some(agent_panel) = workspace.panel::<AgentPanel>(cx)
|
||||
&& let Some(thread) = agent_panel.read(cx).active_agent_thread(cx)
|
||||
@@ -596,7 +602,11 @@ impl ContextPickerCompletionProvider {
|
||||
project
|
||||
.worktree_for_id(project_path.worktree_id, cx)
|
||||
.map(|worktree| {
|
||||
let path_prefix = worktree.read(cx).root_name().into();
|
||||
let path_prefix = if include_root_name {
|
||||
worktree.read(cx).root_name().into()
|
||||
} else {
|
||||
RelPath::empty().into()
|
||||
};
|
||||
Match::File(FileMatch {
|
||||
mat: fuzzy::PathMatch {
|
||||
score: 1.,
|
||||
@@ -674,6 +684,17 @@ impl ContextPickerCompletionProvider {
|
||||
}
|
||||
}
|
||||
|
||||
fn build_symbol_label(symbol_name: &str, file_name: &str, line: u32, cx: &App) -> CodeLabel {
|
||||
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
|
||||
let mut label = CodeLabelBuilder::default();
|
||||
|
||||
label.push_str(symbol_name, None);
|
||||
label.push_str(" ", None);
|
||||
label.push_str(&format!("{} L{}", file_name, line), comment_id);
|
||||
|
||||
label.build()
|
||||
}
|
||||
|
||||
fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel {
|
||||
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
|
||||
let mut label = CodeLabelBuilder::default();
|
||||
@@ -812,9 +833,21 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
path: mat.path.clone(),
|
||||
};
|
||||
|
||||
// If path is empty, this means we're matching with the root directory itself
|
||||
// so we use the path_prefix as the name
|
||||
let path_prefix = if mat.path.is_empty() {
|
||||
project
|
||||
.read(cx)
|
||||
.worktree_for_id(project_path.worktree_id, cx)
|
||||
.map(|wt| wt.read(cx).root_name().into())
|
||||
.unwrap_or_else(|| mat.path_prefix.clone())
|
||||
} else {
|
||||
mat.path_prefix.clone()
|
||||
};
|
||||
|
||||
Self::completion_for_path(
|
||||
project_path,
|
||||
&mat.path_prefix,
|
||||
&path_prefix,
|
||||
is_recent,
|
||||
mat.is_dir,
|
||||
source_range.clone(),
|
||||
|
||||
@@ -4,7 +4,7 @@ use acp_thread::{AcpThread, AgentThreadEntry};
|
||||
use agent::HistoryStore;
|
||||
use agent_client_protocol::{self as acp, ToolCallId};
|
||||
use collections::HashMap;
|
||||
use editor::{Editor, EditorMode, MinimapVisibility};
|
||||
use editor::{Editor, EditorMode, MinimapVisibility, SizingBehavior};
|
||||
use gpui::{
|
||||
AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
|
||||
ScrollHandle, SharedString, TextStyleRefinement, WeakEntity, Window,
|
||||
@@ -357,7 +357,7 @@ fn create_editor_diff(
|
||||
EditorMode::Full {
|
||||
scale_ui_elements_with_buffer_font_size: false,
|
||||
show_active_line_background: false,
|
||||
sized_by_content: true,
|
||||
sizing_behavior: SizingBehavior::SizeByContent,
|
||||
},
|
||||
diff.read(cx).multibuffer().clone(),
|
||||
None,
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::{
|
||||
ChatWithFollow,
|
||||
acp::completion_provider::{ContextPickerCompletionProvider, SlashCommandCompletion},
|
||||
context_picker::{ContextPickerAction, fetch_context_picker::fetch_url_content},
|
||||
};
|
||||
@@ -15,6 +16,7 @@ use editor::{
|
||||
MultiBuffer, ToOffset,
|
||||
actions::Paste,
|
||||
display_map::{Crease, CreaseId, FoldId},
|
||||
scroll::Autoscroll,
|
||||
};
|
||||
use futures::{
|
||||
FutureExt as _,
|
||||
@@ -49,7 +51,7 @@ use text::OffsetRangeExt;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{ButtonLike, TintColor, Toggleable, prelude::*};
|
||||
use util::{ResultExt, debug_panic, rel_path::RelPath};
|
||||
use workspace::{Workspace, notifications::NotifyResultExt as _};
|
||||
use workspace::{CollaboratorId, Workspace, notifications::NotifyResultExt as _};
|
||||
use zed_actions::agent::Chat;
|
||||
|
||||
pub struct MessageEditor {
|
||||
@@ -234,8 +236,16 @@ impl MessageEditor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let uri = MentionUri::Thread {
|
||||
id: thread.id.clone(),
|
||||
name: thread.title.to_string(),
|
||||
};
|
||||
let content = format!("{}\n", uri.as_link());
|
||||
|
||||
let content_len = content.len() - 1;
|
||||
|
||||
let start = self.editor.update(cx, |editor, cx| {
|
||||
editor.set_text(format!("{}\n", thread.title), window, cx);
|
||||
editor.set_text(content, window, cx);
|
||||
editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
@@ -244,18 +254,8 @@ impl MessageEditor {
|
||||
.text_anchor
|
||||
});
|
||||
|
||||
self.confirm_mention_completion(
|
||||
thread.title.clone(),
|
||||
start,
|
||||
thread.title.len(),
|
||||
MentionUri::Thread {
|
||||
id: thread.id.clone(),
|
||||
name: thread.title.to_string(),
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.detach();
|
||||
self.confirm_mention_completion(thread.title, start, content_len, uri, window, cx)
|
||||
.detach();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -592,6 +592,21 @@ impl MessageEditor {
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// Take this explanation with a grain of salt but, with creases being
|
||||
// inserted, GPUI's recomputes the editor layout in the next frames, so
|
||||
// directly calling `editor.request_autoscroll` wouldn't work as
|
||||
// expected. We're leveraging `cx.on_next_frame` to wait 2 frames and
|
||||
// ensure that the layout has been recalculated so that the autoscroll
|
||||
// request actually shows the cursor's new position.
|
||||
let editor = self.editor.clone();
|
||||
cx.on_next_frame(window, move |_, window, cx| {
|
||||
cx.on_next_frame(window, move |_, _, cx| {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.request_autoscroll(Autoscroll::fit(), cx)
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
fn confirm_mention_for_thread(
|
||||
@@ -702,7 +717,7 @@ impl MessageEditor {
|
||||
let mut all_tracked_buffers = Vec::new();
|
||||
|
||||
let result = editor.update(cx, |editor, cx| {
|
||||
let mut ix = 0;
|
||||
let mut ix = text.chars().position(|c| !c.is_whitespace()).unwrap_or(0);
|
||||
let mut chunks: Vec<acp::ContentBlock> = Vec::new();
|
||||
let text = editor.text(cx);
|
||||
editor.display_map.update(cx, |map, cx| {
|
||||
@@ -714,15 +729,6 @@ impl MessageEditor {
|
||||
|
||||
let crease_range = crease.range().to_offset(&snapshot.buffer_snapshot());
|
||||
if crease_range.start > ix {
|
||||
//todo(): Custom slash command ContentBlock?
|
||||
// let chunk = if prevent_slash_commands
|
||||
// && ix == 0
|
||||
// && parse_slash_command(&text[ix..]).is_some()
|
||||
// {
|
||||
// format!(" {}", &text[ix..crease_range.start]).into()
|
||||
// } else {
|
||||
// text[ix..crease_range.start].into()
|
||||
// };
|
||||
let chunk = text[ix..crease_range.start].into();
|
||||
chunks.push(chunk);
|
||||
}
|
||||
@@ -783,15 +789,6 @@ impl MessageEditor {
|
||||
}
|
||||
|
||||
if ix < text.len() {
|
||||
//todo(): Custom slash command ContentBlock?
|
||||
// let last_chunk = if prevent_slash_commands
|
||||
// && ix == 0
|
||||
// && parse_slash_command(&text[ix..]).is_some()
|
||||
// {
|
||||
// format!(" {}", text[ix..].trim_end())
|
||||
// } else {
|
||||
// text[ix..].trim_end().to_owned()
|
||||
// };
|
||||
let last_chunk = text[ix..].trim_end().to_owned();
|
||||
if !last_chunk.is_empty() {
|
||||
chunks.push(last_chunk.into());
|
||||
@@ -831,6 +828,21 @@ impl MessageEditor {
|
||||
self.send(cx);
|
||||
}
|
||||
|
||||
fn chat_with_follow(
|
||||
&mut self,
|
||||
_: &ChatWithFollow,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.workspace
|
||||
.update(cx, |this, cx| {
|
||||
this.follow(CollaboratorId::Agent, window, cx)
|
||||
})
|
||||
.log_err();
|
||||
|
||||
self.send(cx);
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &editor::actions::Cancel, _: &mut Window, cx: &mut Context<Self>) {
|
||||
cx.emit(MessageEditorEvent::Cancel)
|
||||
}
|
||||
@@ -1034,6 +1046,7 @@ impl MessageEditor {
|
||||
|
||||
self.editor.update(cx, |message_editor, cx| {
|
||||
message_editor.edit([(cursor_anchor..cursor_anchor, completion.new_text)], cx);
|
||||
message_editor.request_autoscroll(Autoscroll::fit(), cx);
|
||||
});
|
||||
if let Some(confirm) = completion.confirm {
|
||||
confirm(CompletionIntent::Complete, window, cx);
|
||||
@@ -1062,6 +1075,7 @@ impl MessageEditor {
|
||||
) {
|
||||
self.clear(window, cx);
|
||||
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let mut text = String::new();
|
||||
let mut mentions = Vec::new();
|
||||
|
||||
@@ -1074,7 +1088,8 @@ impl MessageEditor {
|
||||
resource: acp::EmbeddedResourceResource::TextResourceContents(resource),
|
||||
..
|
||||
}) => {
|
||||
let Some(mention_uri) = MentionUri::parse(&resource.uri).log_err() else {
|
||||
let Some(mention_uri) = MentionUri::parse(&resource.uri, path_style).log_err()
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
let start = text.len();
|
||||
@@ -1090,7 +1105,9 @@ impl MessageEditor {
|
||||
));
|
||||
}
|
||||
acp::ContentBlock::ResourceLink(resource) => {
|
||||
if let Some(mention_uri) = MentionUri::parse(&resource.uri).log_err() {
|
||||
if let Some(mention_uri) =
|
||||
MentionUri::parse(&resource.uri, path_style).log_err()
|
||||
{
|
||||
let start = text.len();
|
||||
write!(&mut text, "{}", mention_uri.as_link()).ok();
|
||||
let end = text.len();
|
||||
@@ -1105,7 +1122,7 @@ impl MessageEditor {
|
||||
meta: _,
|
||||
}) => {
|
||||
let mention_uri = if let Some(uri) = uri {
|
||||
MentionUri::parse(&uri)
|
||||
MentionUri::parse(&uri, path_style)
|
||||
} else {
|
||||
Ok(MentionUri::PastedImage)
|
||||
};
|
||||
@@ -1290,6 +1307,7 @@ impl Render for MessageEditor {
|
||||
div()
|
||||
.key_context("MessageEditor")
|
||||
.on_action(cx.listener(Self::chat))
|
||||
.on_action(cx.listener(Self::chat_with_follow))
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.capture_action(cx.listener(Self::paste))
|
||||
.flex_1()
|
||||
@@ -1598,6 +1616,7 @@ mod tests {
|
||||
use gpui::{
|
||||
AppContext, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext,
|
||||
};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use lsp::{CompletionContext, CompletionTriggerKind};
|
||||
use project::{CompletionIntent, Project, ProjectPath};
|
||||
use serde_json::json;
|
||||
@@ -2179,10 +2198,10 @@ mod tests {
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
&[
|
||||
format!("eight.txt dir{slash}b{slash}"),
|
||||
format!("seven.txt dir{slash}b{slash}"),
|
||||
format!("six.txt dir{slash}b{slash}"),
|
||||
format!("five.txt dir{slash}b{slash}"),
|
||||
format!("eight.txt b{slash}"),
|
||||
format!("seven.txt b{slash}"),
|
||||
format!("six.txt b{slash}"),
|
||||
format!("five.txt b{slash}"),
|
||||
]
|
||||
);
|
||||
editor.set_text("", window, cx);
|
||||
@@ -2210,10 +2229,10 @@ mod tests {
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
&[
|
||||
format!("eight.txt dir{slash}b{slash}"),
|
||||
format!("seven.txt dir{slash}b{slash}"),
|
||||
format!("six.txt dir{slash}b{slash}"),
|
||||
format!("five.txt dir{slash}b{slash}"),
|
||||
format!("eight.txt b{slash}"),
|
||||
format!("seven.txt b{slash}"),
|
||||
format!("six.txt b{slash}"),
|
||||
format!("five.txt b{slash}"),
|
||||
"Files & Directories".into(),
|
||||
"Symbols".into(),
|
||||
"Threads".into(),
|
||||
@@ -2246,7 +2265,7 @@ mod tests {
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
vec![format!("one.txt dir{slash}a{slash}")]
|
||||
vec![format!("one.txt a{slash}")]
|
||||
);
|
||||
});
|
||||
|
||||
@@ -2293,7 +2312,10 @@ mod tests {
|
||||
panic!("Unexpected mentions");
|
||||
};
|
||||
pretty_assertions::assert_eq!(content, "1");
|
||||
pretty_assertions::assert_eq!(uri, &url_one.parse::<MentionUri>().unwrap());
|
||||
pretty_assertions::assert_eq!(
|
||||
uri,
|
||||
&MentionUri::parse(&url_one, PathStyle::local()).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
let contents = message_editor
|
||||
@@ -2314,7 +2336,10 @@ mod tests {
|
||||
let [(uri, Mention::UriOnly)] = contents.as_slice() else {
|
||||
panic!("Unexpected mentions");
|
||||
};
|
||||
pretty_assertions::assert_eq!(uri, &url_one.parse::<MentionUri>().unwrap());
|
||||
pretty_assertions::assert_eq!(
|
||||
uri,
|
||||
&MentionUri::parse(&url_one, PathStyle::local()).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
cx.simulate_input(" ");
|
||||
@@ -2375,7 +2400,10 @@ mod tests {
|
||||
panic!("Unexpected mentions");
|
||||
};
|
||||
pretty_assertions::assert_eq!(content, "8");
|
||||
pretty_assertions::assert_eq!(uri, &url_eight.parse::<MentionUri>().unwrap());
|
||||
pretty_assertions::assert_eq!(
|
||||
uri,
|
||||
&MentionUri::parse(&url_eight, PathStyle::local()).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
@@ -2460,7 +2488,7 @@ mod tests {
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) @symbol ")
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(current_completion_labels(editor), &["MySymbol"]);
|
||||
assert_eq!(current_completion_labels(editor), &["MySymbol one.txt L1"]);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -2516,7 +2544,7 @@ mod tests {
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) @file x.png", symbol.to_uri())
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]);
|
||||
assert_eq!(current_completion_labels(editor), &["x.png "]);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -2558,7 +2586,7 @@ mod tests {
|
||||
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) @file x.png", symbol.to_uri())
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]);
|
||||
assert_eq!(current_completion_labels(editor), &["x.png "]);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -2734,4 +2762,295 @@ mod tests {
|
||||
_ => panic!("Expected Text mention for small file"),
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_insert_thread_summary(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
cx.update(LanguageModelRegistry::test);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree("/project", json!({"file": ""})).await;
|
||||
let project = Project::test(fs, [Path::new(path!("/project"))], cx).await;
|
||||
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
|
||||
let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
|
||||
let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
|
||||
|
||||
// Create a thread metadata to insert as summary
|
||||
let thread_metadata = agent::DbThreadMetadata {
|
||||
id: acp::SessionId("thread-123".into()),
|
||||
title: "Previous Conversation".into(),
|
||||
updated_at: chrono::Utc::now(),
|
||||
};
|
||||
|
||||
let message_editor = cx.update(|window, cx| {
|
||||
cx.new(|cx| {
|
||||
let mut editor = MessageEditor::new(
|
||||
workspace.downgrade(),
|
||||
project.clone(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
"Test Agent".into(),
|
||||
"Test",
|
||||
EditorMode::AutoHeight {
|
||||
min_lines: 1,
|
||||
max_lines: None,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
editor.insert_thread_summary(thread_metadata.clone(), window, cx);
|
||||
editor
|
||||
})
|
||||
});
|
||||
|
||||
// Construct expected values for verification
|
||||
let expected_uri = MentionUri::Thread {
|
||||
id: thread_metadata.id.clone(),
|
||||
name: thread_metadata.title.to_string(),
|
||||
};
|
||||
let expected_link = format!("[@{}]({})", thread_metadata.title, expected_uri.to_uri());
|
||||
|
||||
message_editor.read_with(cx, |editor, cx| {
|
||||
let text = editor.text(cx);
|
||||
|
||||
assert!(
|
||||
text.contains(&expected_link),
|
||||
"Expected editor text to contain thread mention link.\nExpected substring: {}\nActual text: {}",
|
||||
expected_link,
|
||||
text
|
||||
);
|
||||
|
||||
let mentions = editor.mentions();
|
||||
assert_eq!(
|
||||
mentions.len(),
|
||||
1,
|
||||
"Expected exactly one mention after inserting thread summary"
|
||||
);
|
||||
|
||||
assert!(
|
||||
mentions.contains(&expected_uri),
|
||||
"Expected mentions to contain the thread URI"
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_whitespace_trimming(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree("/project", json!({"file.rs": "fn main() {}"}))
|
||||
.await;
|
||||
let project = Project::test(fs, [Path::new(path!("/project"))], cx).await;
|
||||
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
|
||||
let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
|
||||
let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
|
||||
|
||||
let message_editor = cx.update(|window, cx| {
|
||||
cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace.downgrade(),
|
||||
project.clone(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
"Test Agent".into(),
|
||||
"Test",
|
||||
EditorMode::AutoHeight {
|
||||
min_lines: 1,
|
||||
max_lines: None,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
});
|
||||
let editor = message_editor.update(cx, |message_editor, _| message_editor.editor.clone());
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text(" hello world ", window, cx);
|
||||
});
|
||||
|
||||
let (content, _) = message_editor
|
||||
.update(cx, |message_editor, cx| message_editor.contents(false, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
content,
|
||||
vec![acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "hello world".into(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
})]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_autoscroll_after_insert_selections(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"test.txt": "line1\nline2\nline3\nline4\nline5\n",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let workspace = window.root(cx).unwrap();
|
||||
|
||||
let worktree = project.update(cx, |project, cx| {
|
||||
let mut worktrees = project.worktrees(cx).collect::<Vec<_>>();
|
||||
            assert_eq!(worktrees.len(), 1);
            worktrees.pop().unwrap()
        });
        let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

        let mut cx = VisualTestContext::from_window(*window, cx);

        // Open a regular editor with the created file, and select a portion of
        // the text that will be used for the selections that are meant to be
        // inserted in the agent panel.
        let editor = workspace
            .update_in(&mut cx, |workspace, window, cx| {
                workspace.open_path(
                    ProjectPath {
                        worktree_id,
                        path: rel_path("test.txt").into(),
                    },
                    None,
                    false,
                    window,
                    cx,
                )
            })
            .await
            .unwrap()
            .downcast::<Editor>()
            .unwrap();

        editor.update_in(&mut cx, |editor, window, cx| {
            editor.change_selections(Default::default(), window, cx, |selections| {
                selections.select_ranges([Point::new(0, 0)..Point::new(0, 5)]);
            });
        });

        let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
        let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));

        // Create a new `MessageEditor`. The `EditorMode::full()` has to be used
        // to ensure we have a fixed viewport, so we can eventually actually
        // place the cursor outside of the visible area.
        let message_editor = workspace.update_in(&mut cx, |workspace, window, cx| {
            let workspace_handle = cx.weak_entity();
            let message_editor = cx.new(|cx| {
                MessageEditor::new(
                    workspace_handle,
                    project.clone(),
                    history_store.clone(),
                    None,
                    Default::default(),
                    Default::default(),
                    "Test Agent".into(),
                    "Test",
                    EditorMode::full(),
                    window,
                    cx,
                )
            });
            workspace.active_pane().update(cx, |pane, cx| {
                pane.add_item(
                    Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))),
                    true,
                    true,
                    None,
                    window,
                    cx,
                );
            });

            message_editor
        });

        message_editor.update_in(&mut cx, |message_editor, window, cx| {
            message_editor.editor.update(cx, |editor, cx| {
                // Update the Agent Panel's Message Editor text to have 100
                // lines, ensuring that the cursor is set at line 90 and that we
                // then scroll all the way to the top, so the cursor's position
                // remains off screen.
                let mut lines = String::new();
                for _ in 1..=100 {
                    lines.push_str(&"Another line in the agent panel's message editor\n");
                }
                editor.set_text(lines.as_str(), window, cx);
                editor.change_selections(Default::default(), window, cx, |selections| {
                    selections.select_ranges([Point::new(90, 0)..Point::new(90, 0)]);
                });
                editor.set_scroll_position(gpui::Point::new(0., 0.), window, cx);
            });
        });

        cx.run_until_parked();

        // Before proceeding, let's assert that the cursor is indeed off screen,
        // otherwise the rest of the test doesn't make sense.
        message_editor.update_in(&mut cx, |message_editor, window, cx| {
            message_editor.editor.update(cx, |editor, cx| {
                let snapshot = editor.snapshot(window, cx);
                let cursor_row = editor.selections.newest::<Point>(&snapshot).head().row;
                let scroll_top = snapshot.scroll_position().y as u32;
                let visible_lines = editor.visible_line_count().unwrap() as u32;
                let visible_range = scroll_top..(scroll_top + visible_lines);

                assert!(!visible_range.contains(&cursor_row));
            })
        });

        // Now let's insert the selection in the Agent Panel's editor and
        // confirm that, after the insertion, the cursor is now in the visible
        // range.
        message_editor.update_in(&mut cx, |message_editor, window, cx| {
            message_editor.insert_selections(window, cx);
        });

        cx.run_until_parked();

        message_editor.update_in(&mut cx, |message_editor, window, cx| {
            message_editor.editor.update(cx, |editor, cx| {
                let snapshot = editor.snapshot(window, cx);
                let cursor_row = editor.selections.newest::<Point>(&snapshot).head().row;
                let scroll_top = snapshot.scroll_position().y as u32;
                let visible_lines = editor.visible_line_count().unwrap() as u32;
                let visible_range = scroll_top..(scroll_top + visible_lines);

                assert!(visible_range.contains(&cursor_row));
            })
        });
    }
}

@@ -1,8 +1,10 @@
use acp_thread::AgentSessionModes;
use agent_client_protocol as acp;
use agent_servers::AgentServer;
use agent_settings::AgentSettings;
use fs::Fs;
use gpui::{Context, Entity, FocusHandle, WeakEntity, Window, prelude::*};
use settings::Settings as _;
use std::{rc::Rc, sync::Arc};
use ui::{
    Button, ContextMenu, ContextMenuEntry, DocumentationEdge, DocumentationSide, KeyBinding,
@@ -84,6 +86,14 @@ impl ModeSelector {
        let current_mode = self.connection.current_mode();
        let default_mode = self.agent_server.default_mode(cx);

        let settings = AgentSettings::get_global(cx);
        let side = match settings.dock {
            settings::DockPosition::Left => DocumentationSide::Right,
            settings::DockPosition::Bottom | settings::DockPosition::Right => {
                DocumentationSide::Left
            }
        };

        for mode in all_modes {
            let is_selected = &mode.id == &current_mode;
            let is_default = Some(&mode.id) == default_mode.as_ref();
@@ -91,7 +101,7 @@ impl ModeSelector {
                .toggleable(IconPosition::End, is_selected);

            let entry = if let Some(description) = &mode.description {
                entry.documentation_aside(DocumentationSide::Left, DocumentationEdge::Bottom, {
                entry.documentation_aside(side, DocumentationEdge::Bottom, {
                    let description = description.clone();

                    move |cx| {

@@ -450,6 +450,7 @@ impl Render for AcpThreadHistory {
        v_flex()
            .key_context("ThreadHistory")
            .size_full()
            .bg(cx.theme().colors().panel_background)
            .on_action(cx.listener(Self::select_previous))
            .on_action(cx.listener(Self::select_next))
            .on_action(cx.listener(Self::select_first))

@@ -17,7 +17,9 @@ use client::zed_urls;
use cloud_llm_client::PlanV1;
use collections::{HashMap, HashSet};
use editor::scroll::Autoscroll;
use editor::{Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects};
use editor::{
    Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects, SizingBehavior,
};
use file_icons::FileIcons;
use fs::Fs;
use futures::FutureExt as _;
@@ -102,7 +104,7 @@ impl ThreadError {
        {
            Self::AuthenticationRequired(acp_error.message.clone().into())
        } else {
            let string = error.to_string();
            let string = format!("{:#}", error);
            // TODO: we should have Gemini return better errors here.
            if agent.clone().downcast::<agent_servers::Gemini>().is_some()
                && string.contains("Could not load the default credentials")
@@ -111,7 +113,7 @@ impl ThreadError {
            {
                Self::AuthenticationRequired(string.into())
            } else {
                Self::Other(error.to_string().into())
                Self::Other(string.into())
            }
        }
    }
@@ -793,7 +795,8 @@ impl AcpThreadView {
        if let Some(load_err) = err.downcast_ref::<LoadError>() {
            self.thread_state = ThreadState::LoadError(load_err.clone());
        } else {
            self.thread_state = ThreadState::LoadError(LoadError::Other(err.to_string().into()))
            self.thread_state =
                ThreadState::LoadError(LoadError::Other(format!("{:#}", err).into()))
        }
        if self.message_editor.focus_handle(cx).is_focused(window) {
            self.focus_handle.focus(window)
@@ -881,6 +884,7 @@ impl AcpThreadView {
        cx: &mut Context<Self>,
    ) {
        self.set_editor_is_expanded(!self.editor_expanded, cx);
        cx.stop_propagation();
        cx.notify();
    }

@@ -892,7 +896,7 @@ impl AcpThreadView {
            EditorMode::Full {
                scale_ui_elements_with_buffer_font_size: false,
                show_active_line_background: false,
                sized_by_content: false,
                sizing_behavior: SizingBehavior::ExcludeOverscrollMargin,
            },
            cx,
        )
@@ -1469,6 +1473,106 @@ impl AcpThreadView {
|
||||
return;
|
||||
};
|
||||
|
||||
// Check for the experimental "terminal-auth" _meta field
|
||||
let auth_method = connection.auth_methods().iter().find(|m| m.id == method);
|
||||
|
||||
if let Some(auth_method) = auth_method {
|
||||
if let Some(meta) = &auth_method.meta {
|
||||
if let Some(terminal_auth) = meta.get("terminal-auth") {
|
||||
// Extract terminal auth details from meta
|
||||
if let (Some(command), Some(label)) = (
|
||||
terminal_auth.get("command").and_then(|v| v.as_str()),
|
||||
terminal_auth.get("label").and_then(|v| v.as_str()),
|
||||
) {
|
||||
let args = terminal_auth
|
||||
.get("args")
|
||||
.and_then(|v| v.as_array())
|
||||
.map(|arr| {
|
||||
arr.iter()
|
||||
.filter_map(|v| v.as_str().map(String::from))
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let env = terminal_auth
|
||||
.get("env")
|
||||
.and_then(|v| v.as_object())
|
||||
.map(|obj| {
|
||||
obj.iter()
|
||||
.filter_map(|(k, v)| {
|
||||
v.as_str().map(|val| (k.clone(), val.to_string()))
|
||||
})
|
||||
.collect::<HashMap<String, String>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
// Build SpawnInTerminal from _meta
|
||||
let login = task::SpawnInTerminal {
|
||||
id: task::TaskId(format!("external-agent-{}-login", label)),
|
||||
full_label: label.to_string(),
|
||||
label: label.to_string(),
|
||||
command: Some(command.to_string()),
|
||||
args,
|
||||
command_label: label.to_string(),
|
||||
env,
|
||||
use_new_terminal: true,
|
||||
allow_concurrent_runs: true,
|
||||
hide: task::HideStrategy::Always,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
self.thread_error.take();
|
||||
configuration_view.take();
|
||||
pending_auth_method.replace(method.clone());
|
||||
|
||||
if let Some(workspace) = self.workspace.upgrade() {
|
||||
let authenticate = Self::spawn_external_agent_login(
|
||||
login, workspace, false, window, cx,
|
||||
);
|
||||
cx.notify();
|
||||
self.auth_task = Some(cx.spawn_in(window, {
|
||||
let agent = self.agent.clone();
|
||||
async move |this, cx| {
|
||||
let result = authenticate.await;
|
||||
|
||||
match &result {
|
||||
Ok(_) => telemetry::event!(
|
||||
"Authenticate Agent Succeeded",
|
||||
agent = agent.telemetry_id()
|
||||
),
|
||||
Err(_) => {
|
||||
telemetry::event!(
|
||||
"Authenticate Agent Failed",
|
||||
agent = agent.telemetry_id(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
if let Err(err) = result {
|
||||
if let ThreadState::Unauthenticated {
|
||||
pending_auth_method,
|
||||
..
|
||||
} = &mut this.thread_state
|
||||
{
|
||||
pending_auth_method.take();
|
||||
}
|
||||
this.handle_thread_error(err, cx);
|
||||
} else {
|
||||
this.reset(window, cx);
|
||||
}
|
||||
this.auth_task.take()
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}));
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if method.0.as_ref() == "gemini-api-key" {
|
||||
let registry = LanguageModelRegistry::global(cx);
|
||||
let provider = registry
|
||||
@@ -1947,6 +2051,15 @@ impl AcpThreadView {
|
||||
.into_any(),
|
||||
};
|
||||
|
||||
let needs_confirmation = if let AgentThreadEntry::ToolCall(tool_call) = entry {
|
||||
matches!(
|
||||
tool_call.status,
|
||||
ToolCallStatus::WaitingForConfirmation { .. }
|
||||
)
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
let Some(thread) = self.thread() else {
|
||||
return primary;
|
||||
};
|
||||
@@ -1955,7 +2068,13 @@ impl AcpThreadView {
|
||||
v_flex()
|
||||
.w_full()
|
||||
.child(primary)
|
||||
.child(self.render_thread_controls(&thread, cx))
|
||||
.map(|this| {
|
||||
if needs_confirmation {
|
||||
this.child(self.render_generating(true))
|
||||
} else {
|
||||
this.child(self.render_thread_controls(&thread, cx))
|
||||
}
|
||||
})
|
||||
.when_some(
|
||||
self.thread_feedback.comments_editor.clone(),
|
||||
|this, editor| this.child(Self::render_feedback_feedback_editor(editor, cx)),
|
||||
@@ -3631,6 +3750,7 @@ impl AcpThreadView {
|
||||
.child(
|
||||
h_flex()
|
||||
.id("edits-container")
|
||||
.cursor_pointer()
|
||||
.gap_1()
|
||||
.child(Disclosure::new("edits-disclosure", expanded))
|
||||
.map(|this| {
|
||||
@@ -3770,6 +3890,7 @@ impl AcpThreadView {
|
||||
Label::new(name.to_string())
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx)
|
||||
.ml_1p5()
|
||||
});
|
||||
|
||||
let file_icon = FileIcons::get_icon(path.as_std_path(), cx)
|
||||
@@ -3801,14 +3922,30 @@ impl AcpThreadView {
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.id(("file-name-row", index))
|
||||
.relative()
|
||||
.id(("file-name", index))
|
||||
.pr_8()
|
||||
.gap_1p5()
|
||||
.w_full()
|
||||
.overflow_x_scroll()
|
||||
.child(file_icon)
|
||||
.child(h_flex().gap_0p5().children(file_name).children(file_path))
|
||||
.child(
|
||||
h_flex()
|
||||
.id(("file-name-path", index))
|
||||
.cursor_pointer()
|
||||
.pr_0p5()
|
||||
.gap_0p5()
|
||||
.hover(|s| s.bg(cx.theme().colors().element_hover))
|
||||
.rounded_xs()
|
||||
.child(file_icon)
|
||||
.children(file_name)
|
||||
.children(file_path)
|
||||
.tooltip(Tooltip::text("Go to File"))
|
||||
.on_click({
|
||||
let buffer = buffer.clone();
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
this.open_edited_buffer(&buffer, window, cx);
|
||||
})
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
@@ -3818,13 +3955,7 @@ impl AcpThreadView {
|
||||
.bottom_0()
|
||||
.right_0()
|
||||
.bg(overlay_gradient),
|
||||
)
|
||||
.on_click({
|
||||
let buffer = buffer.clone();
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
this.open_edited_buffer(&buffer, window, cx);
|
||||
})
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
@@ -3966,8 +4097,12 @@ impl AcpThreadView {
|
||||
)
|
||||
}
|
||||
})
|
||||
.on_click(cx.listener(|_, _, window, cx| {
|
||||
window.dispatch_action(Box::new(ExpandMessageEditor), cx);
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.expand_message_editor(
|
||||
&ExpandMessageEditor,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})),
|
||||
),
|
||||
),
|
||||
@@ -4305,7 +4440,8 @@ impl AcpThreadView {
|
||||
return;
|
||||
};
|
||||
|
||||
if let Some(mention) = MentionUri::parse(&url).log_err() {
|
||||
if let Some(mention) = MentionUri::parse(&url, workspace.read(cx).path_style(cx)).log_err()
|
||||
{
|
||||
workspace.update(cx, |workspace, cx| match mention {
|
||||
MentionUri::File { abs_path } => {
|
||||
let project = workspace.project();
|
||||
@@ -4570,14 +4706,29 @@ impl AcpThreadView {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if window.is_window_active() || !self.notifications.is_empty() {
|
||||
if !self.notifications.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
|
||||
let window_is_inactive = !window.is_window_active();
|
||||
let panel_is_hidden = self
|
||||
.workspace
|
||||
.upgrade()
|
||||
.map(|workspace| AgentPanel::is_hidden(&workspace, cx))
|
||||
.unwrap_or(true);
|
||||
|
||||
let should_notify = window_is_inactive || panel_is_hidden;
|
||||
|
||||
if !should_notify {
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO: Change this once we have title summarization for external agents.
|
||||
let title = self.agent.name();
|
||||
|
||||
match AgentSettings::get_global(cx).notify_when_agent_waiting {
|
||||
match settings.notify_when_agent_waiting {
|
||||
NotifyWhenAgentWaiting::PrimaryScreen => {
|
||||
if let Some(primary) = cx.primary_display() {
|
||||
self.pop_up(icon, caption.into(), title, window, primary, cx);
|
||||
@@ -4693,6 +4844,31 @@ impl AcpThreadView {
|
||||
}
|
||||
}

    fn render_generating(&self, confirmation: bool) -> impl IntoElement {
        h_flex()
            .id("generating-spinner")
            .py_2()
            .px(rems_from_px(22.))
            .map(|this| {
                if confirmation {
                    this.gap_2()
                        .child(
                            h_flex()
                                .w_2()
                                .child(SpinnerLabel::sand().size(LabelSize::Small)),
                        )
                        .child(
                            LoadingLabel::new("Waiting Confirmation")
                                .size(LabelSize::Small)
                                .color(Color::Muted),
                        )
                } else {
                    this.child(SpinnerLabel::new().size(LabelSize::Small))
                }
            })
            .into_any_element()
    }

fn render_thread_controls(
|
||||
&self,
|
||||
thread: &Entity<AcpThread>,
|
||||
@@ -4700,12 +4876,7 @@ impl AcpThreadView {
|
||||
) -> impl IntoElement {
|
||||
let is_generating = matches!(thread.read(cx).status(), ThreadStatus::Generating);
|
||||
if is_generating {
|
||||
return h_flex().id("thread-controls-container").child(
|
||||
div()
|
||||
.py_2()
|
||||
.px(rems_from_px(22.))
|
||||
.child(SpinnerLabel::new().size(LabelSize::Small)),
|
||||
);
|
||||
return self.render_generating(false).into_any_element();
|
||||
}
|
||||
|
||||
let open_as_markdown = IconButton::new("open-as-markdown", IconName::FileMarkdown)
|
||||
@@ -4793,7 +4964,10 @@ impl AcpThreadView {
|
||||
);
|
||||
}
|
||||
|
||||
container.child(open_as_markdown).child(scroll_to_top)
|
||||
container
|
||||
.child(open_as_markdown)
|
||||
.child(scroll_to_top)
|
||||
.into_any_element()
|
||||
}
|
||||
|
||||
fn render_feedback_feedback_editor(editor: Entity<Editor>, cx: &Context<Self>) -> Div {
|
||||
@@ -5580,7 +5754,7 @@ fn default_markdown_style(
|
||||
let theme_settings = ThemeSettings::get_global(cx);
|
||||
let colors = cx.theme().colors();
|
||||
|
||||
let buffer_font_size = TextSize::Small.rems(cx);
|
||||
let buffer_font_size = theme_settings.agent_buffer_font_size(cx);
|
||||
|
||||
let mut text_style = window.text_style();
|
||||
let line_height = buffer_font_size * 1.75;
|
||||
@@ -5592,9 +5766,9 @@ fn default_markdown_style(
|
||||
};
|
||||
|
||||
let font_size = if buffer_font {
|
||||
TextSize::Small.rems(cx)
|
||||
theme_settings.agent_buffer_font_size(cx)
|
||||
} else {
|
||||
TextSize::Default.rems(cx)
|
||||
theme_settings.agent_ui_font_size(cx)
|
||||
};
|
||||
|
||||
let text_color = if muted_text {
|
||||
@@ -5891,6 +6065,107 @@ pub(crate) mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_notification_when_panel_hidden(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await;
|
||||
|
||||
add_to_workspace(thread_view.clone(), cx);
|
||||
|
||||
let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
|
||||
|
||||
message_editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text("Hello", window, cx);
|
||||
});
|
||||
|
||||
// Window is active (don't deactivate), but panel will be hidden
|
||||
// Note: In the test environment, the panel is not actually added to the dock,
|
||||
// so is_agent_panel_hidden will return true
|
||||
|
||||
thread_view.update_in(cx, |thread_view, window, cx| {
|
||||
thread_view.send(window, cx);
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
// Should show notification because window is active but panel is hidden
|
||||
assert!(
|
||||
cx.windows()
|
||||
.iter()
|
||||
.any(|window| window.downcast::<AgentNotification>().is_some()),
|
||||
"Expected notification when panel is hidden"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_notification_still_works_when_window_inactive(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await;
|
||||
|
||||
let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
|
||||
message_editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text("Hello", window, cx);
|
||||
});
|
||||
|
||||
// Deactivate window - should show notification regardless of setting
|
||||
cx.deactivate_window();
|
||||
|
||||
thread_view.update_in(cx, |thread_view, window, cx| {
|
||||
thread_view.send(window, cx);
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
// Should still show notification when window is inactive (existing behavior)
|
||||
assert!(
|
||||
cx.windows()
|
||||
.iter()
|
||||
.any(|window| window.downcast::<AgentNotification>().is_some()),
|
||||
"Expected notification when window is inactive"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_notification_respects_never_setting(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
// Set notify_when_agent_waiting to Never
|
||||
cx.update(|cx| {
|
||||
AgentSettings::override_global(
|
||||
AgentSettings {
|
||||
notify_when_agent_waiting: NotifyWhenAgentWaiting::Never,
|
||||
..AgentSettings::get_global(cx).clone()
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await;
|
||||
|
||||
let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
|
||||
message_editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text("Hello", window, cx);
|
||||
});
|
||||
|
||||
// Window is active
|
||||
|
||||
thread_view.update_in(cx, |thread_view, window, cx| {
|
||||
thread_view.send(window, cx);
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
// Should NOT show notification because notify_when_agent_waiting is Never
|
||||
assert!(
|
||||
!cx.windows()
|
||||
.iter()
|
||||
.any(|window| window.downcast::<AgentNotification>().is_some()),
|
||||
"Expected no notification when notify_when_agent_waiting is Never"
|
||||
);
|
||||
}
|
||||
|
||||
async fn setup_thread_view(
|
||||
agent: impl AgentServer + 'static,
|
||||
cx: &mut TestAppContext,
|
||||
@@ -5981,9 +6256,12 @@ pub(crate) mod tests {
|
||||
impl StubAgentServer<StubAgentConnection> {
|
||||
fn default_response() -> Self {
|
||||
let conn = StubAgentConnection::new();
|
||||
conn.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
|
||||
content: "Default response".into(),
|
||||
}]);
|
||||
conn.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: "Default response".into(),
|
||||
meta: None,
|
||||
},
|
||||
)]);
|
||||
Self::new(conn)
|
||||
}
|
||||
}
|
||||
@@ -6334,13 +6612,16 @@ pub(crate) mod tests {
|
||||
|
||||
let connection = StubAgentConnection::new();
|
||||
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
}),
|
||||
}]);
|
||||
},
|
||||
)]);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
|
||||
add_to_workspace(thread_view.clone(), cx);
|
||||
@@ -6424,13 +6705,16 @@ pub(crate) mod tests {
|
||||
|
||||
let connection = StubAgentConnection::new();
|
||||
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
}),
|
||||
}]);
|
||||
},
|
||||
)]);
|
||||
|
||||
let (thread_view, cx) =
|
||||
setup_thread_view(StubAgentServer::new(connection.clone()), cx).await;
|
||||
@@ -6468,13 +6752,16 @@ pub(crate) mod tests {
|
||||
});
|
||||
|
||||
// Send
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "New Response".into(),
|
||||
annotations: None,
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "New Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
}),
|
||||
}]);
|
||||
},
|
||||
)]);
|
||||
|
||||
user_message_editor.update_in(cx, |_editor, window, cx| {
|
||||
window.dispatch_action(Box::new(Chat), cx);
|
||||
@@ -6561,13 +6848,14 @@ pub(crate) mod tests {
|
||||
cx.update(|_, cx| {
|
||||
connection.send_update(
|
||||
session_id.clone(),
|
||||
acp::SessionUpdate::AgentMessageChunk {
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
},
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
connection.end_turn(session_id, acp::StopReason::EndTurn);
|
||||
@@ -6619,9 +6907,10 @@ pub(crate) mod tests {
|
||||
cx.update(|_, cx| {
|
||||
connection.send_update(
|
||||
session_id.clone(),
|
||||
acp::SessionUpdate::AgentMessageChunk {
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: "Message 1 resp".into(),
|
||||
},
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
@@ -6655,9 +6944,10 @@ pub(crate) mod tests {
|
||||
// Simulate a response sent after beginning to cancel
|
||||
connection.send_update(
|
||||
session_id.clone(),
|
||||
acp::SessionUpdate::AgentMessageChunk {
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: "onse".into(),
|
||||
},
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
@@ -6688,9 +6978,10 @@ pub(crate) mod tests {
|
||||
cx.update(|_, cx| {
|
||||
connection.send_update(
|
||||
session_id.clone(),
|
||||
acp::SessionUpdate::AgentMessageChunk {
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: "Message 2 response".into(),
|
||||
},
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
connection.end_turn(session_id.clone(), acp::StopReason::EndTurn);
|
||||
@@ -6728,13 +7019,16 @@ pub(crate) mod tests {
|
||||
init_test(cx);
|
||||
|
||||
let connection = StubAgentConnection::new();
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
}),
|
||||
}]);
|
||||
},
|
||||
)]);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
|
||||
add_to_workspace(thread_view.clone(), cx);
|
||||
@@ -6811,13 +7105,16 @@ pub(crate) mod tests {
|
||||
init_test(cx);
|
||||
|
||||
let connection = StubAgentConnection::new();
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
}),
|
||||
}]);
|
||||
},
|
||||
)]);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
|
||||
add_to_workspace(thread_view.clone(), cx);
|
||||
|
||||
@@ -23,15 +23,17 @@ use language::LanguageRegistry;
|
||||
use language_model::{
|
||||
LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID,
|
||||
};
|
||||
use language_models::AllLanguageModelSettings;
|
||||
use notifications::status_toast::{StatusToast, ToastIcon};
|
||||
use project::{
|
||||
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
|
||||
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
|
||||
};
|
||||
use settings::{SettingsStore, update_settings_file};
|
||||
use settings::{Settings, SettingsStore, update_settings_file};
|
||||
use ui::{
|
||||
Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex,
|
||||
Indicator, PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
|
||||
Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor,
|
||||
ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch,
|
||||
SwitchColor, Tooltip, WithScrollbar, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{Workspace, create_and_open_local_file};
|
||||
@@ -152,7 +154,42 @@ pub enum AssistantConfigurationEvent {
|
||||
|
||||
impl EventEmitter<AssistantConfigurationEvent> for AgentConfiguration {}
|
||||
|
||||
enum AgentIcon {
|
||||
Name(IconName),
|
||||
Path(SharedString),
|
||||
}
|
||||
|
||||
impl AgentConfiguration {
|
||||
fn render_section_title(
|
||||
&mut self,
|
||||
title: impl Into<SharedString>,
|
||||
description: impl Into<SharedString>,
|
||||
menu: AnyElement,
|
||||
) -> impl IntoElement {
|
||||
h_flex()
|
||||
.p_4()
|
||||
.pb_0()
|
||||
.mb_2p5()
|
||||
.items_start()
|
||||
.justify_between()
|
||||
.child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.pr_1()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.flex_wrap()
|
||||
.child(Headline::new(title.into()))
|
||||
.child(menu),
|
||||
)
|
||||
.child(Label::new(description.into()).color(Color::Muted)),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_provider_configuration_block(
|
||||
&mut self,
|
||||
provider: &Arc<dyn LanguageModelProvider>,
|
||||
@@ -287,7 +324,7 @@ impl AgentConfiguration {
|
||||
"Start New Thread",
|
||||
)
|
||||
.full_width()
|
||||
.style(ButtonStyle::Filled)
|
||||
.style(ButtonStyle::Outlined)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Thread)
|
||||
@@ -303,89 +340,122 @@ impl AgentConfiguration {
|
||||
}
|
||||
})),
|
||||
)
|
||||
}),
|
||||
})
|
||||
.when(
|
||||
is_expanded && is_removable_provider(&provider.id(), cx),
|
||||
|this| {
|
||||
this.child(
|
||||
Button::new(
|
||||
SharedString::from(format!("delete-provider-{provider_id}")),
|
||||
"Remove Provider",
|
||||
)
|
||||
.full_width()
|
||||
.style(ButtonStyle::Outlined)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Trash)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click(cx.listener({
|
||||
let provider = provider.clone();
|
||||
move |this, _event, window, cx| {
|
||||
this.delete_provider(provider.clone(), window, cx);
|
||||
}
|
||||
})),
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn delete_provider(
|
||||
&mut self,
|
||||
provider: Arc<dyn LanguageModelProvider>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let fs = self.fs.clone();
|
||||
let provider_id = provider.id();
|
||||
|
||||
cx.spawn_in(window, async move |_, cx| {
|
||||
cx.update(|_window, cx| {
|
||||
update_settings_file(fs.clone(), cx, {
|
||||
let provider_id = provider_id.clone();
|
||||
move |settings, _| {
|
||||
if let Some(ref mut openai_compatible) = settings
|
||||
.language_models
|
||||
.as_mut()
|
||||
.and_then(|lm| lm.openai_compatible.as_mut())
|
||||
{
|
||||
let key_to_remove: Arc<str> = Arc::from(provider_id.0.as_ref());
|
||||
openai_compatible.remove(&key_to_remove);
|
||||
}
|
||||
}
|
||||
});
|
||||
})
|
||||
.log_err();
|
||||
|
||||
cx.update(|_window, cx| {
|
||||
LanguageModelRegistry::global(cx).update(cx, {
|
||||
let provider_id = provider_id.clone();
|
||||
move |registry, cx| {
|
||||
registry.unregister_provider(provider_id, cx);
|
||||
}
|
||||
})
|
||||
})
|
||||
.log_err();
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn render_provider_configuration_section(
|
||||
&mut self,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement {
|
||||
let providers = LanguageModelRegistry::read_global(cx).providers();
|
||||
let popover_menu = PopoverMenu::new("add-provider-popover")
|
||||
.trigger(
|
||||
Button::new("add-provider", "Add Provider")
|
||||
.style(ButtonStyle::Outlined)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small),
|
||||
)
|
||||
.anchor(gpui::Corner::TopRight)
|
||||
.menu({
|
||||
let workspace = self.workspace.clone();
|
||||
move |window, cx| {
|
||||
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
|
||||
menu.header("Compatible APIs").entry("OpenAI", None, {
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
AddLlmProviderModal::toggle(
|
||||
LlmCompatibleProvider::OpenAi,
|
||||
workspace,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
})
|
||||
}))
|
||||
}
|
||||
});
|
||||
|
||||
v_flex()
|
||||
.w_full()
|
||||
.child(
|
||||
h_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.pb_0()
|
||||
.mb_2p5()
|
||||
.items_start()
|
||||
.justify_between()
|
||||
.child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.pr_1()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.child(Headline::new("LLM Providers"))
|
||||
.child(
|
||||
PopoverMenu::new("add-provider-popover")
|
||||
.trigger(
|
||||
Button::new("add-provider", "Add Provider")
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small),
|
||||
)
|
||||
.anchor(gpui::Corner::TopRight)
|
||||
.menu({
|
||||
let workspace = self.workspace.clone();
|
||||
move |window, cx| {
|
||||
Some(ContextMenu::build(
|
||||
window,
|
||||
cx,
|
||||
|menu, _window, _cx| {
|
||||
menu.header("Compatible APIs").entry(
|
||||
"OpenAI",
|
||||
None,
|
||||
{
|
||||
let workspace =
|
||||
workspace.clone();
|
||||
move |window, cx| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
AddLlmProviderModal::toggle(
|
||||
LlmCompatibleProvider::OpenAi,
|
||||
workspace,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
},
|
||||
)
|
||||
},
|
||||
))
|
||||
}
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
Label::new("Add at least one provider to use AI-powered features with Zed's native agent.")
|
||||
.color(Color::Muted),
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(self.render_section_title(
|
||||
"LLM Providers",
|
||||
"Add at least one provider to use AI-powered features with Zed's native agent.",
|
||||
popover_menu.into_any_element(),
|
||||
))
|
||||
.child(
|
||||
div()
|
||||
.w_full()
|
||||
@@ -464,8 +534,7 @@ impl AgentConfiguration {
|
||||
let add_server_popover = PopoverMenu::new("add-server-popover")
|
||||
.trigger(
|
||||
Button::new("add-server", "Add Server")
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.style(ButtonStyle::Outlined)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
@@ -498,61 +567,57 @@ impl AgentConfiguration {
|
||||
});
|
||||
|
||||
v_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_2()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(self.render_section_title(
|
||||
"Model Context Protocol (MCP) Servers",
|
||||
"All MCP servers connected directly or via a Zed extension.",
|
||||
add_server_popover.into_any_element(),
|
||||
))
|
||||
.child(
|
||||
h_flex()
|
||||
v_flex()
|
||||
.pl_4()
|
||||
.pb_4()
|
||||
.pr_5()
|
||||
.w_full()
|
||||
.items_start()
|
||||
.justify_between()
|
||||
.gap_1()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(Headline::new("Model Context Protocol (MCP) Servers"))
|
||||
.child(
|
||||
Label::new(
|
||||
"All MCP servers connected directly or via a Zed extension.",
|
||||
)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(add_server_popover),
|
||||
)
|
||||
.child(v_flex().w_full().gap_1().map(|mut parent| {
|
||||
if context_server_ids.is_empty() {
|
||||
parent.child(
|
||||
h_flex()
|
||||
.p_4()
|
||||
.justify_center()
|
||||
.border_1()
|
||||
.border_dashed()
|
||||
.border_color(cx.theme().colors().border.opacity(0.6))
|
||||
.rounded_sm()
|
||||
.child(
|
||||
Label::new("No MCP servers added yet.")
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
for (index, context_server_id) in context_server_ids.into_iter().enumerate() {
|
||||
if index > 0 {
|
||||
parent = parent.child(
|
||||
Divider::horizontal()
|
||||
.color(DividerColor::BorderFaded)
|
||||
.into_any_element(),
|
||||
);
|
||||
.map(|mut parent| {
|
||||
if context_server_ids.is_empty() {
|
||||
parent.child(
|
||||
h_flex()
|
||||
.p_4()
|
||||
.justify_center()
|
||||
.border_1()
|
||||
.border_dashed()
|
||||
.border_color(cx.theme().colors().border.opacity(0.6))
|
||||
.rounded_sm()
|
||||
.child(
|
||||
Label::new("No MCP servers added yet.")
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
for (index, context_server_id) in
|
||||
context_server_ids.into_iter().enumerate()
|
||||
{
|
||||
if index > 0 {
|
||||
parent = parent.child(
|
||||
Divider::horizontal()
|
||||
.color(DividerColor::BorderFaded)
|
||||
.into_any_element(),
|
||||
);
|
||||
}
|
||||
parent = parent.child(self.render_context_server(
|
||||
context_server_id,
|
||||
window,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
parent
|
||||
}
|
||||
parent =
|
||||
parent.child(self.render_context_server(context_server_id, window, cx));
|
||||
}
|
||||
parent
|
||||
}
|
||||
}))
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_context_server(
|
||||
@@ -597,12 +662,12 @@ impl AgentConfiguration {

        let (source_icon, source_tooltip) = if is_from_extension {
            (
                IconName::ZedMcpExtension,
                IconName::ZedSrcExtension,
                "This MCP server was installed from an extension.",
            )
        } else {
            (
                IconName::ZedMcpCustom,
                IconName::ZedSrcCustom,
                "This custom MCP server was installed directly.",
            )
        };
@@ -884,9 +949,9 @@ impl AgentConfiguration {
|
||||
}
|
||||
|
||||
fn render_agent_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let user_defined_agents = self
|
||||
.agent_server_store
|
||||
.read(cx)
|
||||
let agent_server_store = self.agent_server_store.read(cx);
|
||||
|
||||
let user_defined_agents = agent_server_store
|
||||
.external_agents()
|
||||
.filter(|name| {
|
||||
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME
|
||||
@@ -897,102 +962,121 @@ impl AgentConfiguration {
|
||||
let user_defined_agents = user_defined_agents
|
||||
.into_iter()
|
||||
.map(|name| {
|
||||
self.render_agent_server(IconName::Ai, name)
|
||||
let icon = if let Some(icon_path) = agent_server_store.agent_icon(&name) {
|
||||
AgentIcon::Path(icon_path)
|
||||
} else {
|
||||
AgentIcon::Name(IconName::Ai)
|
||||
};
|
||||
self.render_agent_server(icon, name, true)
|
||||
.into_any_element()
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let add_agens_button = Button::new("add-agent", "Add Agent")
|
||||
.style(ButtonStyle::Outlined)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click(move |_, window, cx| {
|
||||
if let Some(workspace) = window.root().flatten() {
|
||||
let workspace = workspace.downgrade();
|
||||
window
|
||||
.spawn(cx, async |cx| {
|
||||
open_new_agent_servers_entry_in_settings_editor(workspace, cx).await
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
});
|
||||
|
||||
v_flex()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
v_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_2()
|
||||
.child(self.render_section_title(
|
||||
"External Agents",
|
||||
"All agents connected through the Agent Client Protocol.",
|
||||
add_agens_button.into_any_element(),
|
||||
))
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.pr_1()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.child(Headline::new("External Agents"))
|
||||
.child(
|
||||
Button::new("add-agent", "Add Agent")
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click(
|
||||
move |_, window, cx| {
|
||||
if let Some(workspace) = window.root().flatten() {
|
||||
let workspace = workspace.downgrade();
|
||||
window
|
||||
.spawn(cx, async |cx| {
|
||||
open_new_agent_servers_entry_in_settings_editor(
|
||||
workspace,
|
||||
cx,
|
||||
).await
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
),
|
||||
)
|
||||
)
|
||||
.child(
|
||||
Label::new(
|
||||
"All agents connected through the Agent Client Protocol.",
|
||||
)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiClaude,
|
||||
"Claude Code",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiOpenAi,
|
||||
"Codex",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiGemini,
|
||||
"Gemini CLI",
|
||||
))
|
||||
.map(|mut parent| {
|
||||
for agent in user_defined_agents {
|
||||
parent = parent.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(agent);
|
||||
}
|
||||
parent
|
||||
})
|
||||
.p_4()
|
||||
.pt_0()
|
||||
.gap_2()
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiClaude),
|
||||
"Claude Code",
|
||||
false,
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiOpenAi),
|
||||
"Codex CLI",
|
||||
false,
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiGemini),
|
||||
"Gemini CLI",
|
||||
false,
|
||||
))
|
||||
.map(|mut parent| {
|
||||
for agent in user_defined_agents {
|
||||
parent = parent
|
||||
.child(
|
||||
Divider::horizontal().color(DividerColor::BorderFaded),
|
||||
)
|
||||
.child(agent);
|
||||
}
|
||||
parent
|
||||
}),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_agent_server(
|
||||
&self,
|
||||
icon: IconName,
|
||||
icon: AgentIcon,
|
||||
name: impl Into<SharedString>,
|
||||
external: bool,
|
||||
) -> impl IntoElement {
|
||||
h_flex().gap_1p5().justify_between().child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(Icon::new(icon).size(IconSize::Small).color(Color::Muted))
|
||||
.child(Label::new(name.into()))
|
||||
.child(
|
||||
Icon::new(IconName::Check)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Success),
|
||||
),
|
||||
)
|
||||
let name = name.into();
|
||||
let icon = match icon {
|
||||
AgentIcon::Name(icon_name) => Icon::new(icon_name)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
AgentIcon::Path(icon_path) => Icon::from_path(icon_path)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
};
|
||||
|
||||
let tooltip_id = SharedString::new(format!("agent-source-{}", name));
|
||||
let tooltip_message = format!("The {} agent was installed from an extension.", name);
|
||||
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(icon)
|
||||
.child(Label::new(name))
|
||||
.when(external, |this| {
|
||||
this.child(
|
||||
div()
|
||||
.id(tooltip_id)
|
||||
.flex_none()
|
||||
.tooltip(Tooltip::text(tooltip_message))
|
||||
.child(
|
||||
Icon::new(IconName::ZedSrcExtension)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
Icon::new(IconName::Check)
|
||||
.color(Color::Success)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1221,3 +1305,14 @@ fn find_text_in_buffer(
        None
    }
}

// OpenAI-compatible providers are user-configured and can be removed,
// whereas built-in providers (like Anthropic, OpenAI, Google, etc.) can't.
//
// If in the future we have more "API-compatible-type" of providers,
// they should be included here as removable providers.
fn is_removable_provider(provider_id: &LanguageModelProviderId, cx: &App) -> bool {
    AllLanguageModelSettings::get_global(cx)
        .openai_compatible
        .contains_key(provider_id.0.as_ref())
}

@@ -70,14 +70,6 @@ impl AgentDiffThread {
        }
    }

    fn is_generating(&self, cx: &App) -> bool {
        match self {
            AgentDiffThread::AcpThread(thread) => {
                thread.read(cx).status() == acp_thread::ThreadStatus::Generating
            }
        }
    }

    fn has_pending_edit_tool_uses(&self, cx: &App) -> bool {
        match self {
            AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(),
@@ -576,16 +568,22 @@ impl Item for AgentDiffPane {
|
||||
});
|
||||
}
|
||||
|
||||
fn can_split(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn clone_on_split(
|
||||
&self,
|
||||
_workspace_id: Option<workspace::WorkspaceId>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Entity<Self>>
|
||||
) -> Task<Option<Entity<Self>>>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
Some(cx.new(|cx| Self::new(self.thread.clone(), self.workspace.clone(), window, cx)))
|
||||
Task::ready(Some(cx.new(|cx| {
|
||||
Self::new(self.thread.clone(), self.workspace.clone(), window, cx)
|
||||
})))
|
||||
}
|
||||
|
||||
fn is_dirty(&self, cx: &App) -> bool {
|
||||
@@ -964,9 +962,7 @@ impl AgentDiffToolbar {
|
||||
None => ToolbarItemLocation::Hidden,
|
||||
Some(AgentDiffToolbarItem::Pane(_)) => ToolbarItemLocation::PrimaryRight,
|
||||
Some(AgentDiffToolbarItem::Editor { state, .. }) => match state {
|
||||
EditorState::Generating | EditorState::Reviewing => {
|
||||
ToolbarItemLocation::PrimaryRight
|
||||
}
|
||||
EditorState::Reviewing => ToolbarItemLocation::PrimaryRight,
|
||||
EditorState::Idle => ToolbarItemLocation::Hidden,
|
||||
},
|
||||
}
|
||||
@@ -1044,7 +1040,6 @@ impl Render for AgentDiffToolbar {
|
||||
|
||||
let content = match state {
|
||||
EditorState::Idle => return Empty.into_any(),
|
||||
EditorState::Generating => vec![spinner_icon],
|
||||
EditorState::Reviewing => vec![
|
||||
h_flex()
|
||||
.child(
|
||||
@@ -1216,7 +1211,6 @@ pub struct AgentDiff {
|
||||
pub enum EditorState {
|
||||
Idle,
|
||||
Reviewing,
|
||||
Generating,
|
||||
}
|
||||
|
||||
struct WorkspaceThread {
|
||||
@@ -1539,15 +1533,11 @@ impl AgentDiff {
|
||||
multibuffer.add_diff(diff_handle.clone(), cx);
|
||||
});
|
||||
|
||||
let new_state = if thread.is_generating(cx) {
|
||||
EditorState::Generating
|
||||
} else {
|
||||
EditorState::Reviewing
|
||||
};
|
||||
let reviewing_state = EditorState::Reviewing;
|
||||
|
||||
let previous_state = self
|
||||
.reviewing_editors
|
||||
.insert(weak_editor.clone(), new_state.clone());
|
||||
.insert(weak_editor.clone(), reviewing_state.clone());
|
||||
|
||||
if previous_state.is_none() {
|
||||
editor.update(cx, |editor, cx| {
|
||||
@@ -1560,7 +1550,9 @@ impl AgentDiff {
|
||||
unaffected.remove(weak_editor);
|
||||
}
|
||||
|
||||
if new_state == EditorState::Reviewing && previous_state != Some(new_state) {
|
||||
if reviewing_state == EditorState::Reviewing
|
||||
&& previous_state != Some(reviewing_state)
|
||||
{
|
||||
// Jump to first hunk when we enter review mode
|
||||
editor.update(cx, |editor, cx| {
|
||||
let snapshot = multibuffer.read(cx).snapshot(cx);
|
||||
|
||||
@@ -6,8 +6,11 @@ use std::sync::Arc;
|
||||
use acp_thread::AcpThread;
|
||||
use agent::{ContextServerRegistry, DbThreadMetadata, HistoryEntry, HistoryStore};
|
||||
use db::kvp::{Dismissable, KEY_VALUE_STORE};
|
||||
use project::agent_server_store::{
|
||||
AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME,
|
||||
use project::{
|
||||
ExternalAgentServerName,
|
||||
agent_server_store::{
|
||||
AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME,
|
||||
},
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{
|
||||
@@ -16,8 +19,6 @@ use settings::{
|
||||
use zed_actions::OpenBrowser;
|
||||
use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent};
|
||||
|
||||
use crate::acp::{AcpThreadHistory, ThreadHistoryEvent};
|
||||
use crate::context_store::ContextStore;
|
||||
use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
|
||||
use crate::{
|
||||
AddContextServer, AgentDiffPane, DeleteRecentlyOpenThread, Follow, InlineAssistant,
|
||||
@@ -30,9 +31,14 @@ use crate::{
|
||||
text_thread_editor::{AgentPanelDelegate, TextThreadEditor, make_lsp_adapter_delegate},
|
||||
ui::{AgentOnboardingModal, EndTrialUpsell},
|
||||
};
|
||||
use crate::{
|
||||
ExpandMessageEditor,
|
||||
acp::{AcpThreadHistory, ThreadHistoryEvent},
|
||||
};
|
||||
use crate::{
|
||||
ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, placeholder_command,
|
||||
};
|
||||
use crate::{ManageProfiles, context_store::ContextStore};
|
||||
use agent_settings::AgentSettings;
|
||||
use ai_onboarding::AgentPanelOnboarding;
|
||||
use anyhow::{Result, anyhow};
|
||||
@@ -41,6 +47,8 @@ use assistant_text_thread::{TextThread, TextThreadEvent, TextThreadSummary};
|
||||
use client::{UserStore, zed_urls};
|
||||
use cloud_llm_client::{Plan, PlanV1, PlanV2, UsageLimit};
|
||||
use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
|
||||
use extension::ExtensionEvents;
|
||||
use extension_host::ExtensionStore;
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Action, AnyElement, App, AsyncWindowContext, Corner, DismissEvent, Entity, EventEmitter,
|
||||
@@ -67,7 +75,9 @@ use workspace::{
|
||||
};
|
||||
use zed_actions::{
|
||||
DecreaseBufferFontSize, IncreaseBufferFontSize, ResetBufferFontSize,
|
||||
agent::{OpenAcpOnboardingModal, OpenOnboardingModal, OpenSettings, ResetOnboarding},
|
||||
agent::{
|
||||
OpenAcpOnboardingModal, OpenOnboardingModal, OpenSettings, ResetAgentZoom, ResetOnboarding,
|
||||
},
|
||||
assistant::{OpenRulesLibrary, ToggleFocus},
|
||||
};
|
||||
|
||||
@@ -99,6 +109,12 @@ pub fn init(cx: &mut App) {
|
||||
}
|
||||
},
|
||||
)
|
||||
.register_action(|workspace, _: &ExpandMessageEditor, window, cx| {
|
||||
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
|
||||
workspace.focus_panel::<AgentPanel>(window, cx);
|
||||
panel.update(cx, |panel, cx| panel.expand_message_editor(window, cx));
|
||||
}
|
||||
})
|
||||
.register_action(|workspace, _: &OpenHistory, window, cx| {
|
||||
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
|
||||
workspace.focus_panel::<AgentPanel>(window, cx);
|
||||
@@ -188,6 +204,13 @@ pub fn init(cx: &mut App) {
|
||||
})
|
||||
.register_action(|_workspace, _: &ResetTrialEndUpsell, _window, cx| {
|
||||
TrialEndUpsell::set_dismissed(false, cx);
|
||||
})
|
||||
.register_action(|workspace, _: &ResetAgentZoom, window, cx| {
|
||||
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.reset_agent_zoom(window, cx);
|
||||
});
|
||||
}
|
||||
});
|
||||
},
|
||||
)
|
||||
@@ -422,6 +445,7 @@ pub struct AgentPanel {
|
||||
agent_panel_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
agent_navigation_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
agent_navigation_menu: Option<Entity<ContextMenu>>,
|
||||
_extension_subscription: Option<Subscription>,
|
||||
width: Option<Pixels>,
|
||||
height: Option<Pixels>,
|
||||
zoomed: bool,
|
||||
@@ -632,7 +656,24 @@ impl AgentPanel {
|
||||
)
|
||||
});
|
||||
|
||||
Self {
|
||||
// Subscribe to extension events to sync agent servers when extensions change
|
||||
let extension_subscription = if let Some(extension_events) = ExtensionEvents::try_global(cx)
|
||||
{
|
||||
Some(
|
||||
cx.subscribe(&extension_events, |this, _source, event, cx| match event {
|
||||
extension::Event::ExtensionInstalled(_)
|
||||
| extension::Event::ExtensionUninstalled(_)
|
||||
| extension::Event::ExtensionsInstalledChanged => {
|
||||
this.sync_agent_servers_from_extensions(cx);
|
||||
}
|
||||
_ => {}
|
||||
}),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut panel = Self {
|
||||
active_view,
|
||||
workspace,
|
||||
user_store,
|
||||
@@ -650,6 +691,7 @@ impl AgentPanel {
|
||||
agent_panel_menu_handle: PopoverMenuHandle::default(),
|
||||
agent_navigation_menu_handle: PopoverMenuHandle::default(),
|
||||
agent_navigation_menu: None,
|
||||
_extension_subscription: extension_subscription,
|
||||
width: None,
|
||||
height: None,
|
||||
zoomed: false,
|
||||
@@ -659,7 +701,11 @@ impl AgentPanel {
|
||||
history_store,
|
||||
selected_agent: AgentType::default(),
|
||||
loading: false,
|
||||
}
|
||||
};
|
||||
|
||||
// Initial sync of agent servers from extensions
|
||||
panel.sync_agent_servers_from_extensions(cx);
|
||||
panel
|
||||
}
|
||||
|
||||
pub fn toggle_focus(
|
||||
@@ -692,6 +738,25 @@ impl AgentPanel {
|
||||
&self.context_server_registry
|
||||
}
|
||||
|
||||
    pub fn is_hidden(workspace: &Entity<Workspace>, cx: &App) -> bool {
        let workspace_read = workspace.read(cx);

        workspace_read
            .panel::<AgentPanel>(cx)
            .map(|panel| {
                let panel_id = Entity::entity_id(&panel);

                let is_visible = workspace_read.all_docks().iter().any(|dock| {
                    dock.read(cx)
                        .visible_panel()
                        .is_some_and(|visible_panel| visible_panel.panel_id() == panel_id)
                });

                !is_visible
            })
            .unwrap_or(true)
    }

fn active_thread_view(&self) -> Option<&Entity<AcpThreadView>> {
|
||||
match &self.active_view {
|
||||
ActiveView::ExternalAgentThread { thread_view, .. } => Some(thread_view),
|
||||
@@ -888,6 +953,15 @@ impl AgentPanel {
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn expand_message_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if let Some(thread_view) = self.active_thread_view() {
|
||||
thread_view.update(cx, |view, cx| {
|
||||
view.expand_message_editor(&ExpandMessageEditor, window, cx);
|
||||
view.focus_handle(cx).focus(window);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn open_history(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if matches!(self.active_view, ActiveView::History) {
|
||||
if let Some(previous_view) = self.previous_view.take() {
|
||||
@@ -1031,13 +1105,21 @@ impl AgentPanel {
|
||||
update_settings_file(self.fs.clone(), cx, move |settings, cx| {
|
||||
let agent_ui_font_size =
|
||||
ThemeSettings::get_global(cx).agent_ui_font_size(cx) + delta;
|
||||
let agent_buffer_font_size =
|
||||
ThemeSettings::get_global(cx).agent_buffer_font_size(cx) + delta;
|
||||
|
||||
let _ = settings
|
||||
.theme
|
||||
.agent_ui_font_size
|
||||
.insert(theme::clamp_font_size(agent_ui_font_size).into());
|
||||
let _ = settings
|
||||
.theme
|
||||
.agent_buffer_font_size
|
||||
.insert(theme::clamp_font_size(agent_buffer_font_size).into());
|
||||
});
|
||||
} else {
|
||||
theme::adjust_agent_ui_font_size(cx, |size| size + delta);
|
||||
theme::adjust_agent_buffer_font_size(cx, |size| size + delta);
|
||||
}
|
||||
}
|
||||
WhichFontSize::BufferFont => {
|
||||
@@ -1058,12 +1140,19 @@ impl AgentPanel {
|
||||
if action.persist {
|
||||
update_settings_file(self.fs.clone(), cx, move |settings, _| {
|
||||
settings.theme.agent_ui_font_size = None;
|
||||
settings.theme.agent_buffer_font_size = None;
|
||||
});
|
||||
} else {
|
||||
theme::reset_agent_ui_font_size(cx);
|
||||
theme::reset_agent_buffer_font_size(cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reset_agent_zoom(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
theme::reset_agent_ui_font_size(cx);
|
||||
theme::reset_agent_buffer_font_size(cx);
|
||||
}
|
||||
|
||||
pub fn toggle_zoom(&mut self, _: &ToggleZoom, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.zoomed {
|
||||
cx.emit(PanelEvent::ZoomOut);
|
||||
@@ -1309,6 +1398,31 @@ impl AgentPanel {
|
||||
self.selected_agent.clone()
|
||||
}
|
||||
|
||||
fn sync_agent_servers_from_extensions(&mut self, cx: &mut Context<Self>) {
|
||||
if let Some(extension_store) = ExtensionStore::try_global(cx) {
|
||||
let (manifests, extensions_dir) = {
|
||||
let store = extension_store.read(cx);
|
||||
let installed = store.installed_extensions();
|
||||
let manifests: Vec<_> = installed
|
||||
.iter()
|
||||
.map(|(id, entry)| (id.clone(), entry.manifest.clone()))
|
||||
.collect();
|
||||
let extensions_dir = paths::extensions_dir().join("installed");
|
||||
(manifests, extensions_dir)
|
||||
};
|
||||
|
||||
self.project.update(cx, |project, cx| {
|
||||
project.agent_server_store().update(cx, |store, cx| {
|
||||
let manifest_refs: Vec<_> = manifests
|
||||
.iter()
|
||||
.map(|(id, manifest)| (id.as_ref(), manifest.as_ref()))
|
||||
.collect();
|
||||
store.sync_extension_agents(manifest_refs, extensions_dir, cx);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_agent_thread(
|
||||
&mut self,
|
||||
agent: AgentType,
|
||||
@@ -1666,10 +1780,9 @@ impl AgentPanel {
|
||||
}),
|
||||
)
|
||||
.action("Add Custom Server…", Box::new(AddContextServer))
|
||||
.separator();
|
||||
|
||||
menu = menu
|
||||
.separator()
|
||||
.action("Rules", Box::new(OpenRulesLibrary::default()))
|
||||
.action("Profiles", Box::new(ManageProfiles::default()))
|
||||
.action("Settings", Box::new(OpenSettings))
|
||||
.separator()
|
||||
.action(full_screen_label, Box::new(ToggleZoom));
|
||||
@@ -1744,6 +1857,16 @@ impl AgentPanel {
|
||||
let agent_server_store = self.project.read(cx).agent_server_store().clone();
|
||||
let focus_handle = self.focus_handle(cx);
|
||||
|
||||
// Get custom icon path for selected agent before building menu (to avoid borrow issues)
|
||||
let selected_agent_custom_icon =
|
||||
if let AgentType::Custom { name, .. } = &self.selected_agent {
|
||||
agent_server_store
|
||||
.read(cx)
|
||||
.agent_icon(&ExternalAgentServerName(name.clone()))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let active_thread = match &self.active_view {
|
||||
ActiveView::ExternalAgentThread { thread_view } => {
|
||||
thread_view.read(cx).as_native_thread(cx)
|
||||
@@ -1758,7 +1881,7 @@ impl AgentPanel {
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |_window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"New…",
|
||||
"New Thread…",
|
||||
&ToggleNewThreadMenu,
|
||||
&focus_handle,
|
||||
cx,
|
||||
@@ -1781,8 +1904,7 @@ impl AgentPanel {
|
||||
|
||||
let active_thread = active_thread.clone();
|
||||
Some(ContextMenu::build(window, cx, |menu, _window, cx| {
|
||||
menu
|
||||
.context(focus_handle.clone())
|
||||
menu.context(focus_handle.clone())
|
||||
.header("Zed Agent")
|
||||
.when_some(active_thread, |this, active_thread| {
|
||||
let thread = active_thread.read(cx);
|
||||
@@ -1861,7 +1983,7 @@ impl AgentPanel {
|
||||
.separator()
|
||||
.header("External Agents")
|
||||
.item(
|
||||
ContextMenuEntry::new("New Claude Code Thread")
|
||||
ContextMenuEntry::new("New Claude Code")
|
||||
.icon(IconName::AiClaude)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
@@ -1887,7 +2009,7 @@ impl AgentPanel {
|
||||
}),
|
||||
)
|
||||
.item(
|
||||
ContextMenuEntry::new("New Codex Thread")
|
||||
ContextMenuEntry::new("New Codex CLI")
|
||||
.icon(IconName::AiOpenAi)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
@@ -1913,7 +2035,7 @@ impl AgentPanel {
|
||||
}),
|
||||
)
|
||||
.item(
|
||||
ContextMenuEntry::new("New Gemini CLI Thread")
|
||||
ContextMenuEntry::new("New Gemini CLI")
|
||||
.icon(IconName::AiGemini)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_via_collab)
|
||||
@@ -1939,77 +2061,110 @@ impl AgentPanel {
|
||||
}),
|
||||
)
|
||||
.map(|mut menu| {
|
||||
let agent_names = agent_server_store
|
||||
.read(cx)
|
||||
let agent_server_store_read = agent_server_store.read(cx);
|
||||
let agent_names = agent_server_store_read
|
||||
.external_agents()
|
||||
.filter(|name| {
|
||||
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME
|
||||
name.0 != GEMINI_NAME
|
||||
&& name.0 != CLAUDE_CODE_NAME
|
||||
&& name.0 != CODEX_NAME
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
let custom_settings = cx.global::<SettingsStore>().get::<AllAgentServersSettings>(None).custom.clone();
|
||||
let custom_settings = cx
|
||||
.global::<SettingsStore>()
|
||||
.get::<AllAgentServersSettings>(None)
|
||||
.custom
|
||||
.clone();
|
||||
for agent_name in agent_names {
|
||||
menu = menu.item(
|
||||
ContextMenuEntry::new(format!("New {} Thread", agent_name))
|
||||
.icon(IconName::Terminal)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_via_collab)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
let agent_name = agent_name.clone();
|
||||
let custom_settings = custom_settings.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Custom {
|
||||
name: agent_name.clone().into(),
|
||||
command: custom_settings
|
||||
.get(&agent_name.0)
|
||||
.map(|settings| {
|
||||
settings.command.clone()
|
||||
})
|
||||
.unwrap_or(placeholder_command()),
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
let icon_path = agent_server_store_read.agent_icon(&agent_name);
|
||||
let mut entry =
|
||||
ContextMenuEntry::new(format!("New {}", agent_name));
|
||||
if let Some(icon_path) = icon_path {
|
||||
entry = entry.custom_icon_path(icon_path);
|
||||
} else {
|
||||
entry = entry.icon(IconName::Terminal);
|
||||
}
|
||||
entry = entry
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_via_collab)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
let agent_name = agent_name.clone();
|
||||
let custom_settings = custom_settings.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Custom {
|
||||
name: agent_name
|
||||
.clone()
|
||||
.into(),
|
||||
command: custom_settings
|
||||
.get(&agent_name.0)
|
||||
.map(|settings| {
|
||||
settings
|
||||
.command
|
||||
.clone()
|
||||
})
|
||||
.unwrap_or(
|
||||
placeholder_command(
|
||||
),
|
||||
),
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
menu = menu.item(entry);
|
||||
}
|
||||
|
||||
menu
|
||||
})
|
||||
.separator().link(
|
||||
"Add Other Agents",
|
||||
OpenBrowser {
|
||||
url: zed_urls::external_agents_docs(cx),
|
||||
}
|
||||
.boxed_clone(),
|
||||
)
|
||||
.separator()
|
||||
.link(
|
||||
"Add Other Agents",
|
||||
OpenBrowser {
|
||||
url: zed_urls::external_agents_docs(cx),
|
||||
}
|
||||
.boxed_clone(),
|
||||
)
|
||||
}))
|
||||
}
|
||||
});
|
||||
|
||||
let selected_agent_label = self.selected_agent.label();
|
||||
|
||||
let has_custom_icon = selected_agent_custom_icon.is_some();
|
||||
let selected_agent = div()
|
||||
.id("selected_agent_icon")
|
||||
.when_some(self.selected_agent.icon(), |this, icon| {
|
||||
.when_some(selected_agent_custom_icon, |this, icon_path| {
|
||||
let label = selected_agent_label.clone();
|
||||
this.px(DynamicSpacing::Base02.rems(cx))
|
||||
.child(Icon::new(icon).color(Color::Muted))
|
||||
.child(Icon::from_path(icon_path).color(Color::Muted))
|
||||
.tooltip(move |_window, cx| {
|
||||
Tooltip::with_meta(selected_agent_label.clone(), None, "Selected Agent", cx)
|
||||
Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)
|
||||
})
|
||||
})
|
||||
.when(!has_custom_icon, |this| {
|
||||
this.when_some(self.selected_agent.icon(), |this, icon| {
|
||||
let label = selected_agent_label.clone();
|
||||
this.px(DynamicSpacing::Base02.rems(cx))
|
||||
.child(Icon::new(icon).color(Color::Muted))
|
||||
.tooltip(move |_window, cx| {
|
||||
Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)
|
||||
})
|
||||
})
|
||||
})
|
||||
.into_any_element();
|
||||
|
||||
h_flex()
|
||||
|
||||
@@ -620,8 +620,18 @@ impl TextThreadContextHandle {
|
||||
|
||||
impl Display for TextThreadContext {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
// TODO: escape title?
|
||||
writeln!(f, "<text_thread title=\"{}\">", self.title)?;
|
||||
write!(f, "<text_thread title=\"")?;
|
||||
for c in self.title.chars() {
|
||||
match c {
|
||||
'&' => write!(f, "&amp;")?,
'<' => write!(f, "&lt;")?,
'>' => write!(f, "&gt;")?,
'"' => write!(f, "&quot;")?,
'\'' => write!(f, "&apos;")?,
|
||||
_ => write!(f, "{}", c)?,
|
||||
}
|
||||
}
|
||||
writeln!(f, "\">")?;
|
||||
write!(f, "{}", self.text.trim())?;
|
||||
write!(f, "\n</text_thread>")
|
||||
}
|
||||
|
||||
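For context, the escaping added above replaces XML-reserved characters in the title with entities so the generated <text_thread> tag stays well formed. A standalone sketch of the same idea (illustrative only, not code from this change):

fn escape_xml_attr(input: &str) -> String {
    // Replace the five XML-reserved characters; pass everything else through.
    let mut out = String::with_capacity(input.len());
    for c in input.chars() {
        match c {
            '&' => out.push_str("&amp;"),
            '<' => out.push_str("&lt;"),
            '>' => out.push_str("&gt;"),
            '"' => out.push_str("&quot;"),
            '\'' => out.push_str("&apos;"),
            _ => out.push(c),
        }
    }
    out
}

fn main() {
    assert_eq!(
        escape_xml_attr(r#"a "quoted" <title> & more"#),
        "a &quot;quoted&quot; &lt;title&gt; &amp; more"
    );
}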
@@ -662,6 +662,7 @@ pub(crate) fn recent_context_picker_entries(
|
||||
let mut recent = Vec::with_capacity(6);
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let include_root_name = workspace.visible_worktrees(cx).count() > 1;
|
||||
|
||||
recent.extend(
|
||||
workspace
|
||||
@@ -675,9 +676,16 @@ pub(crate) fn recent_context_picker_entries(
|
||||
.filter_map(|(project_path, _)| {
|
||||
project
|
||||
.worktree_for_id(project_path.worktree_id, cx)
|
||||
.map(|worktree| RecentEntry::File {
|
||||
project_path,
|
||||
path_prefix: worktree.read(cx).root_name().into(),
|
||||
.map(|worktree| {
|
||||
let path_prefix = if include_root_name {
|
||||
worktree.read(cx).root_name().into()
|
||||
} else {
|
||||
RelPath::empty().into()
|
||||
};
|
||||
RecentEntry::File {
|
||||
project_path,
|
||||
path_prefix,
|
||||
}
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -655,13 +655,12 @@ impl ContextPickerCompletionProvider {
|
||||
let SymbolLocation::InProject(symbol_path) = &symbol.path else {
|
||||
return None;
|
||||
};
|
||||
let path_prefix = workspace
|
||||
let _path_prefix = workspace
|
||||
.read(cx)
|
||||
.project()
|
||||
.read(cx)
|
||||
.worktree_for_id(symbol_path.worktree_id, cx)?
|
||||
.read(cx)
|
||||
.root_name();
|
||||
.worktree_for_id(symbol_path.worktree_id, cx)?;
|
||||
let path_prefix = RelPath::empty();
|
||||
|
||||
let (file_name, directory) = super::file_context_picker::extract_file_name_and_directory(
|
||||
&symbol_path.path,
|
||||
@@ -818,9 +817,21 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
return None;
|
||||
}
|
||||
|
||||
// If path is empty, this means we're matching with the root directory itself
|
||||
// so we use the path_prefix as the name
|
||||
let path_prefix = if mat.path.is_empty() {
|
||||
project
|
||||
.read(cx)
|
||||
.worktree_for_id(project_path.worktree_id, cx)
|
||||
.map(|wt| wt.read(cx).root_name().into())
|
||||
.unwrap_or_else(|| mat.path_prefix.clone())
|
||||
} else {
|
||||
mat.path_prefix.clone()
|
||||
};
|
||||
|
||||
Some(Self::completion_for_path(
|
||||
project_path,
|
||||
&mat.path_prefix,
|
||||
&path_prefix,
|
||||
is_recent,
|
||||
mat.is_dir,
|
||||
excerpt_id,
|
||||
@@ -1309,10 +1320,10 @@ mod tests {
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
&[
|
||||
format!("seven.txt dir{slash}b{slash}"),
|
||||
format!("six.txt dir{slash}b{slash}"),
|
||||
format!("five.txt dir{slash}b{slash}"),
|
||||
format!("four.txt dir{slash}a{slash}"),
|
||||
format!("seven.txt b{slash}"),
|
||||
format!("six.txt b{slash}"),
|
||||
format!("five.txt b{slash}"),
|
||||
format!("four.txt a{slash}"),
|
||||
"Files & Directories".into(),
|
||||
"Symbols".into(),
|
||||
"Fetch".into()
|
||||
@@ -1344,7 +1355,7 @@ mod tests {
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
vec![format!("one.txt dir{slash}a{slash}")]
|
||||
vec![format!("one.txt a{slash}")]
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1356,12 +1367,12 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) ")
|
||||
format!("Lorem [@one.txt](@file:a{slash}one.txt) ")
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
fold_ranges(editor, cx),
|
||||
vec![Point::new(0, 6)..Point::new(0, 37)]
|
||||
vec![Point::new(0, 6)..Point::new(0, 33)]
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1370,12 +1381,12 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) ")
|
||||
format!("Lorem [@one.txt](@file:a{slash}one.txt) ")
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
fold_ranges(editor, cx),
|
||||
vec![Point::new(0, 6)..Point::new(0, 37)]
|
||||
vec![Point::new(0, 6)..Point::new(0, 33)]
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1384,12 +1395,12 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum "),
|
||||
format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum "),
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
fold_ranges(editor, cx),
|
||||
vec![Point::new(0, 6)..Point::new(0, 37)]
|
||||
vec![Point::new(0, 6)..Point::new(0, 33)]
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1398,12 +1409,12 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum @file "),
|
||||
format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum @file "),
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
fold_ranges(editor, cx),
|
||||
vec![Point::new(0, 6)..Point::new(0, 37)]
|
||||
vec![Point::new(0, 6)..Point::new(0, 33)]
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1416,14 +1427,14 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) ")
|
||||
format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum [@seven.txt](@file:b{slash}seven.txt) ")
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
fold_ranges(editor, cx),
|
||||
vec![
|
||||
Point::new(0, 6)..Point::new(0, 37),
|
||||
Point::new(0, 45)..Point::new(0, 80)
|
||||
Point::new(0, 6)..Point::new(0, 33),
|
||||
Point::new(0, 41)..Point::new(0, 72)
|
||||
]
|
||||
);
|
||||
});
|
||||
@@ -1433,14 +1444,14 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) \n@")
|
||||
format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum [@seven.txt](@file:b{slash}seven.txt) \n@")
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
fold_ranges(editor, cx),
|
||||
vec![
|
||||
Point::new(0, 6)..Point::new(0, 37),
|
||||
Point::new(0, 45)..Point::new(0, 80)
|
||||
Point::new(0, 6)..Point::new(0, 33),
|
||||
Point::new(0, 41)..Point::new(0, 72)
|
||||
]
|
||||
);
|
||||
});
|
||||
@@ -1454,20 +1465,203 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem [@one.txt](@file:dir{slash}a{slash}one.txt) Ipsum [@seven.txt](@file:dir{slash}b{slash}seven.txt) \n[@six.txt](@file:dir{slash}b{slash}six.txt) ")
|
||||
format!("Lorem [@one.txt](@file:a{slash}one.txt) Ipsum [@seven.txt](@file:b{slash}seven.txt) \n[@six.txt](@file:b{slash}six.txt) ")
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
fold_ranges(editor, cx),
|
||||
vec![
|
||||
Point::new(0, 6)..Point::new(0, 37),
|
||||
Point::new(0, 45)..Point::new(0, 80),
|
||||
Point::new(1, 0)..Point::new(1, 31)
|
||||
Point::new(0, 6)..Point::new(0, 33),
|
||||
Point::new(0, 41)..Point::new(0, 72),
|
||||
Point::new(1, 0)..Point::new(1, 27)
|
||||
]
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_context_completion_provider_multiple_worktrees(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/project1"),
|
||||
json!({
|
||||
"a": {
|
||||
"one.txt": "",
|
||||
"two.txt": "",
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/project2"),
|
||||
json!({
|
||||
"b": {
|
||||
"three.txt": "",
|
||||
"four.txt": "",
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(
|
||||
app_state.fs.clone(),
|
||||
[path!("/project1").as_ref(), path!("/project2").as_ref()],
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let workspace = window.root(cx).unwrap();
|
||||
|
||||
let worktrees = project.update(cx, |project, cx| {
|
||||
let worktrees = project.worktrees(cx).collect::<Vec<_>>();
|
||||
assert_eq!(worktrees.len(), 2);
|
||||
worktrees
|
||||
});
|
||||
|
||||
let mut cx = VisualTestContext::from_window(*window.deref(), cx);
|
||||
let slash = PathStyle::local().separator();
|
||||
|
||||
for (worktree_idx, paths) in [
|
||||
vec![rel_path("a/one.txt"), rel_path("a/two.txt")],
|
||||
vec![rel_path("b/three.txt"), rel_path("b/four.txt")],
|
||||
]
|
||||
.iter()
|
||||
.enumerate()
|
||||
{
|
||||
let worktree_id = worktrees[worktree_idx].read_with(&cx, |wt, _| wt.id());
|
||||
for path in paths {
|
||||
workspace
|
||||
.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.open_path(
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: (*path).into(),
|
||||
},
|
||||
None,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
let editor = workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
Editor::new(
|
||||
editor::EditorMode::full(),
|
||||
multi_buffer::MultiBuffer::build_simple("", cx),
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
workspace.active_pane().update(cx, |pane, cx| {
|
||||
pane.add_item(
|
||||
Box::new(cx.new(|_| AtMentionEditor(editor.clone()))),
|
||||
true,
|
||||
true,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
editor
|
||||
});
|
||||
|
||||
let context_store = cx.new(|_| ContextStore::new(project.downgrade()));
|
||||
|
||||
let editor_entity = editor.downgrade();
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
window.focus(&editor.focus_handle(cx));
|
||||
editor.set_completion_provider(Some(Rc::new(ContextPickerCompletionProvider::new(
|
||||
workspace.downgrade(),
|
||||
context_store.downgrade(),
|
||||
None,
|
||||
None,
|
||||
editor_entity,
|
||||
None,
|
||||
))));
|
||||
});
|
||||
|
||||
cx.simulate_input("@");
|
||||
|
||||
// With multiple worktrees, we should see the project name as prefix
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "@");
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
let labels = current_completion_labels(editor);
|
||||
|
||||
assert!(
|
||||
labels.contains(&format!("four.txt project2{slash}b{slash}")),
|
||||
"Expected 'four.txt project2{slash}b{slash}' in labels: {:?}",
|
||||
labels
|
||||
);
|
||||
assert!(
|
||||
labels.contains(&format!("three.txt project2{slash}b{slash}")),
|
||||
"Expected 'three.txt project2{slash}b{slash}' in labels: {:?}",
|
||||
labels
|
||||
);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx);
|
||||
editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx);
|
||||
editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx);
|
||||
editor.context_menu_next(&editor::actions::ContextMenuNext, window, cx);
|
||||
editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx);
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "@file ");
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
});
|
||||
|
||||
cx.simulate_input("one");
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "@file one");
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
vec![format!("one.txt project1{slash}a{slash}")]
|
||||
);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx);
|
||||
});
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("[@one.txt](@file:project1{slash}a{slash}one.txt) ")
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
});
|
||||
}
|
||||
|
||||
fn fold_ranges(editor: &Editor, cx: &mut App) -> Vec<Range<Point>> {
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
editor.display_map.update(cx, |display_map, cx| {
|
||||
|
||||
@@ -197,34 +197,50 @@ pub(crate) fn search_files(
|
||||
if query.is_empty() {
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let visible_worktrees = workspace.visible_worktrees(cx).collect::<Vec<_>>();
|
||||
let include_root_name = visible_worktrees.len() > 1;
|
||||
|
||||
let recent_matches = workspace
|
||||
.recent_navigation_history(Some(10), cx)
|
||||
.into_iter()
|
||||
.filter_map(|(project_path, _)| {
|
||||
let worktree = project.worktree_for_id(project_path.worktree_id, cx)?;
|
||||
Some(FileMatch {
|
||||
.map(|(project_path, _)| {
|
||||
let path_prefix = if include_root_name {
|
||||
project
|
||||
.worktree_for_id(project_path.worktree_id, cx)
|
||||
.map(|wt| wt.read(cx).root_name().into())
|
||||
.unwrap_or_else(|| RelPath::empty().into())
|
||||
} else {
|
||||
RelPath::empty().into()
|
||||
};
|
||||
|
||||
FileMatch {
|
||||
mat: PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: project_path.worktree_id.to_usize(),
|
||||
path: project_path.path,
|
||||
path_prefix: worktree.read(cx).root_name().into(),
|
||||
path_prefix,
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir: false,
|
||||
},
|
||||
is_recent: true,
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
let file_matches = project.worktrees(cx).flat_map(|worktree| {
|
||||
let file_matches = visible_worktrees.into_iter().flat_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let path_prefix: Arc<RelPath> = if include_root_name {
|
||||
worktree.root_name().into()
|
||||
} else {
|
||||
RelPath::empty().into()
|
||||
};
|
||||
worktree.entries(false, 0).map(move |entry| FileMatch {
|
||||
mat: PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: worktree.id().to_usize(),
|
||||
path: entry.path.clone(),
|
||||
path_prefix: worktree.root_name().into(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir: entry.is_dir(),
|
||||
},
|
||||
@@ -235,6 +251,7 @@ pub(crate) fn search_files(
|
||||
Task::ready(recent_matches.chain(file_matches).collect())
|
||||
} else {
|
||||
let worktrees = workspace.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
|
||||
let include_root_name = worktrees.len() > 1;
|
||||
let candidate_sets = worktrees
|
||||
.into_iter()
|
||||
.map(|worktree| {
|
||||
@@ -243,7 +260,7 @@ pub(crate) fn search_files(
|
||||
PathMatchCandidateSet {
|
||||
snapshot: worktree.snapshot(),
|
||||
include_ignored: worktree.root_entry().is_some_and(|entry| entry.is_ignored),
|
||||
include_root_name: true,
|
||||
include_root_name,
|
||||
candidates: project::Candidates::Entries,
|
||||
}
|
||||
})
|
||||
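The rule applied throughout these pickers is the same: prefix an entry with its worktree root name only when more than one worktree is visible, otherwise use an empty prefix so paths stay relative. A simplified, hypothetical sketch of that decision (the real code works with RelPath and worktree snapshots):

fn display_prefix<'a>(root_name: &'a str, visible_worktree_count: usize) -> &'a str {
    // Only disambiguate with the root name when several worktrees are shown.
    if visible_worktree_count > 1 { root_name } else { "" }
}

fn main() {
    assert_eq!(display_prefix("project1", 2), "project1");
    assert_eq!(display_prefix("project1", 1), "");
}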
@@ -276,6 +293,12 @@ pub fn extract_file_name_and_directory(
|
||||
path_prefix: &RelPath,
|
||||
path_style: PathStyle,
|
||||
) -> (SharedString, Option<SharedString>) {
|
||||
// If path is empty, this means we're matching with the root directory itself
|
||||
// so we use the path_prefix as the name
|
||||
if path.is_empty() && !path_prefix.is_empty() {
|
||||
return (path_prefix.display(path_style).to_string().into(), None);
|
||||
}
|
||||
|
||||
let full_path = path_prefix.join(path);
|
||||
let file_name = full_path.file_name().unwrap_or_default();
|
||||
let display_path = full_path.display(path_style);
|
||||
|
||||
@@ -260,10 +260,10 @@ impl<T: 'static> PromptEditor<T> {
|
||||
|
||||
let agent_panel_keybinding =
|
||||
ui::text_for_action(&zed_actions::assistant::ToggleFocus, window, cx)
|
||||
.map(|keybinding| format!("{keybinding} to chat ― "))
|
||||
.map(|keybinding| format!("{keybinding} to chat"))
|
||||
.unwrap_or_default();
|
||||
|
||||
format!("{action}… ({agent_panel_keybinding}↓↑ for history)")
|
||||
format!("{action}… ({agent_panel_keybinding} ― ↓↑ for history — @ to include context)")
|
||||
}
|
||||
|
||||
pub fn prompt(&self, cx: &App) -> String {
|
||||
|
||||
@@ -477,7 +477,7 @@ impl TextThreadEditor {
|
||||
editor.insert(&format!("/{name}"), window, cx);
|
||||
if command.accepts_arguments() {
|
||||
editor.insert(" ", window, cx);
|
||||
editor.show_completions(&ShowCompletions::default(), window, cx);
|
||||
editor.show_completions(&ShowCompletions, window, cx);
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -2591,11 +2591,12 @@ impl SearchableItem for TextThreadEditor {
|
||||
&mut self,
|
||||
index: usize,
|
||||
matches: &[Self::Match],
|
||||
collapse: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.activate_match(index, matches, window, cx);
|
||||
editor.activate_match(index, matches, collapse, window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ use futures::{
|
||||
};
|
||||
use gpui::{AsyncApp, BackgroundExecutor, Task};
|
||||
use smol::fs;
|
||||
use util::{ResultExt as _, debug_panic, maybe, paths::PathExt};
|
||||
use util::{ResultExt as _, debug_panic, maybe, paths::PathExt, shell::ShellKind};
|
||||
|
||||
/// Path to the program used for askpass
|
||||
///
|
||||
@@ -199,9 +199,15 @@ impl PasswordProxy {
|
||||
let current_exec =
|
||||
std::env::current_exe().context("Failed to determine current zed executable path.")?;
|
||||
|
||||
// TODO: inferred from the use of powershell.exe in askpass_helper_script
|
||||
let shell_kind = if cfg!(windows) {
|
||||
ShellKind::PowerShell
|
||||
} else {
|
||||
ShellKind::Posix
|
||||
};
|
||||
let askpass_program = ASKPASS_PROGRAM
|
||||
.get_or_init(|| current_exec)
|
||||
.try_shell_safe()
|
||||
.try_shell_safe(shell_kind)
|
||||
.context("Failed to shell-escape Askpass program path.")?
|
||||
.to_string();
|
||||
// Create an askpass script that communicates back to this process.
|
||||
@@ -343,7 +349,7 @@ fn generate_askpass_script(askpass_program: &str, askpass_socket: &std::path::Pa
|
||||
format!(
|
||||
r#"
|
||||
$ErrorActionPreference = 'Stop';
|
||||
($args -join [char]0) | & "{askpass_program}" --askpass={askpass_socket} 2> $null
|
||||
($args -join [char]0) | & {askpass_program} --askpass={askpass_socket} 2> $null
|
||||
"#,
|
||||
askpass_socket = askpass_socket.display(),
|
||||
)
|
||||
|
||||
@@ -26,6 +26,7 @@ serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
tempfile.workspace = true
|
||||
util.workspace = true
|
||||
workspace.workspace = true
|
||||
|
||||
[target.'cfg(not(target_os = "windows"))'.dependencies]
|
||||
|
||||
@@ -331,6 +331,16 @@ impl AutoUpdater {
|
||||
|
||||
pub fn start_polling(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
cx.spawn(async move |this, cx| {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use util::ResultExt;
|
||||
|
||||
cleanup_windows()
|
||||
.await
|
||||
.context("failed to cleanup old directories")
|
||||
.log_err();
|
||||
}
|
||||
|
||||
loop {
|
||||
this.update(cx, |this, cx| this.poll(UpdateCheckType::Automatic, cx))?;
|
||||
cx.background_executor().timer(POLL_INTERVAL).await;
|
||||
@@ -923,6 +933,32 @@ async fn install_release_macos(
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn cleanup_windows() -> Result<()> {
|
||||
use util::ResultExt;
|
||||
|
||||
let parent = std::env::current_exe()?
|
||||
.parent()
|
||||
.context("No parent dir for Zed.exe")?
|
||||
.to_owned();
|
||||
|
||||
// keep in sync with crates/auto_update_helper/src/updater.rs
|
||||
smol::fs::remove_dir(parent.join("updates"))
|
||||
.await
|
||||
.context("failed to remove updates dir")
|
||||
.log_err();
|
||||
smol::fs::remove_dir(parent.join("install"))
|
||||
.await
|
||||
.context("failed to remove install dir")
|
||||
.log_err();
|
||||
smol::fs::remove_dir(parent.join("old"))
|
||||
.await
|
||||
.context("failed to remove old version dir")
|
||||
.log_err();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn install_release_windows(downloaded_installer: PathBuf) -> Result<Option<PathBuf>> {
|
||||
let output = Command::new(downloaded_installer)
|
||||
.arg("/verysilent")
|
||||
@@ -962,7 +998,7 @@ pub async fn finalize_auto_update_on_quit() {
|
||||
.parent()
|
||||
.map(|p| p.join("tools").join("auto_update_helper.exe"))
|
||||
{
|
||||
let mut command = smol::process::Command::new(helper);
|
||||
let mut command = util::command::new_smol_command(helper);
|
||||
command.arg("--launch");
|
||||
command.arg("false");
|
||||
if let Ok(mut cmd) = command.spawn() {
|
||||
|
||||
@@ -21,6 +21,9 @@ simplelog.workspace = true
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
windows.workspace = true
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dev-dependencies]
|
||||
tempfile.workspace = true
|
||||
|
||||
[target.'cfg(target_os = "windows")'.build-dependencies]
|
||||
winresource = "0.1"
|
||||
|
||||
|
||||
@@ -1,16 +1,32 @@
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0" xmlns:asmv3="urn:schemas-microsoft-com:asm.v3">
|
||||
<asmv3:application>
|
||||
<asmv3:windowsSettings>
|
||||
<dpiAware xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">true</dpiAware>
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
||||
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
|
||||
<security>
|
||||
<requestedPrivileges>
|
||||
<requestedExecutionLevel level="asInvoker" uiAccess="false" />
|
||||
</requestedPrivileges>
|
||||
</security>
|
||||
</trustInfo>
|
||||
<compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
|
||||
<application>
|
||||
<!-- Windows 10 -->
|
||||
<supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}" />
|
||||
</application>
|
||||
</compatibility>
|
||||
<application xmlns="urn:schemas-microsoft-com:asm.v3">
|
||||
<windowsSettings>
|
||||
<dpiAware xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">true/pm</dpiAware>
|
||||
<dpiAwareness xmlns="http://schemas.microsoft.com/SMI/2016/WindowsSettings">PerMonitorV2</dpiAwareness>
|
||||
</asmv3:windowsSettings>
|
||||
</asmv3:application>
|
||||
</windowsSettings>
|
||||
</application>
|
||||
<dependency>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity type='win32'
|
||||
<assemblyIdentity
|
||||
type='win32'
|
||||
name='Microsoft.Windows.Common-Controls'
|
||||
version='6.0.0.0' processorArchitecture='*'
|
||||
publicKeyToken='6595b64144ccf1df' />
|
||||
version='6.0.0.0'
|
||||
processorArchitecture='*'
|
||||
publicKeyToken='6595b64144ccf1df'
|
||||
/>
|
||||
</dependentAssembly>
|
||||
</dependency>
|
||||
</assembly>
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use std::{
|
||||
cell::LazyCell,
|
||||
path::Path,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
@@ -11,180 +12,274 @@ use windows::Win32::{
|
||||
|
||||
use crate::windows_impl::WM_JOB_UPDATED;
|
||||
|
||||
type Job = fn(&Path) -> Result<()>;
|
||||
pub(crate) struct Job {
|
||||
pub apply: Box<dyn Fn(&Path) -> Result<()>>,
|
||||
pub rollback: Box<dyn Fn(&Path) -> Result<()>>,
|
||||
}
|
||||
|
||||
impl Job {
|
||||
pub fn mkdir(name: &'static Path) -> Self {
|
||||
Job {
|
||||
apply: Box::new(move |app_dir| {
|
||||
let dir = app_dir.join(name);
|
||||
std::fs::create_dir_all(&dir)
|
||||
.context(format!("Failed to create directory {}", dir.display()))
|
||||
}),
|
||||
rollback: Box::new(move |app_dir| {
|
||||
let dir = app_dir.join(name);
|
||||
std::fs::remove_dir_all(&dir)
|
||||
.context(format!("Failed to remove directory {}", dir.display()))
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mkdir_if_exists(name: &'static Path, check: &'static Path) -> Self {
|
||||
Job {
|
||||
apply: Box::new(move |app_dir| {
|
||||
let dir = app_dir.join(name);
|
||||
let check = app_dir.join(check);
|
||||
|
||||
if check.exists() {
|
||||
std::fs::create_dir_all(&dir)
|
||||
.context(format!("Failed to create directory {}", dir.display()))?
|
||||
}
|
||||
Ok(())
|
||||
}),
|
||||
rollback: Box::new(move |app_dir| {
|
||||
let dir = app_dir.join(name);
|
||||
|
||||
if dir.exists() {
|
||||
std::fs::remove_dir_all(&dir)
|
||||
.context(format!("Failed to remove directory {}", dir.display()))?
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn move_file(filename: &'static Path, new_filename: &'static Path) -> Self {
|
||||
Job {
|
||||
apply: Box::new(move |app_dir| {
|
||||
let old_file = app_dir.join(filename);
|
||||
let new_file = app_dir.join(new_filename);
|
||||
log::info!(
|
||||
"Moving file: {}->{}",
|
||||
old_file.display(),
|
||||
new_file.display()
|
||||
);
|
||||
|
||||
std::fs::rename(&old_file, new_file)
|
||||
.context(format!("Failed to move file {}", old_file.display()))
|
||||
}),
|
||||
rollback: Box::new(move |app_dir| {
|
||||
let old_file = app_dir.join(filename);
|
||||
let new_file = app_dir.join(new_filename);
|
||||
log::info!(
|
||||
"Rolling back file move: {}->{}",
|
||||
old_file.display(),
|
||||
new_file.display()
|
||||
);
|
||||
|
||||
std::fs::rename(&new_file, &old_file).context(format!(
|
||||
"Failed to rollback file move {}->{}",
|
||||
new_file.display(),
|
||||
old_file.display()
|
||||
))
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn move_if_exists(filename: &'static Path, new_filename: &'static Path) -> Self {
|
||||
Job {
|
||||
apply: Box::new(move |app_dir| {
|
||||
let old_file = app_dir.join(filename);
|
||||
let new_file = app_dir.join(new_filename);
|
||||
|
||||
if old_file.exists() {
|
||||
log::info!(
|
||||
"Moving file: {}->{}",
|
||||
old_file.display(),
|
||||
new_file.display()
|
||||
);
|
||||
|
||||
std::fs::rename(&old_file, new_file)
|
||||
.context(format!("Failed to move file {}", old_file.display()))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}),
|
||||
rollback: Box::new(move |app_dir| {
|
||||
let old_file = app_dir.join(filename);
|
||||
let new_file = app_dir.join(new_filename);
|
||||
|
||||
if new_file.exists() {
|
||||
log::info!(
|
||||
"Rolling back file move: {}->{}",
|
||||
old_file.display(),
|
||||
new_file.display()
|
||||
);
|
||||
|
||||
std::fs::rename(&new_file, &old_file).context(format!(
|
||||
"Failed to rollback file move {}->{}",
|
||||
new_file.display(),
|
||||
old_file.display()
|
||||
))?
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn rmdir_nofail(filename: &'static Path) -> Self {
|
||||
Job {
|
||||
apply: Box::new(move |app_dir| {
|
||||
let filename = app_dir.join(filename);
|
||||
log::info!("Removing file: {}", filename.display());
|
||||
if let Err(e) = std::fs::remove_dir_all(&filename) {
|
||||
log::warn!("Failed to remove directory: {}", e);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}),
|
||||
rollback: Box::new(move |app_dir| {
|
||||
let filename = app_dir.join(filename);
|
||||
anyhow::bail!(
|
||||
"Delete operations cannot be rolled back, file: {}",
|
||||
filename.display()
|
||||
)
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// app is single threaded
|
||||
#[cfg(not(test))]
|
||||
pub(crate) const JOBS: &[Job] = &[
|
||||
// Delete old files
|
||||
|app_dir| {
|
||||
let zed_executable = app_dir.join("Zed.exe");
|
||||
log::info!("Removing old file: {}", zed_executable.display());
|
||||
std::fs::remove_file(&zed_executable).context(format!(
|
||||
"Failed to remove old file {}",
|
||||
zed_executable.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let zed_cli = app_dir.join("bin\\zed.exe");
|
||||
log::info!("Removing old file: {}", zed_cli.display());
|
||||
std::fs::remove_file(&zed_cli)
|
||||
.context(format!("Failed to remove old file {}", zed_cli.display()))
|
||||
},
|
||||
|app_dir| {
|
||||
let zed_wsl = app_dir.join("bin\\zed");
|
||||
log::info!("Removing old file: {}", zed_wsl.display());
|
||||
std::fs::remove_file(&zed_wsl)
|
||||
.context(format!("Failed to remove old file {}", zed_wsl.display()))
|
||||
},
|
||||
|app_dir| {
|
||||
let open_console = app_dir.join("OpenConsole.exe");
|
||||
log::info!("Removing old file: {}", open_console.display());
|
||||
std::fs::remove_file(&open_console).context(format!(
|
||||
"Failed to remove old file {}",
|
||||
open_console.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let conpty = app_dir.join("conpty.dll");
|
||||
log::info!("Removing old file: {}", conpty.display());
|
||||
std::fs::remove_file(&conpty)
|
||||
.context(format!("Failed to remove old file {}", conpty.display()))
|
||||
},
|
||||
// Copy new files
|
||||
|app_dir| {
|
||||
let zed_executable_source = app_dir.join("install\\Zed.exe");
|
||||
let zed_executable_dest = app_dir.join("Zed.exe");
|
||||
log::info!(
|
||||
"Copying new file {} to {}",
|
||||
zed_executable_source.display(),
|
||||
zed_executable_dest.display()
|
||||
);
|
||||
std::fs::copy(&zed_executable_source, &zed_executable_dest)
|
||||
.map(|_| ())
|
||||
.context(format!(
|
||||
"Failed to copy new file {} to {}",
|
||||
zed_executable_source.display(),
|
||||
zed_executable_dest.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let zed_cli_source = app_dir.join("install\\bin\\zed.exe");
|
||||
let zed_cli_dest = app_dir.join("bin\\zed.exe");
|
||||
log::info!(
|
||||
"Copying new file {} to {}",
|
||||
zed_cli_source.display(),
|
||||
zed_cli_dest.display()
|
||||
);
|
||||
std::fs::copy(&zed_cli_source, &zed_cli_dest)
|
||||
.map(|_| ())
|
||||
.context(format!(
|
||||
"Failed to copy new file {} to {}",
|
||||
zed_cli_source.display(),
|
||||
zed_cli_dest.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let zed_wsl_source = app_dir.join("install\\bin\\zed");
|
||||
let zed_wsl_dest = app_dir.join("bin\\zed");
|
||||
log::info!(
|
||||
"Copying new file {} to {}",
|
||||
zed_wsl_source.display(),
|
||||
zed_wsl_dest.display()
|
||||
);
|
||||
std::fs::copy(&zed_wsl_source, &zed_wsl_dest)
|
||||
.map(|_| ())
|
||||
.context(format!(
|
||||
"Failed to copy new file {} to {}",
|
||||
zed_wsl_source.display(),
|
||||
zed_wsl_dest.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let open_console_source = app_dir.join("install\\OpenConsole.exe");
|
||||
let open_console_dest = app_dir.join("OpenConsole.exe");
|
||||
log::info!(
|
||||
"Copying new file {} to {}",
|
||||
open_console_source.display(),
|
||||
open_console_dest.display()
|
||||
);
|
||||
std::fs::copy(&open_console_source, &open_console_dest)
|
||||
.map(|_| ())
|
||||
.context(format!(
|
||||
"Failed to copy new file {} to {}",
|
||||
open_console_source.display(),
|
||||
open_console_dest.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let conpty_source = app_dir.join("install\\conpty.dll");
|
||||
let conpty_dest = app_dir.join("conpty.dll");
|
||||
log::info!(
|
||||
"Copying new file {} to {}",
|
||||
conpty_source.display(),
|
||||
conpty_dest.display()
|
||||
);
|
||||
std::fs::copy(&conpty_source, &conpty_dest)
|
||||
.map(|_| ())
|
||||
.context(format!(
|
||||
"Failed to copy new file {} to {}",
|
||||
conpty_source.display(),
|
||||
conpty_dest.display()
|
||||
))
|
||||
},
|
||||
// Clean up installer folder and updates folder
|
||||
|app_dir| {
|
||||
let updates_folder = app_dir.join("updates");
|
||||
log::info!("Cleaning up: {}", updates_folder.display());
|
||||
std::fs::remove_dir_all(&updates_folder).context(format!(
|
||||
"Failed to remove updates folder {}",
|
||||
updates_folder.display()
|
||||
))
|
||||
},
|
||||
|app_dir| {
|
||||
let installer_folder = app_dir.join("install");
|
||||
log::info!("Cleaning up: {}", installer_folder.display());
|
||||
std::fs::remove_dir_all(&installer_folder).context(format!(
|
||||
"Failed to remove installer folder {}",
|
||||
installer_folder.display()
|
||||
))
|
||||
},
|
||||
];
|
||||
#[allow(clippy::declare_interior_mutable_const)]
|
||||
pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
|
||||
fn p(value: &str) -> &Path {
|
||||
Path::new(value)
|
||||
}
|
||||
[
|
||||
// Move old files
|
||||
// Not deleting because installing new files can fail
|
||||
Job::mkdir(p("old")),
|
||||
Job::move_file(p("Zed.exe"), p("old\\Zed.exe")),
|
||||
Job::mkdir(p("old\\bin")),
|
||||
Job::move_file(p("bin\\Zed.exe"), p("old\\bin\\Zed.exe")),
|
||||
Job::move_file(p("bin\\zed"), p("old\\bin\\zed")),
|
||||
//
|
||||
// TODO: remove after a few weeks once everyone is on the new version and this file never exists
|
||||
Job::move_if_exists(p("OpenConsole.exe"), p("old\\OpenConsole.exe")),
|
||||
Job::mkdir(p("old\\x64")),
|
||||
Job::mkdir(p("old\\arm64")),
|
||||
Job::move_if_exists(p("x64\\OpenConsole.exe"), p("old\\x64\\OpenConsole.exe")),
|
||||
Job::move_if_exists(
|
||||
p("arm64\\OpenConsole.exe"),
|
||||
p("old\\arm64\\OpenConsole.exe"),
|
||||
),
|
||||
//
|
||||
Job::move_file(p("conpty.dll"), p("old\\conpty.dll")),
|
||||
// Copy new files
|
||||
Job::move_file(p("install\\Zed.exe"), p("Zed.exe")),
|
||||
Job::move_file(p("install\\bin\\Zed.exe"), p("bin\\Zed.exe")),
|
||||
Job::move_file(p("install\\bin\\zed"), p("bin\\zed")),
|
||||
//
|
||||
Job::mkdir_if_exists(p("x64"), p("install\\x64")),
|
||||
Job::mkdir_if_exists(p("arm64"), p("install\\arm64")),
|
||||
Job::move_if_exists(
|
||||
p("install\\x64\\OpenConsole.exe"),
|
||||
p("x64\\OpenConsole.exe"),
|
||||
),
|
||||
Job::move_if_exists(
|
||||
p("install\\arm64\\OpenConsole.exe"),
|
||||
p("arm64\\OpenConsole.exe"),
|
||||
),
|
||||
//
|
||||
Job::move_file(p("install\\conpty.dll"), p("conpty.dll")),
|
||||
// Cleanup installer and updates folder
|
||||
Job::rmdir_nofail(p("updates")),
|
||||
Job::rmdir_nofail(p("install")),
|
||||
// Cleanup old installation
|
||||
Job::rmdir_nofail(p("old")),
|
||||
]
|
||||
});
|
||||
|
||||
// app is single threaded
|
||||
#[cfg(test)]
|
||||
pub(crate) const JOBS: &[Job] = &[
|
||||
|_| {
|
||||
std::thread::sleep(Duration::from_millis(1000));
|
||||
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
|
||||
match config.as_str() {
|
||||
"err" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"),
|
||||
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
},
|
||||
|_| {
|
||||
std::thread::sleep(Duration::from_millis(1000));
|
||||
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
|
||||
match config.as_str() {
|
||||
"err" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"),
|
||||
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
},
|
||||
];
|
||||
#[allow(clippy::declare_interior_mutable_const)]
|
||||
pub(crate) const JOBS: LazyCell<[Job; 9]> = LazyCell::new(|| {
|
||||
fn p(value: &str) -> &Path {
|
||||
Path::new(value)
|
||||
}
|
||||
[
|
||||
Job {
|
||||
apply: Box::new(|_| {
|
||||
std::thread::sleep(Duration::from_millis(1000));
|
||||
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
|
||||
match config.as_str() {
|
||||
"err1" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"),
|
||||
"err2" => Ok(()),
|
||||
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}),
|
||||
rollback: Box::new(|_| {
|
||||
unsafe { std::env::set_var("ZED_AUTO_UPDATE_RB", "rollback1") };
|
||||
Ok(())
|
||||
}),
|
||||
},
|
||||
Job::mkdir(p("test1")),
|
||||
Job::mkdir_if_exists(p("test_exists"), p("test1")),
|
||||
Job::mkdir_if_exists(p("test_missing"), p("dont")),
|
||||
Job {
|
||||
apply: Box::new(|folder| {
|
||||
std::fs::write(folder.join("test1/test"), "test")?;
|
||||
Ok(())
|
||||
}),
|
||||
rollback: Box::new(|folder| {
|
||||
std::fs::remove_file(folder.join("test1/test"))?;
|
||||
Ok(())
|
||||
}),
|
||||
},
|
||||
Job::move_file(p("test1/test"), p("test1/moved")),
|
||||
Job::move_if_exists(p("test1/test"), p("test1/noop")),
|
||||
Job {
|
||||
apply: Box::new(|_| {
|
||||
std::thread::sleep(Duration::from_millis(1000));
|
||||
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
|
||||
match config.as_str() {
|
||||
"err1" => Ok(()),
|
||||
"err2" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"),
|
||||
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}),
|
||||
rollback: Box::new(|_| Ok(())),
|
||||
},
|
||||
Job::rmdir_nofail(p("test1/nofolder")),
|
||||
]
|
||||
});
|
||||
|
||||
pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>, launch: bool) -> Result<()> {
|
||||
let hwnd = hwnd.map(|ptr| HWND(ptr as _));
|
||||
|
||||
for job in JOBS.iter() {
|
||||
let mut last_successful_job = None;
|
||||
'outer: for (i, job) in JOBS.iter().enumerate() {
|
||||
let start = Instant::now();
|
||||
loop {
|
||||
anyhow::ensure!(start.elapsed().as_secs() <= 2, "Timed out");
|
||||
match (*job)(app_dir) {
|
||||
if start.elapsed().as_secs() > 2 {
|
||||
log::error!("Timed out, rolling back");
|
||||
break 'outer;
|
||||
}
|
||||
match (job.apply)(app_dir) {
|
||||
Ok(_) => {
|
||||
last_successful_job = Some(i);
|
||||
unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
|
||||
break;
|
||||
}
|
||||
@@ -193,16 +288,39 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>, launch: bool)
|
||||
let io_err = err.downcast_ref::<std::io::Error>().unwrap();
|
||||
if io_err.kind() == std::io::ErrorKind::NotFound {
|
||||
log::warn!("File or folder not found.");
|
||||
last_successful_job = Some(i);
|
||||
unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
|
||||
break;
|
||||
}
|
||||
|
||||
log::error!("Operation failed: {}", err);
|
||||
log::error!("Operation failed: {} ({:?})", err, io_err.kind());
|
||||
std::thread::sleep(Duration::from_millis(50));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if last_successful_job
|
||||
.map(|job| job != JOBS.len() - 1)
|
||||
.unwrap_or(true)
|
||||
{
|
||||
let Some(last_successful_job) = last_successful_job else {
|
||||
anyhow::bail!("Autoupdate failed, nothing to rollback");
|
||||
};
|
||||
|
||||
for job in (0..=last_successful_job).rev() {
|
||||
let job = &JOBS[job];
|
||||
if let Err(e) = (job.rollback)(app_dir) {
|
||||
anyhow::bail!(
|
||||
"Job rollback failed, the app might be left in an inconsistent state: ({:?})",
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::bail!("Autoupdate failed, rollback successful");
|
||||
}
|
||||
|
||||
if launch {
|
||||
#[allow(clippy::disallowed_methods, reason = "doesn't run in the main binary")]
|
||||
let _ = std::process::Command::new(app_dir.join("Zed.exe")).spawn();
|
||||
@@ -217,12 +335,27 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_perform_update() {
|
||||
let app_dir = std::path::Path::new("C:/");
|
||||
let app_dir = tempfile::tempdir().unwrap();
|
||||
let app_dir = app_dir.path();
|
||||
assert!(perform_update(app_dir, None, false).is_ok());
|
||||
|
||||
let app_dir = tempfile::tempdir().unwrap();
|
||||
let app_dir = app_dir.path();
|
||||
// Simulate a timeout
|
||||
unsafe { std::env::set_var("ZED_AUTO_UPDATE", "err") };
|
||||
unsafe { std::env::set_var("ZED_AUTO_UPDATE", "err1") };
|
||||
let ret = perform_update(app_dir, None, false);
|
||||
assert!(ret.is_err_and(|e| e.to_string().as_str() == "Timed out"));
|
||||
assert!(
|
||||
ret.is_err_and(|e| e.to_string().as_str() == "Autoupdate failed, nothing to rollback")
|
||||
);
|
||||
|
||||
let app_dir = tempfile::tempdir().unwrap();
|
||||
let app_dir = app_dir.path();
|
||||
// Simulate a timeout
|
||||
unsafe { std::env::set_var("ZED_AUTO_UPDATE", "err2") };
|
||||
let ret = perform_update(app_dir, None, false);
|
||||
assert!(
|
||||
ret.is_err_and(|e| e.to_string().as_str() == "Autoupdate failed, rollback successful")
|
||||
);
|
||||
assert!(std::env::var("ZED_AUTO_UPDATE_RB").is_ok_and(|e| e == "rollback1"));
|
||||
}
|
||||
}
|
||||
|
||||
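The new updater jobs pair every apply step with a rollback, and perform_update undoes completed steps in reverse order when a later step keeps failing. A standalone simplification of that control flow (illustrative; the real jobs operate on the app directory, retry with a timeout, and post progress messages to the window):

struct Step {
    apply: Box<dyn Fn() -> Result<(), String>>,
    rollback: Box<dyn Fn() -> Result<(), String>>,
}

fn run(steps: &[Step]) -> Result<(), String> {
    let mut last_ok: Option<usize> = None;
    for (i, step) in steps.iter().enumerate() {
        if (step.apply)().is_err() {
            // Stop applying; whether to roll back is decided below.
            break;
        }
        last_ok = Some(i);
    }
    if last_ok.map_or(true, |i| i + 1 != steps.len()) {
        // Not every step succeeded: undo the completed ones in reverse order.
        let Some(last_ok) = last_ok else {
            return Err("update failed, nothing to roll back".into());
        };
        for i in (0..=last_ok).rev() {
            (steps[i].rollback)()?;
        }
        return Err("update failed, rollback successful".into());
    }
    Ok(())
}

fn main() {
    let steps = vec![
        Step { apply: Box::new(|| Ok(())), rollback: Box::new(|| Ok(())) },
        Step { apply: Box::new(|| Err("simulated failure".into())), rollback: Box::new(|| Ok(())) },
    ];
    assert!(run(&steps).is_err());
}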
@@ -66,6 +66,8 @@ pub enum Model {
|
||||
Claude3Sonnet,
|
||||
#[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
|
||||
Claude3_5Haiku,
|
||||
#[serde(rename = "claude-haiku-4-5", alias = "claude-haiku-4-5-latest")]
|
||||
ClaudeHaiku4_5,
|
||||
Claude3_5Sonnet,
|
||||
Claude3Haiku,
|
||||
// Amazon Nova Models
|
||||
@@ -147,6 +149,8 @@ impl Model {
|
||||
Ok(Self::Claude3Sonnet)
|
||||
} else if id.starts_with("claude-3-5-haiku") {
|
||||
Ok(Self::Claude3_5Haiku)
|
||||
} else if id.starts_with("claude-haiku-4-5") {
|
||||
Ok(Self::ClaudeHaiku4_5)
|
||||
} else if id.starts_with("claude-3-7-sonnet") {
|
||||
Ok(Self::Claude3_7Sonnet)
|
||||
} else if id.starts_with("claude-3-7-sonnet-thinking") {
|
||||
@@ -180,6 +184,7 @@ impl Model {
|
||||
Model::Claude3Sonnet => "claude-3-sonnet",
|
||||
Model::Claude3Haiku => "claude-3-haiku",
|
||||
Model::Claude3_5Haiku => "claude-3-5-haiku",
|
||||
Model::ClaudeHaiku4_5 => "claude-haiku-4-5",
|
||||
Model::Claude3_7Sonnet => "claude-3-7-sonnet",
|
||||
Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking",
|
||||
Model::AmazonNovaLite => "amazon-nova-lite",
|
||||
@@ -246,6 +251,7 @@ impl Model {
|
||||
Model::Claude3Sonnet => "anthropic.claude-3-sonnet-20240229-v1:0",
|
||||
Model::Claude3Haiku => "anthropic.claude-3-haiku-20240307-v1:0",
|
||||
Model::Claude3_5Haiku => "anthropic.claude-3-5-haiku-20241022-v1:0",
|
||||
Model::ClaudeHaiku4_5 => "anthropic.claude-haiku-4-5-20251001-v1:0",
|
||||
Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => {
|
||||
"anthropic.claude-3-7-sonnet-20250219-v1:0"
|
||||
}
|
||||
@@ -309,6 +315,7 @@ impl Model {
|
||||
Self::Claude3Sonnet => "Claude 3 Sonnet",
|
||||
Self::Claude3Haiku => "Claude 3 Haiku",
|
||||
Self::Claude3_5Haiku => "Claude 3.5 Haiku",
|
||||
Self::ClaudeHaiku4_5 => "Claude Haiku 4.5",
|
||||
Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
|
||||
Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
|
||||
Self::AmazonNovaLite => "Amazon Nova Lite",
|
||||
@@ -363,6 +370,7 @@ impl Model {
|
||||
| Self::Claude3Opus
|
||||
| Self::Claude3Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::ClaudeHaiku4_5
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeOpus4
|
||||
@@ -385,7 +393,7 @@ impl Model {
|
||||
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096,
|
||||
Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000,
|
||||
Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => 64_000,
|
||||
Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => 64_000,
|
||||
Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking | Self::ClaudeHaiku4_5 => 64_000,
|
||||
Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeOpus4_1
|
||||
@@ -404,6 +412,7 @@ impl Model {
|
||||
| Self::Claude3Opus
|
||||
| Self::Claude3Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::ClaudeHaiku4_5
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
@@ -438,7 +447,8 @@ impl Model {
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeSonnet4_5
|
||||
| Self::ClaudeSonnet4_5Thinking
|
||||
| Self::Claude3_5Haiku => true,
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::ClaudeHaiku4_5 => true,
|
||||
|
||||
// Amazon Nova models (all support tool use)
|
||||
Self::AmazonNovaPremier
|
||||
@@ -464,6 +474,7 @@ impl Model {
|
||||
// Nova models support only text caching
|
||||
// https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-caching.html#prompt-caching-models
|
||||
Self::Claude3_5Haiku
|
||||
| Self::ClaudeHaiku4_5
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::ClaudeSonnet4
|
||||
@@ -500,7 +511,7 @@ impl Model {
|
||||
min_total_token: 1024,
|
||||
}),
|
||||
|
||||
Self::Claude3_5Haiku => Some(BedrockModelCacheConfiguration {
|
||||
Self::Claude3_5Haiku | Self::ClaudeHaiku4_5 => Some(BedrockModelCacheConfiguration {
|
||||
max_cache_anchors: 4,
|
||||
min_total_token: 2048,
|
||||
}),
|
||||
@@ -569,6 +580,7 @@ impl Model {
|
||||
(
|
||||
Model::AmazonNovaPremier
|
||||
| Model::Claude3_5Haiku
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::Claude3_5Sonnet
|
||||
| Model::Claude3_5SonnetV2
|
||||
| Model::Claude3_7Sonnet
|
||||
@@ -606,6 +618,7 @@ impl Model {
|
||||
// Models available in EU
|
||||
(
|
||||
Model::Claude3_5Sonnet
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::Claude3_7Sonnet
|
||||
| Model::Claude3_7SonnetThinking
|
||||
| Model::ClaudeSonnet4
|
||||
@@ -624,6 +637,7 @@ impl Model {
|
||||
(
|
||||
Model::Claude3_5Sonnet
|
||||
| Model::Claude3_5SonnetV2
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::Claude3Haiku
|
||||
| Model::Claude3Sonnet
|
||||
| Model::Claude3_7Sonnet
|
||||
|
||||
@@ -100,13 +100,21 @@ impl Render for Breadcrumbs {
|
||||
|
||||
let breadcrumbs_stack = h_flex().gap_1().children(breadcrumbs);
|
||||
|
||||
let prefix_element = active_item.breadcrumb_prefix(window, cx);
|
||||
|
||||
let breadcrumbs = if let Some(prefix) = prefix_element {
|
||||
h_flex().gap_1p5().child(prefix).child(breadcrumbs_stack)
|
||||
} else {
|
||||
breadcrumbs_stack
|
||||
};
|
||||
|
||||
match active_item
|
||||
.downcast::<Editor>()
|
||||
.map(|editor| editor.downgrade())
|
||||
{
|
||||
Some(editor) => element.child(
|
||||
ButtonLike::new("toggle outline view")
|
||||
.child(breadcrumbs_stack)
|
||||
.child(breadcrumbs)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click({
|
||||
let editor = editor.clone();
|
||||
@@ -141,7 +149,7 @@ impl Render for Breadcrumbs {
|
||||
// Match the height and padding of the `ButtonLike` in the other arm.
|
||||
.h(rems_from_px(22.))
|
||||
.pl_1()
|
||||
.child(breadcrumbs_stack),
|
||||
.child(breadcrumbs),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1162,34 +1162,22 @@ impl BufferDiff {
|
||||
self.hunks_intersecting_range(start..end, buffer, cx)
|
||||
}
|
||||
|
||||
pub fn set_base_text_buffer(
|
||||
&mut self,
|
||||
base_buffer: Entity<language::Buffer>,
|
||||
buffer: text::BufferSnapshot,
|
||||
cx: &mut Context<Self>,
|
||||
) -> oneshot::Receiver<()> {
|
||||
let base_buffer = base_buffer.read(cx);
|
||||
let language_registry = base_buffer.language_registry();
|
||||
let base_buffer = base_buffer.snapshot();
|
||||
self.set_base_text(base_buffer, language_registry, buffer, cx)
|
||||
}
|
||||
|
||||
/// Used in cases where the change set isn't derived from git.
|
||||
pub fn set_base_text(
|
||||
&mut self,
|
||||
base_buffer: language::BufferSnapshot,
|
||||
base_text: Option<Arc<String>>,
|
||||
language: Option<Arc<Language>>,
|
||||
language_registry: Option<Arc<LanguageRegistry>>,
|
||||
buffer: text::BufferSnapshot,
|
||||
cx: &mut Context<Self>,
|
||||
) -> oneshot::Receiver<()> {
|
||||
let (tx, rx) = oneshot::channel();
|
||||
let this = cx.weak_entity();
|
||||
let base_text = Arc::new(base_buffer.text());
|
||||
|
||||
let snapshot = BufferDiffSnapshot::new_with_base_text(
|
||||
buffer.clone(),
|
||||
Some(base_text),
|
||||
base_buffer.language().cloned(),
|
||||
base_text,
|
||||
language,
|
||||
language_registry,
|
||||
cx,
|
||||
);
|
||||
|
||||
@@ -32,6 +32,7 @@ release_channel.workspace = true
|
||||
serde.workspace = true
|
||||
util.workspace = true
|
||||
tempfile.workspace = true
|
||||
rayon.workspace = true
|
||||
|
||||
[target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies]
|
||||
exec.workspace = true
|
||||
|
||||
@@ -356,6 +356,13 @@ fn main() -> Result<()> {
|
||||
"Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
|
||||
);
|
||||
|
||||
rayon::ThreadPoolBuilder::new()
|
||||
.num_threads(4)
|
||||
.stack_size(10 * 1024 * 1024)
|
||||
.thread_name(|ix| format!("RayonWorker{}", ix))
|
||||
.build_global()
|
||||
.unwrap();
|
||||
|
||||
let sender: JoinHandle<anyhow::Result<()>> = thread::Builder::new()
|
||||
.name("CliReceiver".to_string())
|
||||
.spawn({
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
pub mod predict_edits_v3;
|
||||
pub mod udiff;
|
||||
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
@@ -23,7 +23,11 @@ pub struct PredictEditsRequest {
|
||||
pub cursor_point: Point,
|
||||
/// Within `signatures`
|
||||
pub excerpt_parent: Option<usize>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub included_files: Vec<IncludedFile>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub signatures: Vec<Signature>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub referenced_declarations: Vec<ReferencedDeclaration>,
|
||||
pub events: Vec<Event>,
|
||||
#[serde(default)]
|
||||
@@ -44,6 +48,19 @@ pub struct PredictEditsRequest {
|
||||
pub prompt_format: PromptFormat,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct IncludedFile {
|
||||
pub path: Arc<Path>,
|
||||
pub max_row: Line,
|
||||
pub excerpts: Vec<Excerpt>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Excerpt {
|
||||
pub start_line: Line,
|
||||
pub text: Arc<str>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, EnumIter)]
|
||||
pub enum PromptFormat {
|
||||
MarkedExcerpt,
|
||||
|
||||
crates/cloud_llm_client/src/udiff.rs (new file, 294 lines)
@@ -0,0 +1,294 @@
use std::{borrow::Cow, fmt::Display};

#[derive(Debug, PartialEq)]
pub enum DiffLine<'a> {
    OldPath { path: Cow<'a, str> },
    NewPath { path: Cow<'a, str> },
    HunkHeader(Option<HunkLocation>),
    Context(&'a str),
    Deletion(&'a str),
    Addition(&'a str),
    Garbage(&'a str),
}

#[derive(Debug, PartialEq)]
pub struct HunkLocation {
    start_line_old: u32,
    count_old: u32,
    start_line_new: u32,
    count_new: u32,
}

impl<'a> DiffLine<'a> {
    pub fn parse(line: &'a str) -> Self {
        Self::try_parse(line).unwrap_or(Self::Garbage(line))
    }

    fn try_parse(line: &'a str) -> Option<Self> {
        if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) {
            let path = parse_header_path("a/", header);
            Some(Self::OldPath { path })
        } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) {
            Some(Self::NewPath {
                path: parse_header_path("b/", header),
            })
        } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) {
            if header.starts_with("...") {
                return Some(Self::HunkHeader(None));
            }

            let (start_line_old, header) = header.strip_prefix('-')?.split_once(',')?;
            let mut parts = header.split_ascii_whitespace();
            let count_old = parts.next()?;
            let (start_line_new, count_new) = parts.next()?.strip_prefix('+')?.split_once(',')?;

            Some(Self::HunkHeader(Some(HunkLocation {
                start_line_old: start_line_old.parse::<u32>().ok()?.saturating_sub(1),
                count_old: count_old.parse().ok()?,
                start_line_new: start_line_new.parse::<u32>().ok()?.saturating_sub(1),
                count_new: count_new.parse().ok()?,
            })))
        } else if let Some(deleted_header) = line.strip_prefix("-") {
            Some(Self::Deletion(deleted_header))
        } else if line.is_empty() {
            Some(Self::Context(""))
        } else if let Some(context) = line.strip_prefix(" ") {
            Some(Self::Context(context))
        } else {
            Some(Self::Addition(line.strip_prefix("+")?))
        }
    }
}

impl<'a> Display for DiffLine<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            DiffLine::OldPath { path } => write!(f, "--- {path}"),
            DiffLine::NewPath { path } => write!(f, "+++ {path}"),
            DiffLine::HunkHeader(Some(hunk_location)) => {
                write!(
                    f,
                    "@@ -{},{} +{},{} @@",
                    hunk_location.start_line_old + 1,
                    hunk_location.count_old,
                    hunk_location.start_line_new + 1,
                    hunk_location.count_new
                )
            }
            DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"),
            DiffLine::Context(content) => write!(f, " {content}"),
            DiffLine::Deletion(content) => write!(f, "-{content}"),
            DiffLine::Addition(content) => write!(f, "+{content}"),
            DiffLine::Garbage(line) => write!(f, "{line}"),
        }
    }
}

fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> {
    if !header.contains(['"', '\\']) {
        let path = header.split_ascii_whitespace().next().unwrap_or(header);
        return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path));
    }

    let mut path = String::with_capacity(header.len());
    let mut in_quote = false;
    let mut chars = header.chars().peekable();
    let mut strip_prefix = Some(strip_prefix);

    while let Some(char) = chars.next() {
        if char == '"' {
            in_quote = !in_quote;
        } else if char == '\\' {
            let Some(&next_char) = chars.peek() else {
                break;
            };
            chars.next();
            path.push(next_char);
        } else if char.is_ascii_whitespace() && !in_quote {
            break;
        } else {
            path.push(char);
        }

        if let Some(prefix) = strip_prefix
            && path == prefix
        {
            strip_prefix.take();
            path.clear();
        }
    }

    Cow::Owned(path)
}

fn eat_required_whitespace(header: &str) -> Option<&str> {
    let trimmed = header.trim_ascii_start();

    if trimmed.len() == header.len() {
        None
    } else {
        Some(trimmed)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use indoc::indoc;

    #[test]
    fn parse_lines_simple() {
        let input = indoc! {"
            diff --git a/text.txt b/text.txt
            index 86c770d..a1fd855 100644
            --- a/file.txt
            +++ b/file.txt
            @@ -1,2 +1,3 @@
             context
            -deleted
            +inserted
            garbage

            --- b/file.txt
            +++ a/file.txt
        "};

        let lines = input.lines().map(DiffLine::parse).collect::<Vec<_>>();

        pretty_assertions::assert_eq!(
            lines,
            &[
                DiffLine::Garbage("diff --git a/text.txt b/text.txt"),
                DiffLine::Garbage("index 86c770d..a1fd855 100644"),
                DiffLine::OldPath {
                    path: "file.txt".into()
                },
                DiffLine::NewPath {
                    path: "file.txt".into()
                },
                DiffLine::HunkHeader(Some(HunkLocation {
                    start_line_old: 0,
                    count_old: 2,
                    start_line_new: 0,
                    count_new: 3
                })),
                DiffLine::Context("context"),
                DiffLine::Deletion("deleted"),
                DiffLine::Addition("inserted"),
                DiffLine::Garbage("garbage"),
                DiffLine::Context(""),
                DiffLine::OldPath {
                    path: "b/file.txt".into()
                },
                DiffLine::NewPath {
                    path: "a/file.txt".into()
                },
            ]
        );
    }

    #[test]
    fn file_header_extra_space() {
        let options = ["--- file", "--- file", "---\tfile"];

        for option in options {
            pretty_assertions::assert_eq!(
                DiffLine::parse(option),
                DiffLine::OldPath {
                    path: "file".into()
                },
                "{option}",
            );
        }
    }

    #[test]
    fn hunk_header_extra_space() {
        let options = [
            "@@ -1,2 +1,3 @@",
            "@@ -1,2 +1,3 @@",
            "@@\t-1,2\t+1,3\t@@",
            "@@ -1,2 +1,3 @@",
            "@@ -1,2 +1,3 @@",
            "@@ -1,2 +1,3 @@",
            "@@ -1,2 +1,3 @@ garbage",
        ];

        for option in options {
            pretty_assertions::assert_eq!(
                DiffLine::parse(option),
                DiffLine::HunkHeader(Some(HunkLocation {
                    start_line_old: 0,
                    count_old: 2,
                    start_line_new: 0,
                    count_new: 3
                })),
                "{option}",
            );
        }
    }

    #[test]
    fn hunk_header_without_location() {
        pretty_assertions::assert_eq!(DiffLine::parse("@@ ... @@"), DiffLine::HunkHeader(None));
    }

    #[test]
    fn test_parse_path() {
        assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt");
        assert_eq!(
            parse_header_path("a/", "foo/bar/baz.txt"),
            "foo/bar/baz.txt"
        );
        assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt");
        assert_eq!(
            parse_header_path("a/", "a/foo/bar/baz.txt"),
            "foo/bar/baz.txt"
        );

        // Extra
        assert_eq!(
            parse_header_path("a/", "a/foo/bar/baz.txt 2025"),
            "foo/bar/baz.txt"
        );
        assert_eq!(
            parse_header_path("a/", "a/foo/bar/baz.txt\t2025"),
            "foo/bar/baz.txt"
        );
        assert_eq!(
            parse_header_path("a/", "a/foo/bar/baz.txt \""),
            "foo/bar/baz.txt"
        );

        // Quoted
        assert_eq!(
            parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""),
            "foo/bar/baz quox.txt"
        );
        assert_eq!(
            parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""),
            "foo/bar/baz quox.txt"
        );
        assert_eq!(
            parse_header_path("a/", "\"foo/bar/baz quox.txt\""),
            "foo/bar/baz quox.txt"
        );
        assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷");
        assert_eq!(
            parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"),
            "foo/bar/baz quox.txt"
        );
        // unescaped quotes are dropped
        assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar");

        // Escaped
        assert_eq!(
            parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""),
            "foo/\"bar\"/baz.txt"
        );
        assert_eq!(
            parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""),
            "C:\\Projects\\My App\\old file.txt"
        );
    }
}

@@ -1,11 +1,14 @@
|
||||
//! Zeta2 prompt planning and generation code shared with cloud.
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use cloud_llm_client::predict_edits_v3::{self, Line, Point, PromptFormat, ReferencedDeclaration};
|
||||
use cloud_llm_client::predict_edits_v3::{
|
||||
self, Excerpt, Line, Point, PromptFormat, ReferencedDeclaration,
|
||||
};
|
||||
use indoc::indoc;
|
||||
use ordered_float::OrderedFloat;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use serde::Serialize;
|
||||
use std::cmp;
|
||||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
use std::{cmp::Reverse, collections::BinaryHeap, ops::Range, path::Path};
|
||||
@@ -96,7 +99,195 @@ const UNIFIED_DIFF_REMINDER: &str = indoc! {"
|
||||
If you're editing multiple files, be sure to reflect filename in the hunk's header.
|
||||
"};
|
||||
|
||||
pub struct PlannedPrompt<'a> {
|
||||
pub fn build_prompt(
|
||||
request: &predict_edits_v3::PredictEditsRequest,
|
||||
) -> Result<(String, SectionLabels)> {
|
||||
let mut insertions = match request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt => vec![
|
||||
(
|
||||
Point {
|
||||
line: request.excerpt_line_range.start,
|
||||
column: 0,
|
||||
},
|
||||
EDITABLE_REGION_START_MARKER_WITH_NEWLINE,
|
||||
),
|
||||
(request.cursor_point, CURSOR_MARKER),
|
||||
(
|
||||
Point {
|
||||
line: request.excerpt_line_range.end,
|
||||
column: 0,
|
||||
},
|
||||
EDITABLE_REGION_END_MARKER_WITH_NEWLINE,
|
||||
),
|
||||
],
|
||||
PromptFormat::LabeledSections => vec![(request.cursor_point, CURSOR_MARKER)],
|
||||
PromptFormat::NumLinesUniDiff => {
|
||||
vec![(request.cursor_point, CURSOR_MARKER)]
|
||||
}
|
||||
PromptFormat::OnlySnippets => vec![],
|
||||
};
|
||||
|
||||
let mut prompt = match request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(),
|
||||
// only intended for use via zeta_cli
|
||||
PromptFormat::OnlySnippets => String::new(),
|
||||
};
|
||||
|
||||
if request.events.is_empty() {
|
||||
prompt.push_str("(No edit history)\n\n");
|
||||
} else {
|
||||
prompt.push_str(
|
||||
"The following are the latest edits made by the user, from earlier to later.\n\n",
|
||||
);
|
||||
push_events(&mut prompt, &request.events);
|
||||
}
|
||||
|
||||
if request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
if request.referenced_declarations.is_empty() {
|
||||
prompt.push_str(indoc! {"
|
||||
# File under the cursor:
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
The file is in current state, edits from edit history have been applied.
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
|
||||
|
||||
"});
|
||||
} else {
|
||||
// Note: This hasn't been trained on yet
|
||||
prompt.push_str(indoc! {"
|
||||
# Code Excerpts:
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
Other excerpts of code from the project have been included as context based on their similarity to the code under the cursor.
|
||||
Context excerpts are not guaranteed to be relevant, so use your own judgement.
|
||||
Files are in their current state, edits from edit history have been applied.
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
|
||||
|
||||
"});
|
||||
}
|
||||
} else {
|
||||
prompt.push_str("\n## Code\n\n");
|
||||
}
|
||||
|
||||
let mut section_labels = Default::default();
|
||||
|
||||
if !request.referenced_declarations.is_empty() || !request.signatures.is_empty() {
|
||||
let syntax_based_prompt = SyntaxBasedPrompt::populate(request)?;
|
||||
section_labels = syntax_based_prompt.write(&mut insertions, &mut prompt)?;
|
||||
} else {
|
||||
if request.prompt_format == PromptFormat::LabeledSections {
|
||||
anyhow::bail!("PromptFormat::LabeledSections cannot be used with ContextMode::Llm");
|
||||
}
|
||||
|
||||
for related_file in &request.included_files {
|
||||
write_codeblock(
|
||||
&related_file.path,
|
||||
&related_file.excerpts,
|
||||
if related_file.path == request.excerpt_path {
|
||||
&insertions
|
||||
} else {
|
||||
&[]
|
||||
},
|
||||
related_file.max_row,
|
||||
request.prompt_format == PromptFormat::NumLinesUniDiff,
|
||||
&mut prompt,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
prompt.push_str(UNIFIED_DIFF_REMINDER);
|
||||
}
|
||||
|
||||
Ok((prompt, section_labels))
|
||||
}
|
||||
|
||||
pub fn write_codeblock<'a>(
|
||||
path: &Path,
|
||||
excerpts: impl IntoIterator<Item = &'a Excerpt>,
|
||||
sorted_insertions: &[(Point, &str)],
|
||||
file_line_count: Line,
|
||||
include_line_numbers: bool,
|
||||
output: &'a mut String,
|
||||
) {
|
||||
writeln!(output, "`````{}", path.display()).unwrap();
|
||||
write_excerpts(
|
||||
excerpts,
|
||||
sorted_insertions,
|
||||
file_line_count,
|
||||
include_line_numbers,
|
||||
output,
|
||||
);
|
||||
write!(output, "`````\n\n").unwrap();
|
||||
}
|
||||
|
||||
pub fn write_excerpts<'a>(
|
||||
excerpts: impl IntoIterator<Item = &'a Excerpt>,
|
||||
sorted_insertions: &[(Point, &str)],
|
||||
file_line_count: Line,
|
||||
include_line_numbers: bool,
|
||||
output: &mut String,
|
||||
) {
|
||||
let mut current_row = Line(0);
|
||||
let mut sorted_insertions = sorted_insertions.iter().peekable();
|
||||
|
||||
for excerpt in excerpts {
|
||||
if excerpt.start_line > current_row {
|
||||
writeln!(output, "…").unwrap();
|
||||
}
|
||||
if excerpt.text.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
current_row = excerpt.start_line;
|
||||
|
||||
for mut line in excerpt.text.lines() {
|
||||
if include_line_numbers {
|
||||
write!(output, "{}|", current_row.0 + 1).unwrap();
|
||||
}
|
||||
|
||||
while let Some((insertion_location, insertion_marker)) = sorted_insertions.peek() {
|
||||
match current_row.cmp(&insertion_location.line) {
|
||||
cmp::Ordering::Equal => {
|
||||
let (prefix, suffix) = line.split_at(insertion_location.column as usize);
|
||||
output.push_str(prefix);
|
||||
output.push_str(insertion_marker);
|
||||
line = suffix;
|
||||
sorted_insertions.next();
|
||||
}
|
||||
cmp::Ordering::Less => break,
|
||||
cmp::Ordering::Greater => {
|
||||
sorted_insertions.next();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
output.push_str(line);
|
||||
output.push('\n');
|
||||
current_row.0 += 1;
|
||||
}
|
||||
}
|
||||
|
||||
if current_row < file_line_count {
|
||||
writeln!(output, "…").unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) {
|
||||
if events.is_empty() {
|
||||
return;
|
||||
};
|
||||
|
||||
writeln!(output, "`````diff").unwrap();
|
||||
for event in events {
|
||||
writeln!(output, "{}", event).unwrap();
|
||||
}
|
||||
writeln!(output, "`````\n").unwrap();
|
||||
}
|
||||
|
||||
pub struct SyntaxBasedPrompt<'a> {
|
||||
request: &'a predict_edits_v3::PredictEditsRequest,
|
||||
/// Snippets to include in the prompt. These may overlap - they are merged / deduplicated in
|
||||
/// `to_prompt_string`.
|
||||
@@ -120,13 +311,13 @@ pub enum DeclarationStyle {
|
||||
Declaration,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
#[derive(Default, Clone, Debug, Serialize)]
|
||||
pub struct SectionLabels {
|
||||
pub excerpt_index: usize,
|
||||
pub section_ranges: Vec<(Arc<Path>, Range<Line>)>,
|
||||
}
|
||||
|
||||
impl<'a> PlannedPrompt<'a> {
|
||||
impl<'a> SyntaxBasedPrompt<'a> {
|
||||
/// Greedy one-pass knapsack algorithm to populate the prompt plan. Does the following:
|
||||
///
|
||||
/// Initializes a priority queue by populating it with each snippet, finding the
|
||||
@@ -149,7 +340,7 @@ impl<'a> PlannedPrompt<'a> {
|
||||
///
|
||||
/// * Does not include file paths / other text when considering max_bytes.
|
||||
pub fn populate(request: &'a predict_edits_v3::PredictEditsRequest) -> Result<Self> {
|
||||
let mut this = PlannedPrompt {
|
||||
let mut this = Self {
|
||||
request,
|
||||
snippets: Vec::new(),
|
||||
budget_used: request.excerpt.len(),
|
||||
@@ -354,7 +545,11 @@ impl<'a> PlannedPrompt<'a> {
|
||||
/// Renders the planned context. Each file starts with "```FILE_PATH\n` and ends with triple
|
||||
/// backticks, with a newline after each file. Outputs a line with "..." between nonconsecutive
|
||||
/// chunks.
|
||||
pub fn to_prompt_string(&'a self) -> Result<(String, SectionLabels)> {
|
||||
pub fn write(
|
||||
&'a self,
|
||||
excerpt_file_insertions: &mut Vec<(Point, &'static str)>,
|
||||
prompt: &mut String,
|
||||
) -> Result<SectionLabels> {
|
||||
let mut file_to_snippets: FxHashMap<&'a std::path::Path, Vec<&PlannedSnippet<'a>>> =
|
||||
FxHashMap::default();
|
||||
for snippet in &self.snippets {
|
||||
@@ -383,95 +578,10 @@ impl<'a> PlannedPrompt<'a> {
|
||||
excerpt_file_snippets.push(&excerpt_snippet);
|
||||
file_snippets.push((&self.request.excerpt_path, excerpt_file_snippets, true));
|
||||
|
||||
let mut excerpt_file_insertions = match self.request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt => vec![
|
||||
(
|
||||
Point {
|
||||
line: self.request.excerpt_line_range.start,
|
||||
column: 0,
|
||||
},
|
||||
EDITABLE_REGION_START_MARKER_WITH_NEWLINE,
|
||||
),
|
||||
(self.request.cursor_point, CURSOR_MARKER),
|
||||
(
|
||||
Point {
|
||||
line: self.request.excerpt_line_range.end,
|
||||
column: 0,
|
||||
},
|
||||
EDITABLE_REGION_END_MARKER_WITH_NEWLINE,
|
||||
),
|
||||
],
|
||||
PromptFormat::LabeledSections => vec![(self.request.cursor_point, CURSOR_MARKER)],
|
||||
PromptFormat::NumLinesUniDiff => {
|
||||
vec![(self.request.cursor_point, CURSOR_MARKER)]
|
||||
}
|
||||
PromptFormat::OnlySnippets => vec![],
|
||||
};
|
||||
|
||||
let mut prompt = match self.request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(),
|
||||
// only intended for use via zeta_cli
|
||||
PromptFormat::OnlySnippets => String::new(),
|
||||
};
|
||||
|
||||
if self.request.events.is_empty() {
|
||||
prompt.push_str("(No edit history)\n\n");
|
||||
} else {
|
||||
prompt.push_str(
|
||||
"The following are the latest edits made by the user, from earlier to later.\n\n",
|
||||
);
|
||||
Self::push_events(&mut prompt, &self.request.events);
|
||||
}
|
||||
|
||||
if self.request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
if self.request.referenced_declarations.is_empty() {
|
||||
prompt.push_str(indoc! {"
|
||||
# File under the cursor:
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
The file is in current state, edits from edit history have been applied.
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
|
||||
|
||||
"});
|
||||
} else {
|
||||
// Note: This hasn't been trained on yet
|
||||
prompt.push_str(indoc! {"
|
||||
# Code Excerpts:
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
Other excerpts of code from the project have been included as context based on their similarity to the code under the cursor.
|
||||
Context excerpts are not guaranteed to be relevant, so use your own judgement.
|
||||
Files are in their current state, edits from edit history have been applied.
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
|
||||
|
||||
"});
|
||||
}
|
||||
} else {
|
||||
prompt.push_str("\n## Code\n\n");
|
||||
}
|
||||
|
||||
let section_labels =
|
||||
self.push_file_snippets(&mut prompt, &mut excerpt_file_insertions, file_snippets)?;
|
||||
self.push_file_snippets(prompt, excerpt_file_insertions, file_snippets)?;
|
||||
|
||||
if self.request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
prompt.push_str(UNIFIED_DIFF_REMINDER);
|
||||
}
|
||||
|
||||
Ok((prompt, section_labels))
|
||||
}
|
||||
|
||||
fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) {
|
||||
if events.is_empty() {
|
||||
return;
|
||||
};
|
||||
|
||||
writeln!(output, "`````diff").unwrap();
|
||||
for event in events {
|
||||
writeln!(output, "{}", event).unwrap();
|
||||
}
|
||||
writeln!(output, "`````\n").unwrap();
|
||||
Ok(section_labels)
|
||||
}
|
||||
|
||||
fn push_file_snippets(
|
||||
@@ -505,8 +615,7 @@ impl<'a> PlannedPrompt<'a> {
|
||||
disjoint_snippets.push(current_snippet);
|
||||
}
|
||||
|
||||
// TODO: remove filename=?
|
||||
writeln!(output, "`````filename={}", file_path.display()).ok();
|
||||
writeln!(output, "`````path={}", file_path.display()).ok();
|
||||
let mut skipped_last_snippet = false;
|
||||
for (snippet, range) in disjoint_snippets {
|
||||
let section_index = section_ranges.len();
|
||||
|
||||
@@ -66,6 +66,14 @@ impl CodestralCompletionProvider {
        Self::api_key(cx).is_some()
    }

    /// This is so we can immediately show Codestral as a provider users can
    /// switch to in the edit prediction menu, if the API has been added
    pub fn ensure_api_key_loaded(http_client: Arc<dyn HttpClient>, cx: &mut App) {
        MistralLanguageModelProvider::global(http_client, cx)
            .load_codestral_api_key(cx)
            .detach();
    }

    fn api_key(cx: &App) -> Option<Arc<str>> {
        MistralLanguageModelProvider::try_global(cx)
            .and_then(|provider| provider.codestral_api_key(CODESTRAL_API_URL, cx))
@@ -79,6 +87,7 @@ impl CodestralCompletionProvider {
        suffix: String,
        model: String,
        max_tokens: Option<u32>,
        api_url: String,
    ) -> Result<String> {
        let start_time = Instant::now();

@@ -111,7 +120,7 @@ impl CodestralCompletionProvider {

        let http_request = http_client::Request::builder()
            .method(http_client::Method::POST)
            .uri(format!("{}/v1/fim/completions", CODESTRAL_API_URL))
            .uri(format!("{}/v1/fim/completions", api_url))
            .header("Content-Type", "application/json")
            .header("Authorization", format!("Bearer {}", api_key))
            .body(http_client::AsyncBody::from(request_body))?;
@@ -211,6 +220,12 @@ impl EditPredictionProvider for CodestralCompletionProvider {
            .clone()
            .unwrap_or_else(|| "codestral-latest".to_string());
        let max_tokens = settings.edit_predictions.codestral.max_tokens;
        let api_url = settings
            .edit_predictions
            .codestral
            .api_url
            .clone()
            .unwrap_or_else(|| CODESTRAL_API_URL.to_string());

        self.pending_request = Some(cx.spawn(async move |this, cx| {
            if debounce {
@@ -242,6 +257,7 @@ impl EditPredictionProvider for CodestralCompletionProvider {
                suffix,
                model,
                max_tokens,
                api_url,
            )
            .await
            {

@@ -1,175 +0,0 @@
|
||||
---
|
||||
kind: Service
|
||||
apiVersion: v1
|
||||
metadata:
|
||||
namespace: ${ZED_KUBE_NAMESPACE}
|
||||
name: postgrest
|
||||
annotations:
|
||||
service.beta.kubernetes.io/do-loadbalancer-name: "postgrest-${ZED_KUBE_NAMESPACE}"
|
||||
service.beta.kubernetes.io/do-loadbalancer-tls-ports: "443"
|
||||
service.beta.kubernetes.io/do-loadbalancer-certificate-id: ${ZED_DO_CERTIFICATE_ID}
|
||||
service.beta.kubernetes.io/do-loadbalancer-disable-lets-encrypt-dns-records: "true"
|
||||
spec:
|
||||
type: LoadBalancer
|
||||
selector:
|
||||
app: nginx
|
||||
ports:
|
||||
- name: web
|
||||
protocol: TCP
|
||||
port: 443
|
||||
targetPort: 8080
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
namespace: ${ZED_KUBE_NAMESPACE}
|
||||
name: nginx
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: nginx
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: nginx
|
||||
spec:
|
||||
containers:
|
||||
- name: nginx
|
||||
image: nginx:latest
|
||||
ports:
|
||||
- containerPort: 8080
|
||||
protocol: TCP
|
||||
volumeMounts:
|
||||
- name: nginx-config
|
||||
mountPath: /etc/nginx/nginx.conf
|
||||
subPath: nginx.conf
|
||||
volumes:
|
||||
- name: nginx-config
|
||||
configMap:
|
||||
name: nginx-config
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
namespace: ${ZED_KUBE_NAMESPACE}
|
||||
name: nginx-config
|
||||
data:
|
||||
nginx.conf: |
|
||||
events {}
|
||||
|
||||
http {
|
||||
server {
|
||||
listen 8080;
|
||||
|
||||
location /app/ {
|
||||
proxy_pass http://postgrest-app:8080/;
|
||||
}
|
||||
|
||||
location /llm/ {
|
||||
proxy_pass http://postgrest-llm:8080/;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
namespace: ${ZED_KUBE_NAMESPACE}
|
||||
name: postgrest-app
|
||||
spec:
|
||||
selector:
|
||||
app: postgrest-app
|
||||
ports:
|
||||
- protocol: TCP
|
||||
port: 8080
|
||||
targetPort: 8080
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
namespace: ${ZED_KUBE_NAMESPACE}
|
||||
name: postgrest-llm
|
||||
spec:
|
||||
selector:
|
||||
app: postgrest-llm
|
||||
ports:
|
||||
- protocol: TCP
|
||||
port: 8080
|
||||
targetPort: 8080
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
namespace: ${ZED_KUBE_NAMESPACE}
|
||||
name: postgrest-app
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: postgrest-app
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: postgrest-app
|
||||
spec:
|
||||
containers:
|
||||
- name: postgrest
|
||||
image: "postgrest/postgrest"
|
||||
ports:
|
||||
- containerPort: 8080
|
||||
protocol: TCP
|
||||
env:
|
||||
- name: PGRST_SERVER_PORT
|
||||
value: "8080"
|
||||
- name: PGRST_DB_URI
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: database
|
||||
key: url
|
||||
- name: PGRST_JWT_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: postgrest
|
||||
key: jwt_secret
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
namespace: ${ZED_KUBE_NAMESPACE}
|
||||
name: postgrest-llm
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: postgrest-llm
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: postgrest-llm
|
||||
spec:
|
||||
containers:
|
||||
- name: postgrest
|
||||
image: "postgrest/postgrest"
|
||||
ports:
|
||||
- containerPort: 8080
|
||||
protocol: TCP
|
||||
env:
|
||||
- name: PGRST_SERVER_PORT
|
||||
value: "8080"
|
||||
- name: PGRST_DB_URI
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: llm-database
|
||||
key: url
|
||||
- name: PGRST_JWT_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: postgrest
|
||||
key: jwt_secret
|
||||
@@ -467,6 +467,7 @@ CREATE TABLE extension_versions (
    provides_grammars BOOLEAN NOT NULL DEFAULT FALSE,
    provides_language_servers BOOLEAN NOT NULL DEFAULT FALSE,
    provides_context_servers BOOLEAN NOT NULL DEFAULT FALSE,
    provides_agent_servers BOOLEAN NOT NULL DEFAULT FALSE,
    provides_slash_commands BOOLEAN NOT NULL DEFAULT FALSE,
    provides_indexed_docs_providers BOOLEAN NOT NULL DEFAULT FALSE,
    provides_snippets BOOLEAN NOT NULL DEFAULT FALSE,

@@ -0,0 +1,2 @@
alter table extension_versions
add column provides_agent_servers bool not null default false
@@ -1,4 +0,0 @@
db-uri = "postgres://postgres@localhost/zed"
server-port = 8081
jwt-secret = "the-postgrest-jwt-secret-for-authorization"
log-level = "info"
@@ -1,4 +0,0 @@
db-uri = "postgres://postgres@localhost/zed_llm"
server-port = 8082
jwt-secret = "the-postgrest-jwt-secret-for-authorization"
log-level = "info"
@@ -310,6 +310,9 @@ impl Database {
|
||||
.provides
|
||||
.contains(&ExtensionProvides::ContextServers),
|
||||
),
|
||||
provides_agent_servers: ActiveValue::Set(
|
||||
version.provides.contains(&ExtensionProvides::AgentServers),
|
||||
),
|
||||
provides_slash_commands: ActiveValue::Set(
|
||||
version.provides.contains(&ExtensionProvides::SlashCommands),
|
||||
),
|
||||
@@ -422,6 +425,10 @@ fn apply_provides_filter(
|
||||
condition = condition.add(extension_version::Column::ProvidesContextServers.eq(true));
|
||||
}
|
||||
|
||||
if provides_filter.contains(&ExtensionProvides::AgentServers) {
|
||||
condition = condition.add(extension_version::Column::ProvidesAgentServers.eq(true));
|
||||
}
|
||||
|
||||
if provides_filter.contains(&ExtensionProvides::SlashCommands) {
|
||||
condition = condition.add(extension_version::Column::ProvidesSlashCommands.eq(true));
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ pub struct Model {
|
||||
pub provides_grammars: bool,
|
||||
pub provides_language_servers: bool,
|
||||
pub provides_context_servers: bool,
|
||||
pub provides_agent_servers: bool,
|
||||
pub provides_slash_commands: bool,
|
||||
pub provides_indexed_docs_providers: bool,
|
||||
pub provides_snippets: bool,
|
||||
@@ -57,6 +58,10 @@ impl Model {
|
||||
provides.insert(ExtensionProvides::ContextServers);
|
||||
}
|
||||
|
||||
if self.provides_agent_servers {
|
||||
provides.insert(ExtensionProvides::AgentServers);
|
||||
}
|
||||
|
||||
if self.provides_slash_commands {
|
||||
provides.insert(ExtensionProvides::SlashCommands);
|
||||
}
|
||||
|
||||
@@ -16,6 +16,72 @@ test_both_dbs!(
|
||||
test_extensions_sqlite
|
||||
);
|
||||
|
||||
test_both_dbs!(
|
||||
test_agent_servers_filter,
|
||||
test_agent_servers_filter_postgres,
|
||||
test_agent_servers_filter_sqlite
|
||||
);
|
||||
|
||||
async fn test_agent_servers_filter(db: &Arc<Database>) {
|
||||
// No extensions initially
|
||||
let versions = db.get_known_extension_versions().await.unwrap();
|
||||
assert!(versions.is_empty());
|
||||
|
||||
// Shared timestamp
|
||||
let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
|
||||
let t0 = time::PrimitiveDateTime::new(t0.date(), t0.time());
|
||||
|
||||
// Insert two extensions, only one provides AgentServers
|
||||
db.insert_extension_versions(
|
||||
&[
|
||||
(
|
||||
"ext_agent_servers",
|
||||
vec![NewExtensionVersion {
|
||||
name: "Agent Servers Provider".into(),
|
||||
version: semver::Version::parse("1.0.0").unwrap(),
|
||||
description: "has agent servers".into(),
|
||||
authors: vec!["author".into()],
|
||||
repository: "org/agent-servers".into(),
|
||||
schema_version: 1,
|
||||
wasm_api_version: None,
|
||||
provides: BTreeSet::from_iter([ExtensionProvides::AgentServers]),
|
||||
published_at: t0,
|
||||
}],
|
||||
),
|
||||
(
|
||||
"ext_plain",
|
||||
vec![NewExtensionVersion {
|
||||
name: "Plain Extension".into(),
|
||||
version: semver::Version::parse("0.1.0").unwrap(),
|
||||
description: "no agent servers".into(),
|
||||
authors: vec!["author2".into()],
|
||||
repository: "org/plain".into(),
|
||||
schema_version: 1,
|
||||
wasm_api_version: None,
|
||||
provides: BTreeSet::default(),
|
||||
published_at: t0,
|
||||
}],
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Filter by AgentServers provides
|
||||
let provides_filter = BTreeSet::from_iter([ExtensionProvides::AgentServers]);
|
||||
|
||||
let filtered = db
|
||||
.get_extensions(None, Some(&provides_filter), 1, 10)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Expect only the extension that declared AgentServers
|
||||
assert_eq!(filtered.len(), 1);
|
||||
assert_eq!(filtered[0].id.as_ref(), "ext_agent_servers");
|
||||
}
|
||||
|
||||
async fn test_extensions(db: &Arc<Database>) {
|
||||
let versions = db.get_known_extension_versions().await.unwrap();
|
||||
assert!(versions.is_empty());
|
||||
|
||||
@@ -347,6 +347,7 @@ impl Server {
            .add_request_handler(forward_read_only_project_request::<proto::GetColorPresentation>)
            .add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
            .add_request_handler(forward_read_only_project_request::<proto::GitGetBranches>)
            .add_request_handler(forward_read_only_project_request::<proto::GetDefaultBranch>)
            .add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedDiff>)
            .add_request_handler(forward_read_only_project_request::<proto::OpenUncommittedDiff>)
            .add_request_handler(forward_read_only_project_request::<proto::LspExtExpandMacro>)
@@ -461,6 +462,8 @@ impl Server {
            .add_message_handler(broadcast_project_message_from_host::<proto::BreakpointsForFile>)
            .add_request_handler(forward_mutating_project_request::<proto::OpenCommitMessageBuffer>)
            .add_request_handler(forward_mutating_project_request::<proto::GitDiff>)
            .add_request_handler(forward_mutating_project_request::<proto::GetTreeDiff>)
            .add_request_handler(forward_mutating_project_request::<proto::GetBlobContent>)
            .add_request_handler(forward_mutating_project_request::<proto::GitCreateBranch>)
            .add_request_handler(forward_mutating_project_request::<proto::GitChangeBranch>)
            .add_request_handler(forward_mutating_project_request::<proto::CheckForPushedCommits>)

@@ -17,12 +17,14 @@ use editor::{
|
||||
use fs::Fs;
|
||||
use futures::{SinkExt, StreamExt, channel::mpsc, lock::Mutex};
|
||||
use git::repository::repo_path;
|
||||
use gpui::{App, Rgba, TestAppContext, UpdateGlobal, VisualContext, VisualTestContext};
|
||||
use gpui::{
|
||||
App, Rgba, SharedString, TestAppContext, UpdateGlobal, VisualContext, VisualTestContext,
|
||||
};
|
||||
use indoc::indoc;
|
||||
use language::FakeLspAdapter;
|
||||
use lsp::LSP_REQUEST_TIMEOUT;
|
||||
use project::{
|
||||
ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT,
|
||||
ProgressToken, ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT,
|
||||
lsp_store::lsp_ext_command::{ExpandedMacro, LspExtExpandMacro},
|
||||
};
|
||||
use recent_projects::disconnected_overlay::DisconnectedOverlay;
|
||||
@@ -37,6 +39,7 @@ use std::{
|
||||
Arc,
|
||||
atomic::{self, AtomicBool, AtomicUsize},
|
||||
},
|
||||
time::Duration,
|
||||
};
|
||||
use text::Point;
|
||||
use util::{path, rel_path::rel_path, uri};
|
||||
@@ -1283,12 +1286,14 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
});
|
||||
executor.run_until_parked();
|
||||
|
||||
let token = ProgressToken::String(SharedString::from("the-token"));
|
||||
|
||||
project_a.read_with(cx_a, |project, cx| {
|
||||
let status = project.language_server_statuses(cx).next().unwrap().1;
|
||||
assert_eq!(status.name.0, "the-language-server");
|
||||
assert_eq!(status.pending_work.len(), 1);
|
||||
assert_eq!(
|
||||
status.pending_work["the-token"].message.as_ref().unwrap(),
|
||||
status.pending_work[&token].message.as_ref().unwrap(),
|
||||
"the-message"
|
||||
);
|
||||
});
|
||||
@@ -1322,7 +1327,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
assert_eq!(status.name.0, "the-language-server");
|
||||
assert_eq!(status.pending_work.len(), 1);
|
||||
assert_eq!(
|
||||
status.pending_work["the-token"].message.as_ref().unwrap(),
|
||||
status.pending_work[&token].message.as_ref().unwrap(),
|
||||
"the-message-2"
|
||||
);
|
||||
});
|
||||
@@ -1332,7 +1337,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
assert_eq!(status.name.0, "the-language-server");
|
||||
assert_eq!(status.pending_work.len(), 1);
|
||||
assert_eq!(
|
||||
status.pending_work["the-token"].message.as_ref().unwrap(),
|
||||
status.pending_work[&token].message.as_ref().unwrap(),
|
||||
"the-message-2"
|
||||
);
|
||||
});
|
||||
@@ -1813,14 +1818,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
settings.project.all_languages.defaults.inlay_hints =
|
||||
Some(InlayHintSettingsContent {
|
||||
enabled: Some(true),
|
||||
show_value_hints: Some(true),
|
||||
edit_debounce_ms: Some(0),
|
||||
scroll_debounce_ms: Some(0),
|
||||
show_type_hints: Some(true),
|
||||
show_parameter_hints: Some(false),
|
||||
show_other_hints: Some(true),
|
||||
show_background: Some(false),
|
||||
toggle_on_modifiers_press: None,
|
||||
..InlayHintSettingsContent::default()
|
||||
})
|
||||
});
|
||||
});
|
||||
@@ -1830,15 +1828,8 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
store.update_user_settings(cx, |settings| {
|
||||
settings.project.all_languages.defaults.inlay_hints =
|
||||
Some(InlayHintSettingsContent {
|
||||
show_value_hints: Some(true),
|
||||
enabled: Some(true),
|
||||
edit_debounce_ms: Some(0),
|
||||
scroll_debounce_ms: Some(0),
|
||||
show_type_hints: Some(true),
|
||||
show_parameter_hints: Some(false),
|
||||
show_other_hints: Some(true),
|
||||
show_background: Some(false),
|
||||
toggle_on_modifiers_press: None,
|
||||
..InlayHintSettingsContent::default()
|
||||
})
|
||||
});
|
||||
});
|
||||
@@ -1931,6 +1922,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
});
|
||||
let fake_language_server = fake_language_servers.next().await.unwrap();
|
||||
let editor_a = file_a.await.unwrap().downcast::<Editor>().unwrap();
|
||||
executor.advance_clock(Duration::from_millis(100));
|
||||
executor.run_until_parked();
|
||||
|
||||
let initial_edit = edits_made.load(atomic::Ordering::Acquire);
|
||||
@@ -1951,6 +1943,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
.downcast::<Editor>()
|
||||
.unwrap();
|
||||
|
||||
executor.advance_clock(Duration::from_millis(100));
|
||||
executor.run_until_parked();
|
||||
editor_b.update(cx_b, |editor, cx| {
|
||||
assert_eq!(
|
||||
@@ -1969,6 +1962,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
});
|
||||
cx_b.focus(&editor_b);
|
||||
|
||||
executor.advance_clock(Duration::from_secs(1));
|
||||
executor.run_until_parked();
|
||||
editor_a.update(cx_a, |editor, cx| {
|
||||
assert_eq!(
|
||||
@@ -1992,6 +1986,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
});
|
||||
cx_a.focus(&editor_a);
|
||||
|
||||
executor.advance_clock(Duration::from_secs(1));
|
||||
executor.run_until_parked();
|
||||
editor_a.update(cx_a, |editor, cx| {
|
||||
assert_eq!(
|
||||
@@ -2013,6 +2008,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
.into_response()
|
||||
.expect("inlay refresh request failed");
|
||||
|
||||
executor.advance_clock(Duration::from_secs(1));
|
||||
executor.run_until_parked();
|
||||
editor_a.update(cx_a, |editor, cx| {
|
||||
assert_eq!(
|
||||
@@ -2585,7 +2581,7 @@ async fn test_lsp_pull_diagnostics(
|
||||
capabilities: capabilities.clone(),
|
||||
initializer: Some(Box::new(move |fake_language_server| {
|
||||
let expected_workspace_diagnostic_token = lsp::ProgressToken::String(format!(
|
||||
"workspace/diagnostic-{}-1",
|
||||
"workspace/diagnostic/{}/1",
|
||||
fake_language_server.server.server_id()
|
||||
));
|
||||
let closure_workspace_diagnostics_pulls_result_ids = closure_workspace_diagnostics_pulls_result_ids.clone();
|
||||
|
||||
@@ -776,26 +776,30 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
|
||||
.unwrap();
|
||||
|
||||
// Clients A and B follow each other in split panes
|
||||
workspace_a.update_in(cx_a, |workspace, window, cx| {
|
||||
workspace.split_and_clone(
|
||||
workspace.active_pane().clone(),
|
||||
SplitDirection::Right,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
workspace_a
|
||||
.update_in(cx_a, |workspace, window, cx| {
|
||||
workspace.split_and_clone(
|
||||
workspace.active_pane().clone(),
|
||||
SplitDirection::Right,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
workspace_a.update_in(cx_a, |workspace, window, cx| {
|
||||
workspace.follow(client_b.peer_id().unwrap(), window, cx)
|
||||
});
|
||||
executor.run_until_parked();
|
||||
workspace_b.update_in(cx_b, |workspace, window, cx| {
|
||||
workspace.split_and_clone(
|
||||
workspace.active_pane().clone(),
|
||||
SplitDirection::Right,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
workspace_b
|
||||
.update_in(cx_b, |workspace, window, cx| {
|
||||
workspace.split_and_clone(
|
||||
workspace.active_pane().clone(),
|
||||
SplitDirection::Right,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
workspace_b.update_in(cx_b, |workspace, window, cx| {
|
||||
workspace.follow(client_a.peer_id().unwrap(), window, cx)
|
||||
});
|
||||
@@ -1369,9 +1373,11 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont
|
||||
);
|
||||
|
||||
// When client B activates a different pane, it continues following client A in the original pane.
|
||||
workspace_b.update_in(cx_b, |workspace, window, cx| {
|
||||
workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, window, cx)
|
||||
});
|
||||
workspace_b
|
||||
.update_in(cx_b, |workspace, window, cx| {
|
||||
workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, window, cx)
|
||||
})
|
||||
.await;
|
||||
assert_eq!(
|
||||
workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
|
||||
Some(leader_id.into())
|
||||
|
||||
@@ -6748,7 +6748,7 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
|
||||
pane.update(cx, |pane, cx| {
|
||||
pane.split(workspace::SplitDirection::Right, cx);
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
let right_pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
|
||||
@@ -493,13 +493,17 @@ impl Item for ChannelView {
        None
    }

    fn can_split(&self) -> bool {
        true
    }

    fn clone_on_split(
        &self,
        _: Option<WorkspaceId>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Option<Entity<Self>> {
        Some(cx.new(|cx| {
    ) -> Task<Option<Entity<Self>>> {
        Task::ready(Some(cx.new(|cx| {
            Self::new(
                self.project.clone(),
                self.workspace.clone(),
@@ -508,7 +512,7 @@ impl Item for ChannelView {
                window,
                cx,
            )
        }))
        })))
    }

    fn navigate(

@@ -54,6 +54,10 @@ actions!(
    CollapseSelectedChannel,
    /// Expands the selected channel in the tree view.
    ExpandSelectedChannel,
    /// Opens the meeting notes for the selected channel in the panel.
    ///
    /// Use `collab::OpenChannelNotes` to open the channel notes for the current call.
    OpenSelectedChannelNotes,
    /// Starts moving a channel to a new location.
    StartMoveChannel,
    /// Moves the selected item to the current location.
@@ -1265,6 +1269,13 @@ impl CollabPanel {
                        window.handler_for(&this, move |this, _, cx| {
                            this.copy_channel_link(channel_id, cx)
                        }),
                    )
                    .entry(
                        "Copy Channel Notes Link",
                        None,
                        window.handler_for(&this, move |this, _, cx| {
                            this.copy_channel_notes_link(channel_id, cx)
                        }),
                    );

                let mut has_destructive_actions = false;
@@ -1849,6 +1860,17 @@ impl CollabPanel {
        }
    }

    fn open_selected_channel_notes(
        &mut self,
        _: &OpenSelectedChannelNotes,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        if let Some(channel) = self.selected_channel() {
            self.open_channel_notes(channel.id, window, cx);
        }
    }

    fn set_channel_visibility(
        &mut self,
        channel_id: ChannelId,
@@ -2220,6 +2242,15 @@ impl CollabPanel {
        cx.write_to_clipboard(item)
    }

    fn copy_channel_notes_link(&mut self, channel_id: ChannelId, cx: &mut Context<Self>) {
        let channel_store = self.channel_store.read(cx);
        let Some(channel) = channel_store.channel_for_id(channel_id) else {
            return;
        };
        let item = ClipboardItem::new_string(channel.notes_link(None, cx));
        cx.write_to_clipboard(item)
    }

    fn render_signed_out(&mut self, cx: &mut Context<Self>) -> Div {
        let collab_blurb = "Work with your team in realtime with collaborative editing, voice, shared notes and more.";

@@ -2960,6 +2991,7 @@ impl Render for CollabPanel {
            .on_action(cx.listener(CollabPanel::remove_selected_channel))
            .on_action(cx.listener(CollabPanel::show_inline_context_menu))
            .on_action(cx.listener(CollabPanel::rename_selected_channel))
            .on_action(cx.listener(CollabPanel::open_selected_channel_notes))
            .on_action(cx.listener(CollabPanel::collapse_selected_channel))
            .on_action(cx.listener(CollabPanel::expand_selected_channel))
            .on_action(cx.listener(CollabPanel::start_move_selected_channel))

@@ -20,6 +20,7 @@ command_palette_hooks.workspace = true
db.workspace = true
fuzzy.workspace = true
gpui.workspace = true
menu.workspace = true
log.workspace = true
picker.workspace = true
postage.workspace = true

@@ -22,7 +22,7 @@ use persistence::COMMAND_PALETTE_HISTORY;
use picker::{Picker, PickerDelegate};
use postage::{sink::Sink, stream::Stream};
use settings::Settings;
use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, h_flex, prelude::*, v_flex};
use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, prelude::*};
use util::ResultExt;
use workspace::{ModalView, Workspace, WorkspaceSettings};
use zed_actions::{OpenZedUrl, command_palette::Toggle};
@@ -143,7 +143,7 @@ impl Focusable for CommandPalette {
}

impl Render for CommandPalette {
    fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
    fn render(&mut self, _window: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
        v_flex()
            .key_context("CommandPalette")
            .w(rems(34.))
@@ -261,6 +261,17 @@ impl CommandPaletteDelegate {
            HashMap::new()
        }
    }

    fn selected_command(&self) -> Option<&Command> {
        let action_ix = self
            .matches
            .get(self.selected_ix)
            .map(|m| m.candidate_id)
            .unwrap_or(self.selected_ix);
        // this gets called in headless tests where there are no commands loaded
        // so we need to return an Option here
        self.commands.get(action_ix)
    }
}

impl PickerDelegate for CommandPaletteDelegate {
|
||||
@@ -411,7 +422,20 @@ impl PickerDelegate for CommandPaletteDelegate {
|
||||
.log_err();
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
fn confirm(&mut self, secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
if secondary {
|
||||
let Some(selected_command) = self.selected_command() else {
|
||||
return;
|
||||
};
|
||||
let action_name = selected_command.action.name();
|
||||
let open_keymap = Box::new(zed_actions::ChangeKeybinding {
|
||||
action: action_name.to_string(),
|
||||
});
|
||||
window.dispatch_action(open_keymap, cx);
|
||||
self.dismissed(window, cx);
|
||||
return;
|
||||
}
|
||||
|
||||
if self.matches.is_empty() {
|
||||
self.dismissed(window, cx);
|
||||
return;
|
||||
@@ -448,6 +472,7 @@ impl PickerDelegate for CommandPaletteDelegate {
|
||||
) -> Option<Self::ListItem> {
|
||||
let matching_command = self.matches.get(ix)?;
|
||||
let command = self.commands.get(matching_command.candidate_id)?;
|
||||
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
@@ -470,6 +495,59 @@ impl PickerDelegate for CommandPaletteDelegate {
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_footer(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<AnyElement> {
|
||||
let selected_command = self.selected_command()?;
|
||||
let keybind =
|
||||
KeyBinding::for_action_in(&*selected_command.action, &self.previous_focus_handle, cx);
|
||||
|
||||
let focus_handle = &self.previous_focus_handle;
|
||||
let keybinding_buttons = if keybind.has_binding(window) {
|
||||
Button::new("change", "Change Keybinding…")
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(&menu::SecondaryConfirm, focus_handle, cx)
|
||||
.map(|kb| kb.size(rems_from_px(12.))),
|
||||
)
|
||||
.on_click(move |_, window, cx| {
|
||||
window.dispatch_action(menu::SecondaryConfirm.boxed_clone(), cx);
|
||||
})
|
||||
} else {
|
||||
Button::new("add", "Add Keybinding…")
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(&menu::SecondaryConfirm, focus_handle, cx)
|
||||
.map(|kb| kb.size(rems_from_px(12.))),
|
||||
)
|
||||
.on_click(move |_, window, cx| {
|
||||
window.dispatch_action(menu::SecondaryConfirm.boxed_clone(), cx);
|
||||
})
|
||||
};
|
||||
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.p_1p5()
|
||||
.gap_1()
|
||||
.justify_end()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.child(keybinding_buttons)
|
||||
.child(
|
||||
Button::new("run-action", "Run")
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx)
|
||||
.map(|kb| kb.size(rems_from_px(12.))),
|
||||
)
|
||||
.on_click(|_, window, cx| {
|
||||
window.dispatch_action(menu::Confirm.boxed_clone(), cx)
|
||||
}),
|
||||
)
|
||||
.into_any(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn humanize_action_name(name: &str) -> String {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.