Compare commits

..

4 Commits

Author SHA1 Message Date
Piotr Osiewicz
f6946ad4e8 Add missed offender 2025-10-07 14:33:21 +02:00
Piotr Osiewicz
c9972c2972 thanks @SomeoneToIgnore for saving my ass 2025-10-07 14:04:56 +02:00
Piotr Osiewicz
afdc53fdb7 agent: Cache away results of converting rules file names into relpaths 2025-10-07 13:58:12 +02:00
Piotr Osiewicz
d2e5947cf3 paths: Cache away results of static construction of RelPath
These functions started showing up in my profiles after a RelPath refactor, as RelPath::unix is not a no-op - it parses the path, which is way costlier than returning a reference to a static
2025-10-07 13:52:52 +02:00
1214 changed files with 59778 additions and 76389 deletions

View File

@@ -5,16 +5,12 @@
# Arrays are merged together though. See: https://doc.rust-lang.org/cargo/reference/config.html#hierarchical-structure # Arrays are merged together though. See: https://doc.rust-lang.org/cargo/reference/config.html#hierarchical-structure
# The intent for this file is to configure CI build process with a divergance from Zed developers experience; for example, in this config file # The intent for this file is to configure CI build process with a divergance from Zed developers experience; for example, in this config file
# we use `-D warnings` for rustflags (which makes compilation fail in presence of warnings during build process). Placing that in developers `config.toml` # we use `-D warnings` for rustflags (which makes compilation fail in presence of warnings during build process). Placing that in developers `config.toml`
# would be inconvenient. # would be incovenient.
# The reason for not using the RUSTFLAGS environment variable is that doing so would override all the settings in the config.toml file, even if the contents of the latter are completely nonsensical. See: https://github.com/rust-lang/cargo/issues/5376 # The reason for not using the RUSTFLAGS environment variable is that doing so would override all the settings in the config.toml file, even if the contents of the latter are completely nonsensical. See: https://github.com/rust-lang/cargo/issues/5376
# Here, we opted to use `[target.'cfg(all())']` instead of `[build]` because `[target.'**']` is guaranteed to be cumulative. # Here, we opted to use `[target.'cfg(all())']` instead of `[build]` because `[target.'**']` is guaranteed to be cumulative.
[target.'cfg(all())'] [target.'cfg(all())']
rustflags = ["-D", "warnings"] rustflags = ["-D", "warnings"]
# We don't need fullest debug information for dev stuff (tests etc.) in CI.
[profile.dev]
debug = "limited"
# Use Mold on Linux, because it's faster than GNU ld and LLD. # Use Mold on Linux, because it's faster than GNU ld and LLD.
# #
# We no longer set this in the default `config.toml` so that developers can opt in to Wild, which # We no longer set this in the default `config.toml` so that developers can opt in to Wild, which

42
.config/hakari.toml Normal file
View File

@@ -0,0 +1,42 @@
# This file contains settings for `cargo hakari`.
# See https://docs.rs/cargo-hakari/latest/cargo_hakari/config for a full list of options.
hakari-package = "workspace-hack"
resolver = "2"
dep-format-version = "4"
workspace-hack-line-style = "workspace-dotted"
# this should be the same list as "targets" in ../rust-toolchain.toml
platforms = [
"x86_64-apple-darwin",
"aarch64-apple-darwin",
"x86_64-unknown-linux-gnu",
"aarch64-unknown-linux-gnu",
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-musl", # remote server
]
[traversal-excludes]
workspace-members = [
"remote_server",
]
third-party = [
{ name = "reqwest", version = "0.11.27" },
# build of remote_server should not include scap / its x11 dependency
{ name = "zed-scap", git = "https://github.com/zed-industries/scap", rev = "4afea48c3b002197176fb19cd0f9b180dd36eaac", version = "0.0.8-zed" },
# build of remote_server should not need to include on libalsa through rodio
{ name = "rodio", git = "https://github.com/RustAudio/rodio" },
]
[final-excludes]
workspace-members = [
"zed_extension_api",
# exclude all extensions
"zed_glsl",
"zed_html",
"zed_proto",
"slash_commands_example",
"zed_test_extension",
]

View File

@@ -4,17 +4,3 @@ sequential-db-tests = { max-threads = 1 }
[[profile.default.overrides]] [[profile.default.overrides]]
filter = 'package(db)' filter = 'package(db)'
test-group = 'sequential-db-tests' test-group = 'sequential-db-tests'
# Run slowest tests first.
#
[[profile.default.overrides]]
filter = 'package(worktree) and test(test_random_worktree_changes)'
priority = 100
[[profile.default.overrides]]
filter = 'package(collab) and (test(random_project_collaboration_tests) or test(random_channel_buffer_tests) or test(test_contact_requests) or test(test_basic_following))'
priority = 99
[[profile.default.overrides]]
filter = 'package(extension_host) and test(test_extension_store_with_test_extension)'
priority = 99

View File

@@ -1,35 +0,0 @@
name: Bug Report (Git)
description: Zed Git Related Bugs
type: "Bug"
labels: ["git"]
title: "Git: <a short description of the Git bug>"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one-line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one-line summary of the issue below -->
SUMMARY_SENTENCE_HERE
### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
Steps to trigger the problem:
1.
2.
3.
**Expected Behavior**:
**Actual Behavior**:
validations:
required: true
- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
validations:
required: true

View File

@@ -1,8 +1,8 @@
name: Bug Report (Windows) name: Bug Report (Windows Beta)
description: Zed Windows Related Bugs description: Zed Windows Beta Related Bugs
type: "Bug" type: "Bug"
labels: ["windows"] labels: ["windows"]
title: "Windows: <a short description of the Windows bug>" title: "Windows Beta: <a short description of the Windows bug>"
body: body:
- type: textarea - type: textarea
attributes: attributes:

View File

@@ -33,10 +33,11 @@ body:
required: true required: true
- type: textarea - type: textarea
attributes: attributes:
label: If applicable, attach your `Zed.log` file to this issue. label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
description: | description: |
From the command palette, run `zed: open log` to see the last 1000 lines. macOS: `~/Library/Logs/Zed/Zed.log`
Or run `zed: reveal log in file manager` to reveal the log file itself. Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
value: | value: |
<details><summary>Zed.log</summary> <details><summary>Zed.log</summary>

View File

@@ -15,11 +15,8 @@ runs:
node-version: "18" node-version: "18"
- name: Limit target directory size - name: Limit target directory size
env:
MAX_SIZE: ${{ runner.os == 'macOS' && 300 || 100 }}
shell: bash -euxo pipefail {0} shell: bash -euxo pipefail {0}
# Use the variable in the run command run: script/clear-target-dir-if-larger-than 100
run: script/clear-target-dir-if-larger-than ${{ env.MAX_SIZE }}
- name: Run tests - name: Run tests
shell: bash -euxo pipefail {0} shell: bash -euxo pipefail {0}

View File

@@ -2,9 +2,16 @@ name: CI
on: on:
push: push:
branches:
- main
- "v[0-9]+.[0-9]+.x"
tags: tags:
- "v*" - "v*"
pull_request:
branches:
- "**"
concurrency: concurrency:
# Allow only one workflow per any non-`main` branch. # Allow only one workflow per any non-`main` branch.
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
@@ -123,6 +130,39 @@ jobs:
input: "crates/proto/proto/" input: "crates/proto/proto/"
against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/" against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"
workspace_hack:
timeout-minutes: 60
name: Check workspace-hack crate
needs: [job_spec]
if: |
github.repository_owner == 'zed-industries' &&
needs.job_spec.outputs.run_tests == 'true'
runs-on:
- namespace-profile-8x16-ubuntu-2204
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
- name: Install cargo-hakari
uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 # v2
with:
command: install
args: cargo-hakari@0.9.35
- name: Check workspace-hack Cargo.toml is up-to-date
run: |
cargo hakari generate --diff || {
echo "To fix, run script/update-workspace-hack or script/update-workspace-hack.ps1";
false
}
- name: Check all crates depend on workspace-hack
run: |
cargo hakari manage-deps --dry-run || {
echo "To fix, run script/update-workspace-hack or script/update-workspace-hack.ps1"
false
}
style: style:
timeout-minutes: 60 timeout-minutes: 60
name: Check formatting and spelling name: Check formatting and spelling
@@ -170,7 +210,7 @@ jobs:
uses: ./.github/actions/check_style uses: ./.github/actions/check_style
- name: Check for typos - name: Check for typos
uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1 # v1.38.1 uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
with: with:
config: ./typos.toml config: ./typos.toml
@@ -266,12 +306,15 @@ jobs:
uses: ./.github/actions/run_tests uses: ./.github/actions/run_tests
- name: Build collab - name: Build collab
# we should do this on a linux x86 machinge
run: cargo build -p collab run: cargo build -p collab
- name: Build other binaries and features - name: Build other binaries and features
run: | run: |
cargo build --workspace --bins --examples cargo build --workspace --bins --all-features
cargo check -p gpui --features "macos-blade"
cargo check -p workspace
cargo build -p remote_server
cargo check -p gpui --examples
# Since the macOS runners are stateful, so we need to remove the config file to prevent potential bug. # Since the macOS runners are stateful, so we need to remove the config file to prevent potential bug.
- name: Clean CI config file - name: Clean CI config file
@@ -464,6 +507,7 @@ jobs:
- actionlint - actionlint
- migration_checks - migration_checks
# run_tests: If adding required tests, add them here and to script below. # run_tests: If adding required tests, add them here and to script below.
- workspace_hack
- linux_tests - linux_tests
- build_remote_server - build_remote_server
- macos_tests - macos_tests
@@ -489,6 +533,7 @@ jobs:
# Only check test jobs if they were supposed to run # Only check test jobs if they were supposed to run
if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then
[[ "${{ needs.workspace_hack.result }}" != 'success' ]] && { RET_CODE=1; echo "Workspace Hack failed"; }
[[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; } [[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
[[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; } [[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
[[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; } [[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
@@ -506,7 +551,9 @@ jobs:
name: Create a macOS bundle name: Create a macOS bundle
runs-on: runs-on:
- self-mini-macos - self-mini-macos
if: startsWith(github.ref, 'refs/tags/v') if: |
( startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling') )
needs: [macos_tests] needs: [macos_tests]
env: env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
@@ -537,14 +584,16 @@ jobs:
ref: ${{ github.ref }} ref: ${{ github.ref }}
- name: Limit target directory size - name: Limit target directory size
run: script/clear-target-dir-if-larger-than 300 run: script/clear-target-dir-if-larger-than 100
- name: Determine version and release channel - name: Determine version and release channel
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
run: | run: |
# This exports RELEASE_CHANNEL into env (GITHUB_ENV) # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel script/determine-release-channel
- name: Draft release notes - name: Draft release notes
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
run: | run: |
mkdir -p target/ mkdir -p target/
# Ignore any errors that occur while drafting release notes to not fail the build. # Ignore any errors that occur while drafting release notes to not fail the build.
@@ -553,17 +602,29 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Create macOS app bundle (aarch64) - name: Create macOS app bundle
run: script/bundle-mac aarch64-apple-darwin run: script/bundle-mac
- name: Create macOS app bundle (x64)
run: script/bundle-mac x86_64-apple-darwin
- name: Rename binaries - name: Rename binaries
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
run: | run: |
mv target/aarch64-apple-darwin/release/Zed.dmg target/aarch64-apple-darwin/release/Zed-aarch64.dmg mv target/aarch64-apple-darwin/release/Zed.dmg target/aarch64-apple-darwin/release/Zed-aarch64.dmg
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
- uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
name: Upload app bundle to release name: Upload app bundle to release
if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }} if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
@@ -584,7 +645,8 @@ jobs:
runs-on: runs-on:
- namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc
if: | if: |
( startsWith(github.ref, 'refs/tags/v') ) ( startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling') )
needs: [linux_tests] needs: [linux_tests]
steps: steps:
- name: Checkout repo - name: Checkout repo
@@ -601,6 +663,7 @@ jobs:
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
- name: Determine version and release channel - name: Determine version and release channel
if: startsWith(github.ref, 'refs/tags/v')
run: | run: |
# This exports RELEASE_CHANNEL into env (GITHUB_ENV) # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel script/determine-release-channel
@@ -608,8 +671,23 @@ jobs:
- name: Create Linux .tar.gz bundle - name: Create Linux .tar.gz bundle
run: script/bundle-linux run: script/bundle-linux
- name: Upload Artifact to Workflow - zed (run-bundling)
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz
- name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.gz
path: target/zed-remote-server-linux-x86_64.gz
- name: Upload Artifacts to release - name: Upload Artifacts to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
with: with:
draft: true draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
@@ -626,6 +704,7 @@ jobs:
- namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc
if: | if: |
startsWith(github.ref, 'refs/tags/v') startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
needs: [linux_tests] needs: [linux_tests]
steps: steps:
- name: Checkout repo - name: Checkout repo
@@ -642,6 +721,7 @@ jobs:
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
- name: Determine version and release channel - name: Determine version and release channel
if: startsWith(github.ref, 'refs/tags/v')
run: | run: |
# This exports RELEASE_CHANNEL into env (GITHUB_ENV) # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel script/determine-release-channel
@@ -649,8 +729,23 @@ jobs:
- name: Create and upload Linux .tar.gz bundles - name: Create and upload Linux .tar.gz bundles
run: script/bundle-linux run: script/bundle-linux
- name: Upload Artifact to Workflow - zed (run-bundling)
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz
- name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.gz
path: target/zed-remote-server-linux-aarch64.gz
- name: Upload Artifacts to release - name: Upload Artifacts to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
with: with:
draft: true draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
@@ -664,7 +759,8 @@ jobs:
timeout-minutes: 60 timeout-minutes: 60
runs-on: github-8vcpu-ubuntu-2404 runs-on: github-8vcpu-ubuntu-2404
if: | if: |
false && ( startsWith(github.ref, 'refs/tags/v') ) false && ( startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling') )
needs: [linux_tests] needs: [linux_tests]
name: Build Zed on FreeBSD name: Build Zed on FreeBSD
steps: steps:
@@ -715,19 +811,24 @@ jobs:
nix-build: nix-build:
name: Build with Nix name: Build with Nix
uses: ./.github/workflows/nix_build.yml uses: ./.github/workflows/nix.yml
needs: [job_spec] needs: [job_spec]
if: github.repository_owner == 'zed-industries' && if: github.repository_owner == 'zed-industries' &&
(contains(github.event.pull_request.labels.*.name, 'run-nix') || (contains(github.event.pull_request.labels.*.name, 'run-nix') ||
needs.job_spec.outputs.run_nix == 'true') needs.job_spec.outputs.run_nix == 'true')
secrets: inherit secrets: inherit
with:
flake-output: debug
# excludes the final package to only cache dependencies
cachix-filter: "-zed-editor-[0-9.]*-nightly"
bundle-windows-x64: bundle-windows-x64:
timeout-minutes: 120 timeout-minutes: 120
name: Create a Windows installer for x86_64 name: Create a Windows installer
runs-on: [self-32vcpu-windows-2022] runs-on: [self-32vcpu-windows-2022]
if: | if: |
( startsWith(github.ref, 'refs/tags/v') ) ( startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling') )
needs: [windows_tests] needs: [windows_tests]
env: env:
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
@@ -752,6 +853,7 @@ jobs:
- name: Determine version and release channel - name: Determine version and release channel
working-directory: ${{ env.ZED_WORKSPACE }} working-directory: ${{ env.ZED_WORKSPACE }}
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
run: | run: |
# This exports RELEASE_CHANNEL into env (GITHUB_ENV) # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel.ps1 script/determine-release-channel.ps1
@@ -760,55 +862,16 @@ jobs:
working-directory: ${{ env.ZED_WORKSPACE }} working-directory: ${{ env.ZED_WORKSPACE }}
run: script/bundle-windows.ps1 run: script/bundle-windows.ps1
- name: Upload Artifacts to release - name: Upload installer (x86_64) to Workflow - zed (run-bundling)
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
with: with:
draft: true name: ZedEditorUserSetup-x64-${{ github.event.pull_request.head.sha || github.sha }}.exe
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} path: ${{ env.SETUP_PATH }}
files: ${{ env.SETUP_PATH }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
bundle-windows-aarch64:
timeout-minutes: 120
name: Create a Windows installer for aarch64
runs-on: [self-32vcpu-windows-2022]
if: |
( startsWith(github.ref, 'refs/tags/v') )
needs: [windows_tests]
env:
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
- name: Setup Sentry CLI
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
with:
token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
- name: Determine version and release channel
working-directory: ${{ env.ZED_WORKSPACE }}
run: |
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel.ps1
- name: Build Zed installer
working-directory: ${{ env.ZED_WORKSPACE }}
run: script/bundle-windows.ps1 -Architecture aarch64
- name: Upload Artifacts to release - name: Upload Artifacts to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
with: with:
draft: true draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
@@ -819,10 +882,9 @@ jobs:
auto-release-preview: auto-release-preview:
name: Auto release preview name: Auto release preview
if: | if: |
false startsWith(github.ref, 'refs/tags/v')
&& startsWith(github.ref, 'refs/tags/v')
&& endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64, bundle-windows-aarch64] needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64]
runs-on: runs-on:
- self-mini-macos - self-mini-macos
steps: steps:

View File

@@ -38,26 +38,6 @@ jobs:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_RELEASE_NOTES }} webhook-url: ${{ secrets.DISCORD_WEBHOOK_RELEASE_NOTES }}
content: ${{ steps.get-content.outputs.string }} content: ${{ steps.get-content.outputs.string }}
publish-winget:
runs-on:
- ubuntu-latest
steps:
- name: Set Package Name
id: set-package-name
run: |
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
PACKAGE_NAME=ZedIndustries.Zed.Preview
else
PACKAGE_NAME=ZedIndustries.Zed
fi
echo "PACKAGE_NAME=$PACKAGE_NAME" >> "$GITHUB_OUTPUT"
- uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f # v2
with:
identifier: ${{ steps.set-package-name.outputs.PACKAGE_NAME }}
max-versions-to-keep: 5
token: ${{ secrets.WINGET_TOKEN }}
send_release_notes_email: send_release_notes_email:
if: false && github.repository_owner == 'zed-industries' && !github.event.release.prerelease if: false && github.repository_owner == 'zed-industries' && !github.event.release.prerelease
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -1,13 +0,0 @@
# Generated from xtask::workflows::compare_perf
# Rebuild with `cargo xtask workflows`.
name: compare_perf
on:
workflow_dispatch: {}
jobs:
run_perf:
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false

View File

@@ -1,40 +1,42 @@
# Generated from xtask::workflows::danger name: Danger
# Rebuild with `cargo xtask workflows`.
name: danger
on: on:
pull_request: pull_request:
branches: [main]
types: types:
- opened - opened
- synchronize - synchronize
- reopened - reopened
- edited - edited
branches:
- main
jobs: jobs:
danger: danger:
if: github.repository_owner == 'zed-industries' if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404 runs-on: namespace-profile-2x4-ubuntu-2404
steps: steps:
- name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with: - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
clean: false with:
- name: steps::setup_pnpm version: 9
uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
with: - name: Setup Node
version: '9' uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
- name: steps::setup_node with:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 node-version: "20"
with: cache: "pnpm"
node-version: '20' cache-dependency-path: "script/danger/pnpm-lock.yaml"
cache: pnpm
cache-dependency-path: script/danger/pnpm-lock.yaml - run: pnpm install --dir script/danger
- name: danger::install_deps
run: pnpm install --dir script/danger - name: Run Danger
shell: bash -euxo pipefail {0} run: pnpm run --dir script/danger danger ci
- name: danger::run env:
run: pnpm run --dir script/danger danger ci # This GitHub token is not used, but the value needs to be here to prevent
shell: bash -euxo pipefail {0} # Danger from throwing an error.
env: GITHUB_TOKEN: "not_a_real_token"
GITHUB_TOKEN: not_a_real_token # All requests are instead proxied through an instance of
DANGER_GITHUB_API_BASE_URL: https://danger-proxy.fly.dev/github # https://github.com/maxdeviant/danger-proxy that allows Danger to securely
# authenticate with GitHub while still being able to run on PRs from forks.
DANGER_GITHUB_API_BASE_URL: "https://danger-proxy.fly.dev/github"

View File

@@ -22,8 +22,6 @@ jobs:
- name: Build docs - name: Build docs
uses: ./.github/actions/build_docs uses: ./.github/actions/build_docs
env:
DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }}
- name: Deploy Docs - name: Deploy Docs
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3 uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3

View File

@@ -49,7 +49,7 @@ jobs:
- name: Limit target directory size - name: Limit target directory size
shell: bash -euxo pipefail {0} shell: bash -euxo pipefail {0}
run: script/clear-target-dir-if-larger-than 300 run: script/clear-target-dir-if-larger-than 100
- name: Run tests - name: Run tests
shell: bash -euxo pipefail {0} shell: bash -euxo pipefail {0}

69
.github/workflows/nix.yml vendored Normal file
View File

@@ -0,0 +1,69 @@
name: "Nix build"
on:
workflow_call:
inputs:
flake-output:
type: string
default: "default"
cachix-filter:
type: string
default: ""
jobs:
nix-build:
timeout-minutes: 60
name: (${{ matrix.system.os }}) Nix Build
continue-on-error: true # TODO: remove when we want this to start blocking CI
strategy:
fail-fast: false
matrix:
system:
- os: x86 Linux
runner: namespace-profile-16x32-ubuntu-2204
install_nix: true
- os: arm Mac
runner: [macOS, ARM64, test]
install_nix: false
if: github.repository_owner == 'zed-industries'
runs-on: ${{ matrix.system.runner }}
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: 1 # breaks the livekit rust sdk examples which we don't actually depend on
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# on our macs we manually install nix. for some reason the cachix action is running
# under a non-login /bin/bash shell which doesn't source the proper script to add the
# nix profile to PATH, so we manually add them here
- name: Set path
if: ${{ ! matrix.system.install_nix }}
run: |
echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
- uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
if: ${{ matrix.system.install_nix }}
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
with:
name: zed
authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
pushFilter: "${{ inputs.cachix-filter }}"
cachixArgs: "-v"
- run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config
- name: Limit /nix/store to 50GB on macs
if: ${{ ! matrix.system.install_nix }}
run: |
if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
nix-collect-garbage -d || true
fi

View File

@@ -1,97 +0,0 @@
# Generated from xtask::workflows::nix_build
# Rebuild with `cargo xtask workflows`.
name: nix_build
env:
CARGO_TERM_COLOR: always
RUST_BACKTRACE: '1'
CARGO_INCREMENTAL: '0'
on:
pull_request:
branches:
- '**'
paths:
- nix/**
- flake.*
- Cargo.*
- rust-toolchain.toml
- .cargo/config.toml
push:
branches:
- main
- v[0-9]+.[0-9]+.x
paths:
- nix/**
- flake.*
- Cargo.*
- rust-toolchain.toml
- .cargo/config.toml
workflow_call: {}
jobs:
build_nix_linux_x86_64:
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-32x64-ubuntu-2004
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: '1'
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: nix_build::install_nix
uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
- name: nix_build::cachix_action
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
with:
name: zed
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
cachixArgs: -v
pushFilter: -zed-editor-[0-9.]*-nightly
- name: nix_build::build
run: nix build .#debug -L --accept-flake-config
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
build_nix_mac_aarch64:
if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: '1'
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: nix_build::set_path
run: |
echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
shell: bash -euxo pipefail {0}
- name: nix_build::cachix_action
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
with:
name: zed
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
cachixArgs: -v
pushFilter: -zed-editor-[0-9.]*-nightly
- name: nix_build::build
run: nix build .#debug -L --accept-flake-config
shell: bash -euxo pipefail {0}
- name: nix_build::limit_store
run: |-
if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
nix-collect-garbage -d || true
fi
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

View File

@@ -1,113 +1,93 @@
# Generated from xtask::workflows::release_nightly name: Release Nightly
# Rebuild with `cargo xtask workflows`.
name: release_nightly on:
schedule:
    # Fire every day at 7:00am UTC (roughly before the EU workday begins and after the US workday ends)
- cron: "0 7 * * *"
push:
tags:
- "nightly"
env: env:
CARGO_TERM_COLOR: always CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0' CARGO_INCREMENTAL: 0
RUST_BACKTRACE: '1' RUST_BACKTRACE: 1
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
on:
push:
tags:
- nightly
schedule:
- cron: 0 7 * * *
jobs: jobs:
check_style: style:
if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
fetch-depth: 0
- name: steps::cargo_fmt
run: cargo fmt --all -- --check
shell: bash -euxo pipefail {0}
- name: ./script/clippy
run: ./script/clippy
shell: bash -euxo pipefail {0}
timeout-minutes: 60 timeout-minutes: 60
run_tests_mac: name: Check formatting and Clippy lints
if: github.repository_owner == 'zed-industries' if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos runs-on:
- self-hosted
- macOS
steps: steps:
- name: steps::checkout_repo - name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with: with:
clean: false clean: false
- name: steps::setup_cargo_config fetch-depth: 0
run: |
mkdir -p ./../.cargo - name: Run style checks
cp ./.cargo/ci-config.toml ./../.cargo/config.toml uses: ./.github/actions/check_style
shell: bash -euxo pipefail {0}
- name: steps::setup_node - name: Run clippy
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 run: ./script/clippy
with:
node-version: '20' tests:
- name: steps::clippy
run: ./script/clippy
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60 timeout-minutes: 60
run_tests_windows: name: Run tests
if: github.repository_owner == 'zed-industries' if: github.repository_owner == 'zed-industries'
runs-on: self-32vcpu-windows-2022 runs-on:
- self-hosted
- macOS
needs: style
steps: steps:
- name: steps::checkout_repo - name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with: with:
clean: false clean: false
- name: steps::setup_cargo_config
run: | - name: Run tests
New-Item -ItemType Directory -Path "./../.cargo" -Force uses: ./.github/actions/run_tests
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
shell: pwsh windows-tests:
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::clippy
run: ./script/clippy.ps1
shell: pwsh
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: pwsh
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than.ps1 250
shell: pwsh
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: pwsh
- name: steps::cleanup_cargo_config
if: always()
run: |
Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
shell: pwsh
timeout-minutes: 60 timeout-minutes: 60
bundle_mac_nightly_x86_64: name: Run tests on Windows
needs:
- check_style
- run_tests_mac
if: github.repository_owner == 'zed-industries' if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos runs-on: [self-32vcpu-windows-2022]
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
- name: Configure CI
run: |
New-Item -ItemType Directory -Path "./../.cargo" -Force
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
- name: Run tests
uses: ./.github/actions/run_tests_windows
- name: Limit target directory size
run: ./script/clear-target-dir-if-larger-than.ps1 1024
- name: Clean CI config file
if: always()
run: Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
bundle-mac:
timeout-minutes: 60
name: Create a macOS bundle
if: github.repository_owner == 'zed-industries'
runs-on:
- self-mini-macos
needs: tests
env: env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
@@ -115,162 +95,161 @@ jobs:
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
steps: steps:
- name: steps::checkout_repo - name: Install Node
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with: with:
clean: false node-version: "18"
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 - name: Checkout repo
with: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
node-version: '20' with:
- name: steps::setup_sentry clean: false
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with: - name: Set release channel to nightly
token: ${{ secrets.SENTRY_AUTH_TOKEN }} run: |
- name: steps::clear_target_dir_if_large set -eu
run: ./script/clear-target-dir-if-larger-than 300 version=$(git rev-parse --short HEAD)
shell: bash -euxo pipefail {0} echo "Publishing version: ${version} on release channel nightly"
- name: release_nightly::set_release_channel_to_nightly echo "nightly" > crates/zed/RELEASE_CHANNEL
run: |
set -eu - name: Setup Sentry CLI
version=$(git rev-parse --short HEAD) uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
echo "Publishing version: ${version} on release channel nightly" with:
echo "nightly" > crates/zed/RELEASE_CHANNEL token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac - name: Create macOS app bundle
run: ./script/bundle-mac x86_64-apple-darwin run: script/bundle-mac
shell: bash -euxo pipefail {0}
- name: release_nightly::upload_zed_nightly - name: Upload Zed Nightly
run: script/upload-nightly macos x86_64 run: script/upload-nightly macos
shell: bash -euxo pipefail {0}
bundle-linux-x86:
timeout-minutes: 60 timeout-minutes: 60
bundle_mac_nightly_aarch64: name: Create a Linux *.tar.gz bundle for x86
needs:
- check_style
- run_tests_mac
if: github.repository_owner == 'zed-industries' if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos runs-on:
env: - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} needs: tests
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
steps: steps:
- name: steps::checkout_repo - name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with: with:
clean: false clean: false
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 - name: Add Rust to the PATH
with: run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH"
node-version: '20'
- name: steps::setup_sentry - name: Install Linux dependencies
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b run: ./script/linux && ./script/install-mold 2.34.0
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: Setup Sentry CLI
- name: steps::clear_target_dir_if_large uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
run: ./script/clear-target-dir-if-larger-than 300 with:
shell: bash -euxo pipefail {0} token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
- name: release_nightly::set_release_channel_to_nightly
run: | - name: Limit target directory size
set -eu run: script/clear-target-dir-if-larger-than 100
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly" - name: Set release channel to nightly
echo "nightly" > crates/zed/RELEASE_CHANNEL run: |
shell: bash -euxo pipefail {0} set -euo pipefail
- name: run_bundling::bundle_mac version=$(git rev-parse --short HEAD)
run: ./script/bundle-mac aarch64-apple-darwin echo "Publishing version: ${version} on release channel nightly"
shell: bash -euxo pipefail {0} echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: release_nightly::upload_zed_nightly
run: script/upload-nightly macos aarch64 - name: Create Linux .tar.gz bundle
shell: bash -euxo pipefail {0} run: script/bundle-linux
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz
bundle-linux-arm:
timeout-minutes: 60 timeout-minutes: 60
bundle_linux_nightly_x86_64: name: Create a Linux *.tar.gz bundle for ARM
needs:
- check_style
- run_tests_mac
if: github.repository_owner == 'zed-industries' if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-32x64-ubuntu-2004 runs-on:
- namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc
needs: tests
steps: steps:
- name: steps::checkout_repo - name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with: with:
clean: false clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b - name: Install Linux dependencies
with: run: ./script/linux
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: release_nightly::add_rust_to_path - name: Setup Sentry CLI
run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
shell: bash -euxo pipefail {0} with:
- name: ./script/linux token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
run: ./script/linux
shell: bash -euxo pipefail {0} - name: Limit target directory size
- name: ./script/install-mold run: script/clear-target-dir-if-larger-than 100
run: ./script/install-mold
shell: bash -euxo pipefail {0} - name: Set release channel to nightly
- name: steps::clear_target_dir_if_large run: |
run: ./script/clear-target-dir-if-larger-than 100 set -euo pipefail
shell: bash -euxo pipefail {0} version=$(git rev-parse --short HEAD)
- name: release_nightly::set_release_channel_to_nightly echo "Publishing version: ${version} on release channel nightly"
run: | echo "nightly" > crates/zed/RELEASE_CHANNEL
set -eu
version=$(git rev-parse --short HEAD) - name: Create Linux .tar.gz bundle
echo "Publishing version: ${version} on release channel nightly" run: script/bundle-linux
echo "nightly" > crates/zed/RELEASE_CHANNEL
shell: bash -euxo pipefail {0} - name: Upload Zed Nightly
- name: ./script/bundle-linux run: script/upload-nightly linux-targz
run: ./script/bundle-linux
shell: bash -euxo pipefail {0} freebsd:
- name: release_nightly::upload_zed_nightly
run: script/upload-nightly linux-targz x86_64
shell: bash -euxo pipefail {0}
timeout-minutes: 60 timeout-minutes: 60
bundle_linux_nightly_aarch64: if: false && github.repository_owner == 'zed-industries'
needs: runs-on: github-8vcpu-ubuntu-2404
- check_style needs: tests
- run_tests_mac name: Build Zed on FreeBSD
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
steps: steps:
- name: steps::checkout_repo - uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - name: Build FreeBSD remote-server
with: id: freebsd-build
clean: false uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
- name: steps::setup_sentry with:
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b # envs: "MYTOKEN MYTOKEN2"
with: usesh: true
token: ${{ secrets.SENTRY_AUTH_TOKEN }} release: 13.5
- name: release_nightly::add_rust_to_path copyback: true
run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" prepare: |
shell: bash -euxo pipefail {0} pkg install -y \
- name: ./script/linux bash curl jq git \
run: ./script/linux rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli
shell: bash -euxo pipefail {0} run: |
- name: steps::clear_target_dir_if_large freebsd-version
run: ./script/clear-target-dir-if-larger-than 100 sysctl hw.model
shell: bash -euxo pipefail {0} sysctl hw.ncpu
- name: release_nightly::set_release_channel_to_nightly sysctl hw.physmem
run: | sysctl hw.usermem
set -eu git config --global --add safe.directory /home/runner/work/zed/zed
version=$(git rev-parse --short HEAD) rustup-init --profile minimal --default-toolchain none -y
echo "Publishing version: ${version} on release channel nightly" . "$HOME/.cargo/env"
echo "nightly" > crates/zed/RELEASE_CHANNEL ./script/bundle-freebsd
shell: bash -euxo pipefail {0} mkdir -p out/
- name: ./script/bundle-linux mv "target/zed-remote-server-freebsd-x86_64.gz" out/
run: ./script/bundle-linux rm -rf target/
shell: bash -euxo pipefail {0} cargo clean
- name: release_nightly::upload_zed_nightly
run: script/upload-nightly linux-targz aarch64 - name: Upload Zed Nightly
shell: bash -euxo pipefail {0} run: script/upload-nightly freebsd
bundle-nix:
name: Build and cache Nix package
needs: tests
secrets: inherit
uses: ./.github/workflows/nix.yml
bundle-windows-x64:
timeout-minutes: 60 timeout-minutes: 60
bundle_windows_nightly_x86_64: name: Create a Windows installer
needs:
- check_style
- run_tests_windows
if: github.repository_owner == 'zed-industries' if: github.repository_owner == 'zed-industries'
runs-on: self-32vcpu-windows-2022 runs-on: [self-32vcpu-windows-2022]
needs: windows-tests
env: env:
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
@@ -280,177 +259,65 @@ jobs:
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }} ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256 FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256 TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
steps: steps:
- name: steps::checkout_repo - name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with: with:
clean: false clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b - name: Set release channel to nightly
with: working-directory: ${{ env.ZED_WORKSPACE }}
token: ${{ secrets.SENTRY_AUTH_TOKEN }} run: |
- name: release_nightly::set_release_channel_to_nightly $ErrorActionPreference = "Stop"
run: | $version = git rev-parse --short HEAD
$ErrorActionPreference = "Stop" Write-Host "Publishing version: $version on release channel nightly"
$version = git rev-parse --short HEAD "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
Write-Host "Publishing version: $version on release channel nightly"
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" - name: Setup Sentry CLI
shell: pwsh uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2
working-directory: ${{ env.ZED_WORKSPACE }} with:
- name: release_nightly::build_zed_installer token: ${{ SECRETS.SENTRY_AUTH_TOKEN }}
run: script/bundle-windows.ps1 -Architecture x86_64
shell: pwsh - name: Build Zed installer
working-directory: ${{ env.ZED_WORKSPACE }} working-directory: ${{ env.ZED_WORKSPACE }}
- name: release_nightly::upload_zed_nightly_windows run: script/bundle-windows.ps1
run: script/upload-nightly.ps1 -Architecture x86_64
shell: pwsh - name: Upload Zed Nightly
working-directory: ${{ env.ZED_WORKSPACE }} working-directory: ${{ env.ZED_WORKSPACE }}
timeout-minutes: 60 run: script/upload-nightly.ps1 windows
bundle_windows_nightly_aarch64:
needs: update-nightly-tag:
- check_style name: Update nightly tag
- run_tests_windows
if: github.repository_owner == 'zed-industries'
runs-on: self-32vcpu-windows-2022
env:
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: release_nightly::set_release_channel_to_nightly
run: |
$ErrorActionPreference = "Stop"
$version = git rev-parse --short HEAD
Write-Host "Publishing version: $version on release channel nightly"
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: release_nightly::build_zed_installer
run: script/bundle-windows.ps1 -Architecture aarch64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: release_nightly::upload_zed_nightly_windows
run: script/upload-nightly.ps1 -Architecture aarch64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
timeout-minutes: 60
build_nix_linux_x86_64:
needs:
- check_style
- run_tests_mac
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-32x64-ubuntu-2004
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: '1'
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: nix_build::install_nix
uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
- name: nix_build::cachix_action
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
with:
name: zed
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
cachixArgs: -v
- name: nix_build::build
run: nix build .#default -L --accept-flake-config
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
build_nix_mac_aarch64:
needs:
- check_style
- run_tests_mac
if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: '1'
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: nix_build::set_path
run: |
echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
shell: bash -euxo pipefail {0}
- name: nix_build::cachix_action
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
with:
name: zed
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
cachixArgs: -v
- name: nix_build::build
run: nix build .#default -L --accept-flake-config
shell: bash -euxo pipefail {0}
- name: nix_build::limit_store
run: |-
if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
nix-collect-garbage -d || true
fi
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
update_nightly_tag:
needs:
- bundle_mac_nightly_x86_64
- bundle_mac_nightly_aarch64
- bundle_linux_nightly_x86_64
- bundle_linux_nightly_aarch64
- bundle_windows_nightly_x86_64
- bundle_windows_nightly_aarch64
if: github.repository_owner == 'zed-industries' if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404 runs-on: namespace-profile-2x4-ubuntu-2404
needs:
- bundle-mac
- bundle-linux-x86
- bundle-linux-arm
- bundle-windows-x64
steps: steps:
- name: steps::checkout_repo - name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with: with:
clean: false fetch-depth: 0
fetch-depth: 0
- name: release_nightly::update_nightly_tag - name: Update nightly tag
run: | run: |
if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
echo "Nightly tag already points to current commit. Skipping tagging." echo "Nightly tag already points to current commit. Skipping tagging."
exit 0 exit 0
fi fi
git config user.name github-actions git config user.name github-actions
git config user.email github-actions@github.com git config user.email github-actions@github.com
git tag -f nightly git tag -f nightly
git push origin nightly --force git push origin nightly --force
shell: bash -euxo pipefail {0}
- name: release_nightly::create_sentry_release - name: Create Sentry release
uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3
with: env:
environment: production SENTRY_ORG: zed-dev
env: SENTRY_PROJECT: zed
SENTRY_ORG: zed-dev SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_PROJECT: zed with:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} environment: production
timeout-minutes: 60

View File

@@ -1,236 +0,0 @@
# Generated from xtask::workflows::run_bundling
# Rebuild with `cargo xtask workflows`.
name: run_bundling
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
on:
pull_request:
types:
- labeled
- synchronize
jobs:
bundle_mac_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-mini-macos
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac
run: ./script/bundle-mac x86_64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
path: target/x86_64-apple-darwin/release/Zed.dmg
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
path: target/zed-remote-server-macos-x86_64.gz
timeout-minutes: 60
bundle_mac_arm64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-mini-macos
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac
run: ./script/bundle-mac aarch64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed.dmg
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
path: target/zed-remote-server-macos-aarch64.gz
timeout-minutes: 60
bundle_linux_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: namespace-profile-32x64-ubuntu-2004
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
path: target/release/zed-remote-server-*.tar.gz
timeout-minutes: 60
bundle_linux_arm64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
path: target/release/zed-remote-server-*.tar.gz
timeout-minutes: 60
bundle_windows_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-32vcpu-windows-2022
env:
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows
run: script/bundle-windows.ps1 -Architecture x86_64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
path: ${{ env.SETUP_PATH }}
timeout-minutes: 60
bundle_windows_arm64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-32vcpu-windows-2022
env:
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows
run: script/bundle-windows.ps1 -Architecture aarch64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
path: ${{ env.SETUP_PATH }}
timeout-minutes: 60
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: true

View File

@@ -1,549 +0,0 @@
# Generated from xtask::workflows::run_tests
# Rebuild with `cargo xtask workflows`.
name: run_tests
env:
CARGO_TERM_COLOR: always
RUST_BACKTRACE: '1'
CARGO_INCREMENTAL: '0'
on:
pull_request:
branches:
- '**'
push:
branches:
- main
- v[0-9]+.[0-9]+.x
jobs:
orchestrate:
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
- id: filter
name: filter
run: |
if [ -z "$GITHUB_BASE_REF" ]; then
echo "Not in a PR context (i.e., push to main/stable/preview)"
COMPARE_REV="$(git rev-parse HEAD~1)"
else
echo "In a PR context comparing to pull_request.base.ref"
git fetch origin "$GITHUB_BASE_REF" --depth=350
COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
fi
CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
check_pattern() {
local output_name="$1"
local pattern="$2"
local grep_arg="$3"
echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
echo "${output_name}=false" >> "$GITHUB_OUTPUT"
}
check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/' -qP
check_pattern "run_docs" '^docs/' -qP
check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP
check_pattern "run_nix" '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' -qP
check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP
shell: bash -euxo pipefail {0}
outputs:
run_action_checks: ${{ steps.filter.outputs.run_action_checks }}
run_docs: ${{ steps.filter.outputs.run_docs }}
run_licenses: ${{ steps.filter.outputs.run_licenses }}
run_nix: ${{ steps.filter.outputs.run_nix }}
run_tests: ${{ steps.filter.outputs.run_tests }}
check_style:
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-4x8-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_pnpm
uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
with:
version: '9'
- name: ./script/prettier
run: ./script/prettier
shell: bash -euxo pipefail {0}
- name: ./script/check-todos
run: ./script/check-todos
shell: bash -euxo pipefail {0}
- name: ./script/check-keymaps
run: ./script/check-keymaps
shell: bash -euxo pipefail {0}
- name: run_tests::check_style::check_for_typos
uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1
with:
config: ./typos.toml
- name: steps::cargo_fmt
run: cargo fmt --all -- --check
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_windows:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: self-32vcpu-windows-2022
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
New-Item -ItemType Directory -Path "./../.cargo" -Force
Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
shell: pwsh
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::clippy
run: ./script/clippy.ps1
shell: pwsh
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: pwsh
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than.ps1 250
shell: pwsh
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: pwsh
- name: steps::cleanup_cargo_config
if: always()
run: |
Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
shell: pwsh
timeout-minutes: 60
run_tests_linux:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::clippy
run: ./script/clippy
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 100
shell: bash -euxo pipefail {0}
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_mac:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::clippy
run: ./script/clippy
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
doctests:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- id: run_doctests
name: run_tests::doctests::run_doctests
run: |
cargo test --workspace --doc --no-fail-fast
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
check_workspace_binaries:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: namespace-profile-8x16-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: cargo build -p collab
run: cargo build -p collab
shell: bash -euxo pipefail {0}
- name: cargo build --workspace --bins --examples
run: cargo build --workspace --bins --examples
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
check_postgres_and_protobuf_migrations:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files
run: git clean -df
shell: bash -euxo pipefail {0}
- name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge
run: |
if [ -z "$GITHUB_BASE_REF" ];
then
echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
else
git checkout -B temp
git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
fi
shell: bash -euxo pipefail {0}
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action
uses: bufbuild/buf-setup-action@v1
with:
version: v1.29.0
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
uses: bufbuild/buf-breaking-action@v1
with:
input: crates/proto/proto/
against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
timeout-minutes: 60
check_dependencies:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: run_tests::check_dependencies::install_cargo_machete
uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
with:
command: install
args: cargo-machete@0.7.0
- name: run_tests::check_dependencies::run_cargo_machete
uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
with:
command: machete
- name: run_tests::check_dependencies::check_cargo_lock
run: cargo update --locked --workspace
shell: bash -euxo pipefail {0}
- name: run_tests::check_dependencies::check_vulnerable_dependencies
if: github.event_name == 'pull_request'
uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8
with:
license-check: false
timeout-minutes: 60
check_docs:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_docs == 'true'
runs-on: namespace-profile-8x16-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: run_tests::check_docs::lychee_link_check
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
with:
args: --no-progress --exclude '^http' './docs/src/**/*'
fail: true
jobSummary: false
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: run_tests::check_docs::install_mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08
with:
mdbook-version: 0.4.37
- name: run_tests::check_docs::build_docs
run: |
mkdir -p target/deploy
mdbook build ./docs --dest-dir=../target/deploy/docs/
shell: bash -euxo pipefail {0}
- name: run_tests::check_docs::lychee_link_check
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
with:
args: --no-progress --exclude '^http' 'target/deploy/docs'
fail: true
jobSummary: false
timeout-minutes: 60
check_licenses:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_licenses == 'true'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: ./script/check-licenses
run: ./script/check-licenses
shell: bash -euxo pipefail {0}
- name: ./script/generate-licenses
run: ./script/generate-licenses
shell: bash -euxo pipefail {0}
check_scripts:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_action_checks == 'true'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: run_tests::check_scripts::run_shellcheck
run: ./script/shellcheck-scripts error
shell: bash -euxo pipefail {0}
- id: get_actionlint
name: run_tests::check_scripts::download_actionlint
run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
shell: bash -euxo pipefail {0}
- name: run_tests::check_scripts::run_actionlint
run: |
${{ steps.get_actionlint.outputs.executable }} -color
shell: bash -euxo pipefail {0}
- name: run_tests::check_scripts::check_xtask_workflows
run: |
cargo xtask workflows
if ! git diff --exit-code .github; then
echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
echo "Please run 'cargo xtask workflows' locally and commit the changes"
exit 1
fi
shell: bash -euxo pipefail {0}
timeout-minutes: 60
build_nix_linux_x86_64:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_nix == 'true'
runs-on: namespace-profile-32x64-ubuntu-2004
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: '1'
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: nix_build::install_nix
uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
- name: nix_build::cachix_action
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
with:
name: zed
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
cachixArgs: -v
pushFilter: -zed-editor-[0-9.]*-nightly
- name: nix_build::build
run: nix build .#debug -L --accept-flake-config
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
build_nix_mac_aarch64:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_nix == 'true'
runs-on: self-mini-macos
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: '1'
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: nix_build::set_path
run: |
echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
shell: bash -euxo pipefail {0}
- name: nix_build::cachix_action
uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
with:
name: zed
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
cachixArgs: -v
pushFilter: -zed-editor-[0-9.]*-nightly
- name: nix_build::build
run: nix build .#debug -L --accept-flake-config
shell: bash -euxo pipefail {0}
- name: nix_build::limit_store
run: |-
if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
nix-collect-garbage -d || true
fi
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
tests_pass:
needs:
- orchestrate
- check_style
- run_tests_windows
- run_tests_linux
- run_tests_mac
- doctests
- check_workspace_binaries
- check_postgres_and_protobuf_migrations
- check_dependencies
- check_docs
- check_licenses
- check_scripts
- build_nix_linux_x86_64
- build_nix_mac_aarch64
if: github.repository_owner == 'zed-industries' && always()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: run_tests::tests_pass
run: |
set +x
EXIT_CODE=0
check_result() {
echo "* $1: $2"
if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
}
check_result "orchestrate" "${{ needs.orchestrate.result }}"
check_result "check_style" "${{ needs.check_style.result }}"
check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}"
check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}"
check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
check_result "doctests" "${{ needs.doctests.result }}"
check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
check_result "check_postgres_and_protobuf_migrations" "${{ needs.check_postgres_and_protobuf_migrations.result }}"
check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
check_result "check_docs" "${{ needs.check_docs.result }}"
check_result "check_licenses" "${{ needs.check_licenses.result }}"
check_result "check_scripts" "${{ needs.check_scripts.result }}"
check_result "build_nix_linux_x86_64" "${{ needs.build_nix_linux_x86_64.result }}"
check_result "build_nix_mac_aarch64" "${{ needs.build_nix_mac_aarch64.result }}"
exit $EXIT_CODE
shell: bash -euxo pipefail {0}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

21
.github/workflows/script_checks.yml vendored Normal file
View File

@@ -0,0 +1,21 @@
name: Script
on:
pull_request:
paths:
- "script/**"
push:
branches:
- main
jobs:
shellcheck:
name: "ShellCheck Scripts"
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Shellcheck ./scripts
run: |
./script/shellcheck-scripts error

View File

@@ -63,7 +63,7 @@ jobs:
- name: Run unit evals - name: Run unit evals
shell: bash -euxo pipefail {0} shell: bash -euxo pipefail {0}
run: cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)' run: cargo nextest run --workspace --no-fail-fast --features eval --no-capture -E 'test(::eval_)'
env: env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}

1
.gitignore vendored
View File

@@ -25,7 +25,6 @@
/crates/collab/seed.json /crates/collab/seed.json
/crates/theme/schemas/theme.json /crates/theme/schemas/theme.json
/crates/zed/resources/flatpak/flatpak-cargo-sources.json /crates/zed/resources/flatpak/flatpak-cargo-sources.json
/crates/project_panel/benches/linux_repo_snapshot.txt
/dev.zed.Zed*.json /dev.zed.Zed*.json
/node_modules/ /node_modules/
/plugins/bin /plugins/bin

View File

@@ -48,7 +48,7 @@
"remove_trailing_whitespace_on_save": true, "remove_trailing_whitespace_on_save": true,
"ensure_final_newline_on_save": true, "ensure_final_newline_on_save": true,
"file_scan_exclusions": [ "file_scan_exclusions": [
"crates/agent/src/edit_agent/evals/fixtures", "crates/assistant_tools/src/edit_agent/evals/fixtures",
"crates/eval/worktrees/", "crates/eval/worktrees/",
"crates/eval/repos/", "crates/eval/repos/",
"**/.git", "**/.git",

6742
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -6,6 +6,7 @@ members = [
"crates/action_log", "crates/action_log",
"crates/activity_indicator", "crates/activity_indicator",
"crates/agent", "crates/agent",
"crates/agent2",
"crates/agent_servers", "crates/agent_servers",
"crates/agent_settings", "crates/agent_settings",
"crates/agent_ui", "crates/agent_ui",
@@ -13,9 +14,11 @@ members = [
"crates/anthropic", "crates/anthropic",
"crates/askpass", "crates/askpass",
"crates/assets", "crates/assets",
"crates/assistant_text_thread", "crates/assistant_context",
"crates/assistant_slash_command", "crates/assistant_slash_command",
"crates/assistant_slash_commands", "crates/assistant_slash_commands",
"crates/assistant_tool",
"crates/assistant_tools",
"crates/audio", "crates/audio",
"crates/auto_update", "crates/auto_update",
"crates/auto_update_helper", "crates/auto_update_helper",
@@ -70,7 +73,6 @@ members = [
"crates/file_finder", "crates/file_finder",
"crates/file_icons", "crates/file_icons",
"crates/fs", "crates/fs",
"crates/fs_benchmarks",
"crates/fsevent", "crates/fsevent",
"crates/fuzzy", "crates/fuzzy",
"crates/git", "crates/git",
@@ -148,7 +150,6 @@ members = [
"crates/semantic_version", "crates/semantic_version",
"crates/session", "crates/session",
"crates/settings", "crates/settings",
"crates/settings_json",
"crates/settings_macros", "crates/settings_macros",
"crates/settings_profile_selector", "crates/settings_profile_selector",
"crates/settings_ui", "crates/settings_ui",
@@ -163,7 +164,6 @@ members = [
"crates/sum_tree", "crates/sum_tree",
"crates/supermaven", "crates/supermaven",
"crates/supermaven_api", "crates/supermaven_api",
"crates/codestral",
"crates/svg_preview", "crates/svg_preview",
"crates/system_specs", "crates/system_specs",
"crates/tab_switcher", "crates/tab_switcher",
@@ -220,6 +220,7 @@ members = [
# #
"tooling/perf", "tooling/perf",
"tooling/workspace-hack",
"tooling/xtask", "tooling/xtask",
] ]
default-members = ["crates/zed"] default-members = ["crates/zed"]
@@ -238,6 +239,7 @@ acp_tools = { path = "crates/acp_tools" }
acp_thread = { path = "crates/acp_thread" } acp_thread = { path = "crates/acp_thread" }
action_log = { path = "crates/action_log" } action_log = { path = "crates/action_log" }
agent = { path = "crates/agent" } agent = { path = "crates/agent" }
agent2 = { path = "crates/agent2" }
activity_indicator = { path = "crates/activity_indicator" } activity_indicator = { path = "crates/activity_indicator" }
agent_ui = { path = "crates/agent_ui" } agent_ui = { path = "crates/agent_ui" }
agent_settings = { path = "crates/agent_settings" } agent_settings = { path = "crates/agent_settings" }
@@ -247,9 +249,11 @@ ai_onboarding = { path = "crates/ai_onboarding" }
anthropic = { path = "crates/anthropic" } anthropic = { path = "crates/anthropic" }
askpass = { path = "crates/askpass" } askpass = { path = "crates/askpass" }
assets = { path = "crates/assets" } assets = { path = "crates/assets" }
assistant_text_thread = { path = "crates/assistant_text_thread" } assistant_context = { path = "crates/assistant_context" }
assistant_slash_command = { path = "crates/assistant_slash_command" } assistant_slash_command = { path = "crates/assistant_slash_command" }
assistant_slash_commands = { path = "crates/assistant_slash_commands" } assistant_slash_commands = { path = "crates/assistant_slash_commands" }
assistant_tool = { path = "crates/assistant_tool" }
assistant_tools = { path = "crates/assistant_tools" }
audio = { path = "crates/audio" } audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" } auto_update = { path = "crates/auto_update" }
auto_update_helper = { path = "crates/auto_update_helper" } auto_update_helper = { path = "crates/auto_update_helper" }
@@ -269,7 +273,7 @@ cloud_llm_client = { path = "crates/cloud_llm_client" }
cloud_zeta2_prompt = { path = "crates/cloud_zeta2_prompt" } cloud_zeta2_prompt = { path = "crates/cloud_zeta2_prompt" }
collab = { path = "crates/collab" } collab = { path = "crates/collab" }
collab_ui = { path = "crates/collab_ui" } collab_ui = { path = "crates/collab_ui" }
collections = { path = "crates/collections", version = "0.1.0" } collections = { path = "crates/collections", package = "zed-collections", version = "0.1.0" }
command_palette = { path = "crates/command_palette" } command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" } command_palette_hooks = { path = "crates/command_palette_hooks" }
component = { path = "crates/component" } component = { path = "crates/component" }
@@ -285,7 +289,7 @@ debug_adapter_extension = { path = "crates/debug_adapter_extension" }
debugger_tools = { path = "crates/debugger_tools" } debugger_tools = { path = "crates/debugger_tools" }
debugger_ui = { path = "crates/debugger_ui" } debugger_ui = { path = "crates/debugger_ui" }
deepseek = { path = "crates/deepseek" } deepseek = { path = "crates/deepseek" }
derive_refineable = { path = "crates/refineable/derive_refineable" } derive_refineable = { path = "crates/refineable/derive_refineable", package = "zed-derive-refineable", version = "0.1.0" }
diagnostics = { path = "crates/diagnostics" } diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" } editor = { path = "crates/editor" }
extension = { path = "crates/extension" } extension = { path = "crates/extension" }
@@ -304,10 +308,10 @@ git_ui = { path = "crates/git_ui" }
go_to_line = { path = "crates/go_to_line" } go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" } google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui", default-features = false } gpui = { path = "crates/gpui", default-features = false }
gpui_macros = { path = "crates/gpui_macros" } gpui_macros = { path = "crates/gpui_macros", package = "gpui-macros", version = "0.1.0" }
gpui_tokio = { path = "crates/gpui_tokio" } gpui_tokio = { path = "crates/gpui_tokio" }
html_to_markdown = { path = "crates/html_to_markdown" } html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" } http_client = { path = "crates/http_client", package = "zed-http-client", version = "0.1.0" }
http_client_tls = { path = "crates/http_client_tls" } http_client_tls = { path = "crates/http_client_tls" }
icons = { path = "crates/icons" } icons = { path = "crates/icons" }
image_viewer = { path = "crates/image_viewer" } image_viewer = { path = "crates/image_viewer" }
@@ -336,7 +340,7 @@ lsp = { path = "crates/lsp" }
markdown = { path = "crates/markdown" } markdown = { path = "crates/markdown" }
markdown_preview = { path = "crates/markdown_preview" } markdown_preview = { path = "crates/markdown_preview" }
svg_preview = { path = "crates/svg_preview" } svg_preview = { path = "crates/svg_preview" }
media = { path = "crates/media" } media = { path = "crates/media", package = "zed-media", version = "0.1.0" }
menu = { path = "crates/menu" } menu = { path = "crates/menu" }
migrator = { path = "crates/migrator" } migrator = { path = "crates/migrator" }
mistral = { path = "crates/mistral" } mistral = { path = "crates/mistral" }
@@ -353,7 +357,7 @@ outline = { path = "crates/outline" }
outline_panel = { path = "crates/outline_panel" } outline_panel = { path = "crates/outline_panel" }
panel = { path = "crates/panel" } panel = { path = "crates/panel" }
paths = { path = "crates/paths" } paths = { path = "crates/paths" }
perf = { path = "tooling/perf" } perf = { path = "tooling/perf", package = "zed-perf", version = "0.1.0" }
picker = { path = "crates/picker" } picker = { path = "crates/picker" }
plugin = { path = "crates/plugin" } plugin = { path = "crates/plugin" }
plugin_macros = { path = "crates/plugin_macros" } plugin_macros = { path = "crates/plugin_macros" }
@@ -365,7 +369,7 @@ project_symbols = { path = "crates/project_symbols" }
prompt_store = { path = "crates/prompt_store" } prompt_store = { path = "crates/prompt_store" }
proto = { path = "crates/proto" } proto = { path = "crates/proto" }
recent_projects = { path = "crates/recent_projects" } recent_projects = { path = "crates/recent_projects" }
refineable = { path = "crates/refineable" } refineable = { path = "crates/refineable", package = "zed-refineable", version = "0.1.0" }
release_channel = { path = "crates/release_channel" } release_channel = { path = "crates/release_channel" }
scheduler = { path = "crates/scheduler" } scheduler = { path = "crates/scheduler" }
remote = { path = "crates/remote" } remote = { path = "crates/remote" }
@@ -373,15 +377,14 @@ remote_server = { path = "crates/remote_server" }
repl = { path = "crates/repl" } repl = { path = "crates/repl" }
reqwest_client = { path = "crates/reqwest_client" } reqwest_client = { path = "crates/reqwest_client" }
rich_text = { path = "crates/rich_text" } rich_text = { path = "crates/rich_text" }
rodio = { git = "https://github.com/RustAudio/rodio", rev ="e2074c6c2acf07b57cf717e076bdda7a9ac6e70b", features = ["wav", "playback", "wav_output", "recording"] } rodio = { git = "https://github.com/RustAudio/rodio" }
rope = { path = "crates/rope" } rope = { path = "crates/rope" }
rpc = { path = "crates/rpc" } rpc = { path = "crates/rpc" }
rules_library = { path = "crates/rules_library" } rules_library = { path = "crates/rules_library" }
search = { path = "crates/search" } search = { path = "crates/search" }
semantic_version = { path = "crates/semantic_version" } semantic_version = { path = "crates/semantic_version", package = "zed-semantic-version", version = "0.1.0" }
session = { path = "crates/session" } session = { path = "crates/session" }
settings = { path = "crates/settings" } settings = { path = "crates/settings" }
settings_json = { path = "crates/settings_json" }
settings_macros = { path = "crates/settings_macros" } settings_macros = { path = "crates/settings_macros" }
settings_ui = { path = "crates/settings_ui" } settings_ui = { path = "crates/settings_ui" }
snippet = { path = "crates/snippet" } snippet = { path = "crates/snippet" }
@@ -392,10 +395,9 @@ sqlez_macros = { path = "crates/sqlez_macros" }
story = { path = "crates/story" } story = { path = "crates/story" }
storybook = { path = "crates/storybook" } storybook = { path = "crates/storybook" }
streaming_diff = { path = "crates/streaming_diff" } streaming_diff = { path = "crates/streaming_diff" }
sum_tree = { path = "crates/sum_tree" } sum_tree = { path = "crates/sum_tree", package = "zed-sum-tree", version = "0.1.0" }
supermaven = { path = "crates/supermaven" } supermaven = { path = "crates/supermaven" }
supermaven_api = { path = "crates/supermaven_api" } supermaven_api = { path = "crates/supermaven_api" }
codestral = { path = "crates/codestral" }
system_specs = { path = "crates/system_specs" } system_specs = { path = "crates/system_specs" }
tab_switcher = { path = "crates/tab_switcher" } tab_switcher = { path = "crates/tab_switcher" }
task = { path = "crates/task" } task = { path = "crates/task" }
@@ -416,8 +418,8 @@ ui = { path = "crates/ui" }
ui_input = { path = "crates/ui_input" } ui_input = { path = "crates/ui_input" }
ui_macros = { path = "crates/ui_macros" } ui_macros = { path = "crates/ui_macros" }
ui_prompt = { path = "crates/ui_prompt" } ui_prompt = { path = "crates/ui_prompt" }
util = { path = "crates/util" } util = { path = "crates/util", package = "zed-util", version = "0.1.0" }
util_macros = { path = "crates/util_macros" } util_macros = { path = "crates/util_macros", package = "zed-util-macros", version = "0.1.0" }
vercel = { path = "crates/vercel" } vercel = { path = "crates/vercel" }
vim = { path = "crates/vim" } vim = { path = "crates/vim" }
vim_mode_setting = { path = "crates/vim_mode_setting" } vim_mode_setting = { path = "crates/vim_mode_setting" }
@@ -440,7 +442,7 @@ zlog_settings = { path = "crates/zlog_settings" }
# External crates # External crates
# #
agent-client-protocol = { version = "0.7.0", features = ["unstable"] } agent-client-protocol = { version = "0.4.3", features = ["unstable"] }
aho-corasick = "1.1" aho-corasick = "1.1"
alacritty_terminal = "0.25.1-rc1" alacritty_terminal = "0.25.1-rc1"
any_vec = "0.14" any_vec = "0.14"
@@ -451,13 +453,12 @@ async-compat = "0.2.1"
async-compression = { version = "0.4", features = ["gzip", "futures-io"] } async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-dispatcher = "0.1" async-dispatcher = "0.1"
async-fs = "2.1" async-fs = "2.1"
async-lock = "2.1"
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" } async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553" }
async-recursion = "1.0.0" async-recursion = "1.0.0"
async-tar = "0.5.1" async-tar = "0.5.0"
async-task = "4.7" async-task = "4.7"
async-trait = "0.1" async-trait = "0.1"
async-tungstenite = "0.31.0" async-tungstenite = "0.29.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] } async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
aws-config = { version = "1.6.1", features = ["behavior-version-latest"] } aws-config = { version = "1.6.1", features = ["behavior-version-latest"] }
aws-credential-types = { version = "1.2.2", features = [ aws-credential-types = { version = "1.2.2", features = [
@@ -475,6 +476,7 @@ bitflags = "2.6.0"
blade-graphics = { version = "0.7.0" } blade-graphics = { version = "0.7.0" }
blade-macros = { version = "0.3.0" } blade-macros = { version = "0.3.0" }
blade-util = { version = "0.3.0" } blade-util = { version = "0.3.0" }
blake3 = "1.5.3"
bytes = "1.0" bytes = "1.0"
cargo_metadata = "0.19" cargo_metadata = "0.19"
cargo_toml = "0.21" cargo_toml = "0.21"
@@ -483,10 +485,10 @@ chrono = { version = "0.4", features = ["serde"] }
ciborium = "0.2" ciborium = "0.2"
circular-buffer = "1.0" circular-buffer = "1.0"
clap = { version = "4.4", features = ["derive"] } clap = { version = "4.4", features = ["derive"] }
cocoa = "=0.26.0" cocoa = "0.26"
cocoa-foundation = "=0.2.0" cocoa-foundation = "0.2.0"
convert_case = "0.8.0" convert_case = "0.8.0"
core-foundation = "=0.10.0" core-foundation = "0.10.0"
core-foundation-sys = "0.8.6" core-foundation-sys = "0.8.6"
core-video = { version = "0.4.3", features = ["metal"] } core-video = { version = "0.4.3", features = ["metal"] }
cpal = "0.16" cpal = "0.16"
@@ -508,7 +510,6 @@ fork = "0.2.0"
futures = "0.3" futures = "0.3"
futures-batch = "0.6.1" futures-batch = "0.6.1"
futures-lite = "1.13" futures-lite = "1.13"
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "0090c6b6ef82fff02bc8616645953e778d1acc08" }
git2 = { version = "0.20.1", default-features = false } git2 = { version = "0.20.1", default-features = false }
globset = "0.4" globset = "0.4"
handlebars = "4.3" handlebars = "4.3"
@@ -537,7 +538,7 @@ libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] } libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0" linkify = "0.10.0"
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] } log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "b71ab4eeb27d9758be8092020a46fe33fbca4e33" } lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "0874f8742fe55b4dc94308c1e3c0069710d8eeaf" }
mach2 = "0.5" mach2 = "0.5"
markup5ever_rcdom = "0.3.0" markup5ever_rcdom = "0.3.0"
metal = "0.29" metal = "0.29"
@@ -550,7 +551,7 @@ nix = "0.29"
num-format = "0.4.4" num-format = "0.4.4"
num-traits = "0.2" num-traits = "0.2"
objc = "0.2" objc = "0.2"
objc2-foundation = { version = "=0.3.1", default-features = false, features = [ objc2-foundation = { version = "0.3", default-features = false, features = [
"NSArray", "NSArray",
"NSAttributedString", "NSAttributedString",
"NSBundle", "NSBundle",
@@ -583,14 +584,14 @@ partial-json-fixer = "0.5.3"
parse_int = "0.9" parse_int = "0.9"
pciid-parser = "0.8.0" pciid-parser = "0.8.0"
pathdiff = "0.2" pathdiff = "0.2"
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" } pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
portable-pty = "0.9.0" portable-pty = "0.9.0"
postage = { version = "0.5", features = ["futures-traits"] } postage = { version = "0.5", features = ["futures-traits"] }
pretty_assertions = { version = "1.3.0", features = ["unstable"] } pretty_assertions = { version = "1.3.0", features = ["unstable"] }
@@ -648,11 +649,11 @@ sqlformat = "0.2"
stacksafe = "0.1" stacksafe = "0.1"
streaming-iterator = "0.1" streaming-iterator = "0.1"
strsim = "0.11" strsim = "0.11"
strum = { version = "0.27.2", features = ["derive"] } strum = { version = "0.27.0", features = ["derive"] }
subtle = "2.5.0" subtle = "2.5.0"
syn = { version = "2.0.101", features = ["full", "extra-traits", "visit-mut"] } syn = { version = "2.0.101", features = ["full", "extra-traits", "visit-mut"] }
sys-locale = "0.3.1" sys-locale = "0.3.1"
sysinfo = "0.37.0" sysinfo = "0.31.0"
take-until = "0.2.0" take-until = "0.2.0"
tempfile = "3.20.0" tempfile = "3.20.0"
thiserror = "2.0.12" thiserror = "2.0.12"
@@ -691,7 +692,7 @@ tree-sitter-python = "0.25"
tree-sitter-regex = "0.24" tree-sitter-regex = "0.24"
tree-sitter-ruby = "0.23" tree-sitter-ruby = "0.23"
tree-sitter-rust = "0.24" tree-sitter-rust = "0.24"
tree-sitter-typescript = { git = "https://github.com/zed-industries/tree-sitter-typescript", rev = "e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899" } # https://github.com/tree-sitter/tree-sitter-typescript/pull/347 tree-sitter-typescript = "0.23"
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" } tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
unicase = "2.6" unicase = "2.6"
unicode-script = "0.5.7" unicode-script = "0.5.7"
@@ -716,6 +717,7 @@ wasmtime-wasi = "29"
which = "6.0.0" which = "6.0.0"
windows-core = "0.61" windows-core = "0.61"
wit-component = "0.221" wit-component = "0.221"
workspace-hack = "0.1.0"
yawc = "0.2.5" yawc = "0.2.5"
zeroize = "1.8" zeroize = "1.8"
zstd = "0.11" zstd = "0.11"
@@ -776,10 +778,11 @@ notify = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5a
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" } notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" } windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" }
# Makes the workspace hack crate refer to the local one, but only when you're building locally
workspace-hack = { path = "tooling/workspace-hack" }
[profile.dev] [profile.dev]
split-debuginfo = "unpacked" split-debuginfo = "unpacked"
# https://github.com/rust-lang/cargo/issues/16104
incremental = false
codegen-units = 16 codegen-units = 16
# mirror configuration for crates compiled for the build platform # mirror configuration for crates compiled for the build platform
@@ -801,7 +804,7 @@ wasmtime = { opt-level = 3 }
activity_indicator = { codegen-units = 1 } activity_indicator = { codegen-units = 1 }
assets = { codegen-units = 1 } assets = { codegen-units = 1 }
breadcrumbs = { codegen-units = 1 } breadcrumbs = { codegen-units = 1 }
collections = { codegen-units = 1 } zed-collections = { codegen-units = 1 }
command_palette = { codegen-units = 1 } command_palette = { codegen-units = 1 }
command_palette_hooks = { codegen-units = 1 } command_palette_hooks = { codegen-units = 1 }
extension_cli = { codegen-units = 1 } extension_cli = { codegen-units = 1 }
@@ -821,11 +824,11 @@ outline = { codegen-units = 1 }
paths = { codegen-units = 1 } paths = { codegen-units = 1 }
prettier = { codegen-units = 1 } prettier = { codegen-units = 1 }
project_symbols = { codegen-units = 1 } project_symbols = { codegen-units = 1 }
refineable = { codegen-units = 1 } zed-refineable = { codegen-units = 1 }
release_channel = { codegen-units = 1 } release_channel = { codegen-units = 1 }
reqwest_client = { codegen-units = 1 } reqwest_client = { codegen-units = 1 }
rich_text = { codegen-units = 1 } rich_text = { codegen-units = 1 }
semantic_version = { codegen-units = 1 } zed-semantic-version = { codegen-units = 1 }
session = { codegen-units = 1 } session = { codegen-units = 1 }
snippet = { codegen-units = 1 } snippet = { codegen-units = 1 }
snippets_ui = { codegen-units = 1 } snippets_ui = { codegen-units = 1 }
@@ -905,5 +908,5 @@ ignored = [
"serde", "serde",
"component", "component",
"documented", "documented",
"sea-orm-macros", "workspace-hack",
] ]

2
Cross.toml Normal file
View File

@@ -0,0 +1,2 @@
[build]
dockerfile = "Dockerfile-cross"

17
Dockerfile-cross Normal file
View File

@@ -0,0 +1,17 @@
# syntax=docker/dockerfile:1
ARG CROSS_BASE_IMAGE
FROM ${CROSS_BASE_IMAGE}
WORKDIR /app
ARG TZ=Etc/UTC \
LANG=C.UTF-8 \
LC_ALL=C.UTF-8 \
DEBIAN_FRONTEND=noninteractive
ENV CARGO_TERM_COLOR=always
COPY script/install-mold script/
RUN ./script/install-mold "2.34.0"
COPY script/remote-server script/
RUN ./script/remote-server
COPY . .

2
Procfile.postgrest Normal file
View File

@@ -0,0 +1,2 @@
app: postgrest crates/collab/postgrest_app.conf
llm: postgrest crates/collab/postgrest_llm.conf

View File

@@ -1 +1,2 @@
postgrest_llm: postgrest crates/collab/postgrest_llm.conf
website: cd ../zed.dev; npm run dev -- --port=3000 website: cd ../zed.dev; npm run dev -- --port=3000

View File

@@ -9,10 +9,11 @@ Welcome to Zed, a high-performance, multiplayer code editor from the creators of
### Installation ### Installation
On macOS, Linux, and Windows you can [download Zed directly](https://zed.dev/download) or [install Zed via your local package manager](https://zed.dev/docs/linux#installing-via-a-package-manager). On macOS and Linux you can [download Zed directly](https://zed.dev/download) or [install Zed via your local package manager](https://zed.dev/docs/linux#installing-via-a-package-manager).
Other platforms are not yet available: Other platforms are not yet available:
- Windows ([tracking issue](https://github.com/zed-industries/zed/issues/5394))
- Web ([tracking issue](https://github.com/zed-industries/zed/issues/5396)) - Web ([tracking issue](https://github.com/zed-industries/zed/issues/5396))
### Developing Zed ### Developing Zed

View File

@@ -1,114 +0,0 @@
; This file contains a list of people who're interested in reviewing pull requests
; to certain parts of the code-base.
;
; This is mostly used internally for PR assignment, and may change over time.
;
; If you have permission to merge PRs (mostly equivalent to "do you work at Zed Industries"),
; we strongly encourage you to put your name in the "all" bucket, but you can also add yourself
; to other areas too.
<all>
= @ConradIrwin
= @maxdeviant
= @SomeoneToIgnore
= @probably-neb
= @danilo-leal
= @Veykril
= @kubkon
= @p1n3appl3
= @dinocosta
= @smitbarmase
= @cole-miller
= @HactarCE
vim
= @ConradIrwin
= @probably-neb
= @p1n3appl3
= @dinocosta
gpui
= @mikayla-maki
git
= @cole-miller
= @danilo-leal
linux
= @dvdsk
= @smitbarmase
= @p1n3appl3
= @cole-miller
= @probably-neb
windows
= @reflectronic
= @localcc
pickers
= @p1n3appl3
= @dvdsk
= @SomeoneToIgnore
audio
= @dvdsk
helix
= @kubkon
terminal
= @kubkon
= @Veykril
debugger
= @kubkon
= @osiewicz
= @Anthony-Eid
extension
= @kubkon
settings_ui
= @probably-neb
= @danilo-leal
= @Anthony-Eid
crashes
= @p1n3appl3
= @Veykril
ai
= @rtfeldman
= @danilo-leal
= @benbrandt
= @bennetbo
design
= @danilo-leal
multi_buffer
= @Veykril
= @SomeoneToIgnore
lsp
= @osiewicz
= @Veykril
= @smitbarmase
= @SomeoneToIgnore
languages
= @osiewicz
= @Veykril
= @smitbarmase
= @SomeoneToIgnore
= @probably-neb
project_panel
= @smitbarmase
tasks
= @SomeoneToIgnore
= @Veykril
docs
= @probably-neb

View File

@@ -1,3 +1,9 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M13.2806 4.66818L8.26042 1.76982C8.09921 1.67673 7.9003 1.67673 7.73909 1.76982L2.71918 4.66818C2.58367 4.74642 2.5 4.89112 2.5 5.04785V10.8924C2.5 11.0489 2.58367 11.1938 2.71918 11.2721L7.73934 14.1704C7.90054 14.2635 8.09946 14.2635 8.26066 14.1704L13.2808 11.2721C13.4163 11.1938 13.5 11.0491 13.5 10.8924V5.04785C13.5 4.89136 13.4163 4.74642 13.2808 4.66818H13.2806ZM12.9653 5.28212L8.11901 13.676C8.08626 13.7326 7.99977 13.7095 7.99977 13.6439V8.14771C7.99977 8.03788 7.94107 7.9363 7.84586 7.88115L3.08613 5.13317C3.02957 5.10041 3.05266 5.0139 3.11818 5.0139H12.8106C12.9483 5.0139 13.0343 5.1631 12.9655 5.28236H12.9653V5.28212Z" fill="#C4CAD4"/> <path opacity="0.6" d="M3.5 11V5.5L8.5 8L3.5 11Z" fill="black"/>
<path opacity="0.4" d="M8.5 14L3.5 11L8.5 8V14Z" fill="black"/>
<path opacity="0.6" d="M8.5 5.5H3.5L8.5 2.5L8.5 5.5Z" fill="black"/>
<path opacity="0.8" d="M8.5 5.5V2.5L13.5 5.5H8.5Z" fill="black"/>
<path opacity="0.2" d="M13.5 11L8.5 14L11 9.5L13.5 11Z" fill="black"/>
<path opacity="0.5" d="M13.5 11L11 9.5L13.5 5V11Z" fill="black"/>
<path d="M3.5 11V5L8.5 2.11325L13.5 5V11L8.5 13.8868L3.5 11Z" stroke="black"/>
</svg> </svg>

Before

Width:  |  Height:  |  Size: 769 B

After

Width:  |  Height:  |  Size: 583 B

View File

@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6.2 11H5C4.20435 11 3.44129 10.6839 2.87868 10.1213C2.31607 9.55871 2 8.79565 2 8C2 7.20435 2.31607 6.44129 2.87868 5.87868C3.44129 5.31607 4.20435 5 5 5H6.2" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M9.80005 5H11C11.7957 5 12.5588 5.31607 13.1214 5.87868C13.684 6.44129 14 7.20435 14 8C14 8.79565 13.684 9.55871 13.1214 10.1213C12.5588 10.6839 11.7957 11 11 11H9.80005" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5.6001 8H10.4001" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before

Width:  |  Height:  |  Size: 735 B

View File

@@ -1,4 +1 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg"> <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M2.6 5v3.6h3.6"/><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M13.4 11A5.4 5.4 0 0 0 8 5.6a5.4 5.4 0 0 0-3.6 1.38L2.6 8.6"/></svg>
<path d="M6.125 9.25001L3 6.125L6.125 3" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M3 6.125H9.56251C10.0139 6.125 10.4609 6.21391 10.878 6.38666C11.295 6.55942 11.674 6.81262 11.9932 7.13182C12.3124 7.45102 12.5656 7.82997 12.7383 8.24703C12.9111 8.66408 13 9.11108 13 9.5625C13 10.0139 12.9111 10.4609 12.7383 10.878C12.5656 11.295 12.3124 11.674 11.9932 11.9932C11.674 12.3124 11.295 12.5656 10.878 12.7383C10.4609 12.9111 10.0139 13 9.56251 13H7.375" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before

Width:  |  Height:  |  Size: 692 B

After

Width:  |  Height:  |  Size: 339 B

View File

@@ -30,8 +30,8 @@
"ctrl-+": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
"ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }], "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
"ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }], "ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
"ctrl-,": "zed::OpenSettings", "ctrl-,": "zed::OpenSettingsEditor",
"ctrl-alt-,": "zed::OpenSettingsFile", "ctrl-alt-,": "zed::OpenSettings",
"ctrl-q": "zed::Quit", "ctrl-q": "zed::Quit",
"f4": "debugger::Start", "f4": "debugger::Start",
"shift-f5": "debugger::Stop", "shift-f5": "debugger::Stop",
@@ -139,7 +139,7 @@
"find": "buffer_search::Deploy", "find": "buffer_search::Deploy",
"ctrl-f": "buffer_search::Deploy", "ctrl-f": "buffer_search::Deploy",
"ctrl-h": "buffer_search::DeployReplace", "ctrl-h": "buffer_search::DeployReplace",
"ctrl->": "agent::AddSelectionToThread", "ctrl->": "agent::QuoteSelection",
"ctrl-<": "assistant::InsertIntoEditor", "ctrl-<": "assistant::InsertIntoEditor",
"ctrl-alt-e": "editor::SelectEnclosingSymbol", "ctrl-alt-e": "editor::SelectEnclosingSymbol",
"ctrl-shift-backspace": "editor::GoToPreviousChange", "ctrl-shift-backspace": "editor::GoToPreviousChange",
@@ -243,7 +243,7 @@
"ctrl-shift-i": "agent::ToggleOptionsMenu", "ctrl-shift-i": "agent::ToggleOptionsMenu",
"ctrl-alt-shift-n": "agent::ToggleNewThreadMenu", "ctrl-alt-shift-n": "agent::ToggleNewThreadMenu",
"shift-alt-escape": "agent::ExpandMessageEditor", "shift-alt-escape": "agent::ExpandMessageEditor",
"ctrl->": "agent::AddSelectionToThread", "ctrl->": "agent::QuoteSelection",
"ctrl-alt-e": "agent::RemoveAllContext", "ctrl-alt-e": "agent::RemoveAllContext",
"ctrl-shift-e": "project_panel::ToggleFocus", "ctrl-shift-e": "project_panel::ToggleFocus",
"ctrl-shift-enter": "agent::ContinueThread", "ctrl-shift-enter": "agent::ContinueThread",
@@ -269,14 +269,14 @@
} }
}, },
{ {
"context": "AgentPanel && text_thread", "context": "AgentPanel && prompt_editor",
"bindings": { "bindings": {
"ctrl-n": "agent::NewTextThread", "ctrl-n": "agent::NewTextThread",
"ctrl-alt-t": "agent::NewThread" "ctrl-alt-t": "agent::NewThread"
} }
}, },
{ {
"context": "AgentPanel && acp_thread", "context": "AgentPanel && external_agent_thread",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"ctrl-n": "agent::NewExternalAgentThread", "ctrl-n": "agent::NewExternalAgentThread",
@@ -366,7 +366,7 @@
} }
}, },
{ {
"context": "RulesLibrary", "context": "PromptLibrary",
"bindings": { "bindings": {
"new": "rules_library::NewRule", "new": "rules_library::NewRule",
"ctrl-n": "rules_library::NewRule", "ctrl-n": "rules_library::NewRule",
@@ -374,6 +374,13 @@
"ctrl-w": "workspace::CloseWindow" "ctrl-w": "workspace::CloseWindow"
} }
}, },
{
"context": "SettingsWindow",
"use_key_equivalents": true,
"bindings": {
"ctrl-w": "workspace::CloseWindow"
}
},
{ {
"context": "BufferSearchBar", "context": "BufferSearchBar",
"bindings": { "bindings": {
@@ -491,8 +498,8 @@
"bindings": { "bindings": {
"ctrl-[": "editor::Outdent", "ctrl-[": "editor::Outdent",
"ctrl-]": "editor::Indent", "ctrl-]": "editor::Indent",
"shift-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], // Insert Cursor Above "shift-alt-up": "editor::AddSelectionAbove", // Insert Cursor Above
"shift-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], // Insert Cursor Below "shift-alt-down": "editor::AddSelectionBelow", // Insert Cursor Below
"ctrl-shift-k": "editor::DeleteLine", "ctrl-shift-k": "editor::DeleteLine",
"alt-up": "editor::MoveLineUp", "alt-up": "editor::MoveLineUp",
"alt-down": "editor::MoveLineDown", "alt-down": "editor::MoveLineDown",
@@ -527,15 +534,15 @@
"ctrl-k ctrl-l": "editor::ToggleFold", "ctrl-k ctrl-l": "editor::ToggleFold",
"ctrl-k ctrl-[": "editor::FoldRecursive", "ctrl-k ctrl-[": "editor::FoldRecursive",
"ctrl-k ctrl-]": "editor::UnfoldRecursive", "ctrl-k ctrl-]": "editor::UnfoldRecursive",
"ctrl-k ctrl-1": "editor::FoldAtLevel_1", "ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
"ctrl-k ctrl-2": "editor::FoldAtLevel_2", "ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
"ctrl-k ctrl-3": "editor::FoldAtLevel_3", "ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
"ctrl-k ctrl-4": "editor::FoldAtLevel_4", "ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
"ctrl-k ctrl-5": "editor::FoldAtLevel_5", "ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
"ctrl-k ctrl-6": "editor::FoldAtLevel_6", "ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
"ctrl-k ctrl-7": "editor::FoldAtLevel_7", "ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
"ctrl-k ctrl-8": "editor::FoldAtLevel_8", "ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
"ctrl-k ctrl-9": "editor::FoldAtLevel_9", "ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
"ctrl-k ctrl-0": "editor::FoldAll", "ctrl-k ctrl-0": "editor::FoldAll",
"ctrl-k ctrl-j": "editor::UnfoldAll", "ctrl-k ctrl-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions", "ctrl-space": "editor::ShowCompletions",
@@ -609,7 +616,7 @@
"ctrl-alt-b": "workspace::ToggleRightDock", "ctrl-alt-b": "workspace::ToggleRightDock",
"ctrl-b": "workspace::ToggleLeftDock", "ctrl-b": "workspace::ToggleLeftDock",
"ctrl-j": "workspace::ToggleBottomDock", "ctrl-j": "workspace::ToggleBottomDock",
"ctrl-alt-y": "workspace::ToggleAllDocks", "ctrl-alt-y": "workspace::CloseAllDocks",
"ctrl-alt-0": "workspace::ResetActiveDockSize", "ctrl-alt-0": "workspace::ResetActiveDockSize",
// For 0px parameter, uses UI font size value. // For 0px parameter, uses UI font size value.
"ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }], "ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
@@ -621,7 +628,7 @@
"ctrl-shift-f": "pane::DeploySearch", "ctrl-shift-f": "pane::DeploySearch",
"ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }], "ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
"ctrl-shift-t": "pane::ReopenClosedItem", "ctrl-shift-t": "pane::ReopenClosedItem",
"ctrl-k ctrl-s": "zed::OpenKeymap", "ctrl-k ctrl-s": "zed::OpenKeymapEditor",
"ctrl-k ctrl-t": "theme_selector::Toggle", "ctrl-k ctrl-t": "theme_selector::Toggle",
"ctrl-alt-super-p": "settings_profile_selector::Toggle", "ctrl-alt-super-p": "settings_profile_selector::Toggle",
"ctrl-t": "project_symbols::Toggle", "ctrl-t": "project_symbols::Toggle",
@@ -731,14 +738,6 @@
"tab": "editor::ComposeCompletion" "tab": "editor::ComposeCompletion"
} }
}, },
{
"context": "Editor && in_snippet",
"use_key_equivalents": true,
"bindings": {
"alt-right": "editor::NextSnippetTabstop",
"alt-left": "editor::PreviousSnippetTabstop"
}
},
// Bindings for accepting edit predictions // Bindings for accepting edit predictions
// //
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is // alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
@@ -1020,8 +1019,7 @@
"context": "CollabPanel", "context": "CollabPanel",
"bindings": { "bindings": {
"alt-up": "collab_panel::MoveChannelUp", "alt-up": "collab_panel::MoveChannelUp",
"alt-down": "collab_panel::MoveChannelDown", "alt-down": "collab_panel::MoveChannelDown"
"alt-enter": "collab_panel::OpenSelectedChannelNotes"
} }
}, },
{ {
@@ -1089,8 +1087,7 @@
{ {
"context": "StashList || (StashList > Picker > Editor)", "context": "StashList || (StashList > Picker > Editor)",
"bindings": { "bindings": {
"ctrl-shift-backspace": "stash_picker::DropStashItem", "ctrl-shift-backspace": "stash_picker::DropStashItem"
"ctrl-shift-v": "stash_picker::ShowStashItem"
} }
}, },
{ {
@@ -1135,8 +1132,7 @@
"ctrl-shift-space": "terminal::ToggleViMode", "ctrl-shift-space": "terminal::ToggleViMode",
"ctrl-shift-r": "terminal::RerunTask", "ctrl-shift-r": "terminal::RerunTask",
"ctrl-alt-r": "terminal::RerunTask", "ctrl-alt-r": "terminal::RerunTask",
"alt-t": "terminal::RerunTask", "alt-t": "terminal::RerunTask"
"ctrl-shift-5": "pane::SplitRight"
} }
}, },
{ {
@@ -1240,6 +1236,9 @@
"context": "Onboarding", "context": "Onboarding",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"ctrl-1": "onboarding::ActivateBasicsPage",
"ctrl-2": "onboarding::ActivateEditingPage",
"ctrl-3": "onboarding::ActivateAISetupPage",
"ctrl-enter": "onboarding::Finish", "ctrl-enter": "onboarding::Finish",
"alt-shift-l": "onboarding::SignIn", "alt-shift-l": "onboarding::SignIn",
"alt-shift-a": "onboarding::OpenAccount" "alt-shift-a": "onboarding::OpenAccount"
@@ -1251,69 +1250,5 @@
"bindings": { "bindings": {
"ctrl-shift-enter": "workspace::OpenWithSystem" "ctrl-shift-enter": "workspace::OpenWithSystem"
} }
},
{
"context": "SettingsWindow",
"use_key_equivalents": true,
"bindings": {
"ctrl-w": "workspace::CloseWindow",
"escape": "workspace::CloseWindow",
"ctrl-m": "settings_editor::Minimize",
"ctrl-f": "search::FocusSearch",
"left": "settings_editor::ToggleFocusNav",
"ctrl-shift-e": "settings_editor::ToggleFocusNav",
// todo(settings_ui): cut this down based on the max files and overflow UI
"ctrl-1": ["settings_editor::FocusFile", 0],
"ctrl-2": ["settings_editor::FocusFile", 1],
"ctrl-3": ["settings_editor::FocusFile", 2],
"ctrl-4": ["settings_editor::FocusFile", 3],
"ctrl-5": ["settings_editor::FocusFile", 4],
"ctrl-6": ["settings_editor::FocusFile", 5],
"ctrl-7": ["settings_editor::FocusFile", 6],
"ctrl-8": ["settings_editor::FocusFile", 7],
"ctrl-9": ["settings_editor::FocusFile", 8],
"ctrl-0": ["settings_editor::FocusFile", 9],
"ctrl-pageup": "settings_editor::FocusPreviousFile",
"ctrl-pagedown": "settings_editor::FocusNextFile"
}
},
{
"context": "StashDiff > Editor",
"bindings": {
"ctrl-space": "git::ApplyCurrentStash",
"ctrl-shift-space": "git::PopCurrentStash",
"ctrl-shift-backspace": "git::DropCurrentStash"
}
},
{
"context": "SettingsWindow > NavigationMenu",
"use_key_equivalents": true,
"bindings": {
"up": "settings_editor::FocusPreviousNavEntry",
"shift-tab": "settings_editor::FocusPreviousNavEntry",
"down": "settings_editor::FocusNextNavEntry",
"tab": "settings_editor::FocusNextNavEntry",
"right": "settings_editor::ExpandNavEntry",
"left": "settings_editor::CollapseNavEntry",
"pageup": "settings_editor::FocusPreviousRootNavEntry",
"pagedown": "settings_editor::FocusNextRootNavEntry",
"home": "settings_editor::FocusFirstNavEntry",
"end": "settings_editor::FocusLastNavEntry"
}
},
{
"context": "Zeta2Feedback > Editor",
"bindings": {
"enter": "editor::Newline",
"ctrl-enter up": "dev::Zeta2RatePredictionPositive",
"ctrl-enter down": "dev::Zeta2RatePredictionNegative"
}
},
{
"context": "Zeta2Context > Editor",
"bindings": {
"alt-left": "dev::Zeta2ContextGoBack",
"alt-right": "dev::Zeta2ContextGoForward"
}
} }
] ]

View File

@@ -39,8 +39,8 @@
"cmd-+": ["zed::IncreaseBufferFontSize", { "persist": false }], "cmd-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
"cmd--": ["zed::DecreaseBufferFontSize", { "persist": false }], "cmd--": ["zed::DecreaseBufferFontSize", { "persist": false }],
"cmd-0": ["zed::ResetBufferFontSize", { "persist": false }], "cmd-0": ["zed::ResetBufferFontSize", { "persist": false }],
"cmd-,": "zed::OpenSettings", "cmd-,": "zed::OpenSettingsEditor",
"cmd-alt-,": "zed::OpenSettingsFile", "cmd-alt-,": "zed::OpenSettings",
"cmd-q": "zed::Quit", "cmd-q": "zed::Quit",
"cmd-h": "zed::Hide", "cmd-h": "zed::Hide",
"alt-cmd-h": "zed::HideOthers", "alt-cmd-h": "zed::HideOthers",
@@ -163,7 +163,7 @@
"cmd-alt-f": "buffer_search::DeployReplace", "cmd-alt-f": "buffer_search::DeployReplace",
"cmd-alt-l": ["buffer_search::Deploy", { "selection_search_enabled": true }], "cmd-alt-l": ["buffer_search::Deploy", { "selection_search_enabled": true }],
"cmd-e": ["buffer_search::Deploy", { "focus": false }], "cmd-e": ["buffer_search::Deploy", { "focus": false }],
"cmd->": "agent::AddSelectionToThread", "cmd->": "agent::QuoteSelection",
"cmd-<": "assistant::InsertIntoEditor", "cmd-<": "assistant::InsertIntoEditor",
"cmd-alt-e": "editor::SelectEnclosingSymbol", "cmd-alt-e": "editor::SelectEnclosingSymbol",
"alt-enter": "editor::OpenSelectionsInMultibuffer" "alt-enter": "editor::OpenSelectionsInMultibuffer"
@@ -282,7 +282,7 @@
"cmd-shift-i": "agent::ToggleOptionsMenu", "cmd-shift-i": "agent::ToggleOptionsMenu",
"cmd-alt-shift-n": "agent::ToggleNewThreadMenu", "cmd-alt-shift-n": "agent::ToggleNewThreadMenu",
"shift-alt-escape": "agent::ExpandMessageEditor", "shift-alt-escape": "agent::ExpandMessageEditor",
"cmd->": "agent::AddSelectionToThread", "cmd->": "agent::QuoteSelection",
"cmd-alt-e": "agent::RemoveAllContext", "cmd-alt-e": "agent::RemoveAllContext",
"cmd-shift-e": "project_panel::ToggleFocus", "cmd-shift-e": "project_panel::ToggleFocus",
"cmd-ctrl-b": "agent::ToggleBurnMode", "cmd-ctrl-b": "agent::ToggleBurnMode",
@@ -307,7 +307,7 @@
} }
}, },
{ {
"context": "AgentPanel && text_thread", "context": "AgentPanel && prompt_editor",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"cmd-n": "agent::NewTextThread", "cmd-n": "agent::NewTextThread",
@@ -315,7 +315,7 @@
} }
}, },
{ {
"context": "AgentPanel && acp_thread", "context": "AgentPanel && external_agent_thread",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"cmd-n": "agent::NewExternalAgentThread", "cmd-n": "agent::NewExternalAgentThread",
@@ -423,7 +423,7 @@
} }
}, },
{ {
"context": "RulesLibrary", "context": "PromptLibrary",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"cmd-n": "rules_library::NewRule", "cmd-n": "rules_library::NewRule",
@@ -431,6 +431,13 @@
"cmd-w": "workspace::CloseWindow" "cmd-w": "workspace::CloseWindow"
} }
}, },
{
"context": "SettingsWindow",
"use_key_equivalents": true,
"bindings": {
"cmd-w": "workspace::CloseWindow"
}
},
{ {
"context": "BufferSearchBar", "context": "BufferSearchBar",
"use_key_equivalents": true, "use_key_equivalents": true,
@@ -539,10 +546,10 @@
"bindings": { "bindings": {
"cmd-[": "editor::Outdent", "cmd-[": "editor::Outdent",
"cmd-]": "editor::Indent", "cmd-]": "editor::Indent",
"cmd-ctrl-p": ["editor::AddSelectionAbove", { "skip_soft_wrap": false }], // Insert cursor above "cmd-ctrl-p": "editor::AddSelectionAbove", // Insert cursor above
"cmd-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], "cmd-alt-up": "editor::AddSelectionAbove",
"cmd-ctrl-n": ["editor::AddSelectionBelow", { "skip_soft_wrap": false }], // Insert cursor below "cmd-ctrl-n": "editor::AddSelectionBelow", // Insert cursor below
"cmd-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], "cmd-alt-down": "editor::AddSelectionBelow",
"cmd-shift-k": "editor::DeleteLine", "cmd-shift-k": "editor::DeleteLine",
"alt-up": "editor::MoveLineUp", "alt-up": "editor::MoveLineUp",
"alt-down": "editor::MoveLineDown", "alt-down": "editor::MoveLineDown",
@@ -582,15 +589,15 @@
"cmd-k cmd-l": "editor::ToggleFold", "cmd-k cmd-l": "editor::ToggleFold",
"cmd-k cmd-[": "editor::FoldRecursive", "cmd-k cmd-[": "editor::FoldRecursive",
"cmd-k cmd-]": "editor::UnfoldRecursive", "cmd-k cmd-]": "editor::UnfoldRecursive",
"cmd-k cmd-1": "editor::FoldAtLevel_1", "cmd-k cmd-1": ["editor::FoldAtLevel", 1],
"cmd-k cmd-2": "editor::FoldAtLevel_2", "cmd-k cmd-2": ["editor::FoldAtLevel", 2],
"cmd-k cmd-3": "editor::FoldAtLevel_3", "cmd-k cmd-3": ["editor::FoldAtLevel", 3],
"cmd-k cmd-4": "editor::FoldAtLevel_4", "cmd-k cmd-4": ["editor::FoldAtLevel", 4],
"cmd-k cmd-5": "editor::FoldAtLevel_5", "cmd-k cmd-5": ["editor::FoldAtLevel", 5],
"cmd-k cmd-6": "editor::FoldAtLevel_6", "cmd-k cmd-6": ["editor::FoldAtLevel", 6],
"cmd-k cmd-7": "editor::FoldAtLevel_7", "cmd-k cmd-7": ["editor::FoldAtLevel", 7],
"cmd-k cmd-8": "editor::FoldAtLevel_8", "cmd-k cmd-8": ["editor::FoldAtLevel", 8],
"cmd-k cmd-9": "editor::FoldAtLevel_9", "cmd-k cmd-9": ["editor::FoldAtLevel", 9],
"cmd-k cmd-0": "editor::FoldAll", "cmd-k cmd-0": "editor::FoldAll",
"cmd-k cmd-j": "editor::UnfoldAll", "cmd-k cmd-j": "editor::UnfoldAll",
// Using `ctrl-space` / `ctrl-shift-space` in Zed requires disabling the macOS global shortcut. // Using `ctrl-space` / `ctrl-shift-space` in Zed requires disabling the macOS global shortcut.
@@ -679,7 +686,7 @@
"cmd-alt-b": "workspace::ToggleRightDock", "cmd-alt-b": "workspace::ToggleRightDock",
"cmd-r": "workspace::ToggleRightDock", "cmd-r": "workspace::ToggleRightDock",
"cmd-j": "workspace::ToggleBottomDock", "cmd-j": "workspace::ToggleBottomDock",
"alt-cmd-y": "workspace::ToggleAllDocks", "alt-cmd-y": "workspace::CloseAllDocks",
// For 0px parameter, uses UI font size value. // For 0px parameter, uses UI font size value.
"ctrl-alt-0": "workspace::ResetActiveDockSize", "ctrl-alt-0": "workspace::ResetActiveDockSize",
"ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }], "ctrl-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
@@ -690,7 +697,7 @@
"cmd-shift-f": "pane::DeploySearch", "cmd-shift-f": "pane::DeploySearch",
"cmd-shift-h": ["pane::DeploySearch", { "replace_enabled": true }], "cmd-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
"cmd-shift-t": "pane::ReopenClosedItem", "cmd-shift-t": "pane::ReopenClosedItem",
"cmd-k cmd-s": "zed::OpenKeymap", "cmd-k cmd-s": "zed::OpenKeymapEditor",
"cmd-k cmd-t": "theme_selector::Toggle", "cmd-k cmd-t": "theme_selector::Toggle",
"ctrl-alt-cmd-p": "settings_profile_selector::Toggle", "ctrl-alt-cmd-p": "settings_profile_selector::Toggle",
"cmd-t": "project_symbols::Toggle", "cmd-t": "project_symbols::Toggle",
@@ -801,14 +808,6 @@
"tab": "editor::ComposeCompletion" "tab": "editor::ComposeCompletion"
} }
}, },
{
"context": "Editor && in_snippet",
"use_key_equivalents": true,
"bindings": {
"alt-right": "editor::NextSnippetTabstop",
"alt-left": "editor::PreviousSnippetTabstop"
}
},
{ {
"context": "Editor && edit_prediction", "context": "Editor && edit_prediction",
"bindings": { "bindings": {
@@ -1085,8 +1084,7 @@
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"alt-up": "collab_panel::MoveChannelUp", "alt-up": "collab_panel::MoveChannelUp",
"alt-down": "collab_panel::MoveChannelDown", "alt-down": "collab_panel::MoveChannelDown"
"alt-enter": "collab_panel::OpenSelectedChannelNotes"
} }
}, },
{ {
@@ -1162,8 +1160,7 @@
"context": "StashList || (StashList > Picker > Editor)", "context": "StashList || (StashList > Picker > Editor)",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"ctrl-shift-backspace": "stash_picker::DropStashItem", "ctrl-shift-backspace": "stash_picker::DropStashItem"
"ctrl-shift-v": "stash_picker::ShowStashItem"
} }
}, },
{ {
@@ -1218,7 +1215,6 @@
"ctrl-alt-down": "pane::SplitDown", "ctrl-alt-down": "pane::SplitDown",
"ctrl-alt-left": "pane::SplitLeft", "ctrl-alt-left": "pane::SplitLeft",
"ctrl-alt-right": "pane::SplitRight", "ctrl-alt-right": "pane::SplitRight",
"cmd-d": "pane::SplitRight",
"cmd-alt-r": "terminal::RerunTask" "cmd-alt-r": "terminal::RerunTask"
} }
}, },
@@ -1345,7 +1341,10 @@
"context": "Onboarding", "context": "Onboarding",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"cmd-enter": "onboarding::Finish", "cmd-1": "onboarding::ActivateBasicsPage",
"cmd-2": "onboarding::ActivateEditingPage",
"cmd-3": "onboarding::ActivateAISetupPage",
"cmd-escape": "onboarding::Finish",
"alt-tab": "onboarding::SignIn", "alt-tab": "onboarding::SignIn",
"alt-shift-a": "onboarding::OpenAccount" "alt-shift-a": "onboarding::OpenAccount"
} }
@@ -1356,70 +1355,5 @@
"bindings": { "bindings": {
"ctrl-shift-enter": "workspace::OpenWithSystem" "ctrl-shift-enter": "workspace::OpenWithSystem"
} }
},
{
"context": "SettingsWindow",
"use_key_equivalents": true,
"bindings": {
"cmd-w": "workspace::CloseWindow",
"escape": "workspace::CloseWindow",
"cmd-m": "settings_editor::Minimize",
"cmd-f": "search::FocusSearch",
"left": "settings_editor::ToggleFocusNav",
"cmd-shift-e": "settings_editor::ToggleFocusNav",
// todo(settings_ui): cut this down based on the max files and overflow UI
"ctrl-1": ["settings_editor::FocusFile", 0],
"ctrl-2": ["settings_editor::FocusFile", 1],
"ctrl-3": ["settings_editor::FocusFile", 2],
"ctrl-4": ["settings_editor::FocusFile", 3],
"ctrl-5": ["settings_editor::FocusFile", 4],
"ctrl-6": ["settings_editor::FocusFile", 5],
"ctrl-7": ["settings_editor::FocusFile", 6],
"ctrl-8": ["settings_editor::FocusFile", 7],
"ctrl-9": ["settings_editor::FocusFile", 8],
"ctrl-0": ["settings_editor::FocusFile", 9],
"cmd-{": "settings_editor::FocusPreviousFile",
"cmd-}": "settings_editor::FocusNextFile"
}
},
{
"context": "StashDiff > Editor",
"use_key_equivalents": true,
"bindings": {
"ctrl-space": "git::ApplyCurrentStash",
"ctrl-shift-space": "git::PopCurrentStash",
"ctrl-shift-backspace": "git::DropCurrentStash"
}
},
{
"context": "SettingsWindow > NavigationMenu",
"use_key_equivalents": true,
"bindings": {
"up": "settings_editor::FocusPreviousNavEntry",
"shift-tab": "settings_editor::FocusPreviousNavEntry",
"down": "settings_editor::FocusNextNavEntry",
"tab": "settings_editor::FocusNextNavEntry",
"right": "settings_editor::ExpandNavEntry",
"left": "settings_editor::CollapseNavEntry",
"pageup": "settings_editor::FocusPreviousRootNavEntry",
"pagedown": "settings_editor::FocusNextRootNavEntry",
"home": "settings_editor::FocusFirstNavEntry",
"end": "settings_editor::FocusLastNavEntry"
}
},
{
"context": "Zeta2Feedback > Editor",
"bindings": {
"enter": "editor::Newline",
"cmd-enter up": "dev::Zeta2RatePredictionPositive",
"cmd-enter down": "dev::Zeta2RatePredictionNegative"
}
},
{
"context": "Zeta2Context > Editor",
"bindings": {
"alt-left": "dev::Zeta2ContextGoBack",
"alt-right": "dev::Zeta2ContextGoForward"
}
} }
] ]

View File

@@ -29,8 +29,8 @@
"ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
"ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }], "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
"ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }], "ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
"ctrl-,": "zed::OpenSettings", "ctrl-,": "zed::OpenSettingsEditor",
"ctrl-alt-,": "zed::OpenSettingsFile", "ctrl-alt-,": "zed::OpenSettings",
"ctrl-q": "zed::Quit", "ctrl-q": "zed::Quit",
"f4": "debugger::Start", "f4": "debugger::Start",
"shift-f5": "debugger::Stop", "shift-f5": "debugger::Stop",
@@ -134,7 +134,7 @@
"ctrl-k z": "editor::ToggleSoftWrap", "ctrl-k z": "editor::ToggleSoftWrap",
"ctrl-f": "buffer_search::Deploy", "ctrl-f": "buffer_search::Deploy",
"ctrl-h": "buffer_search::DeployReplace", "ctrl-h": "buffer_search::DeployReplace",
"ctrl-shift-.": "agent::AddSelectionToThread", "ctrl-shift-.": "assistant::QuoteSelection",
"ctrl-shift-,": "assistant::InsertIntoEditor", "ctrl-shift-,": "assistant::InsertIntoEditor",
"shift-alt-e": "editor::SelectEnclosingSymbol", "shift-alt-e": "editor::SelectEnclosingSymbol",
"ctrl-shift-backspace": "editor::GoToPreviousChange", "ctrl-shift-backspace": "editor::GoToPreviousChange",
@@ -244,7 +244,7 @@
"ctrl-shift-i": "agent::ToggleOptionsMenu", "ctrl-shift-i": "agent::ToggleOptionsMenu",
// "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu", // "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
"shift-alt-escape": "agent::ExpandMessageEditor", "shift-alt-escape": "agent::ExpandMessageEditor",
"ctrl-shift-.": "agent::AddSelectionToThread", "ctrl-shift-.": "assistant::QuoteSelection",
"shift-alt-e": "agent::RemoveAllContext", "shift-alt-e": "agent::RemoveAllContext",
"ctrl-shift-e": "project_panel::ToggleFocus", "ctrl-shift-e": "project_panel::ToggleFocus",
"ctrl-shift-enter": "agent::ContinueThread", "ctrl-shift-enter": "agent::ContinueThread",
@@ -270,7 +270,7 @@
} }
}, },
{ {
"context": "AgentPanel && text_thread", "context": "AgentPanel && prompt_editor",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"ctrl-n": "agent::NewTextThread", "ctrl-n": "agent::NewTextThread",
@@ -278,7 +278,7 @@
} }
}, },
{ {
"context": "AgentPanel && acp_thread", "context": "AgentPanel && external_agent_thread",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"ctrl-n": "agent::NewExternalAgentThread", "ctrl-n": "agent::NewExternalAgentThread",
@@ -375,7 +375,7 @@
} }
}, },
{ {
"context": "RulesLibrary", "context": "PromptLibrary",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"ctrl-n": "rules_library::NewRule", "ctrl-n": "rules_library::NewRule",
@@ -383,6 +383,13 @@
"ctrl-w": "workspace::CloseWindow" "ctrl-w": "workspace::CloseWindow"
} }
}, },
{
"context": "SettingsWindow",
"use_key_equivalents": true,
"bindings": {
"ctrl-w": "workspace::CloseWindow"
}
},
{ {
"context": "BufferSearchBar", "context": "BufferSearchBar",
"use_key_equivalents": true, "use_key_equivalents": true,
@@ -500,8 +507,8 @@
"bindings": { "bindings": {
"ctrl-[": "editor::Outdent", "ctrl-[": "editor::Outdent",
"ctrl-]": "editor::Indent", "ctrl-]": "editor::Indent",
"ctrl-shift-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], // Insert Cursor Above "ctrl-shift-alt-up": "editor::AddSelectionAbove", // Insert Cursor Above
"ctrl-shift-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], // Insert Cursor Below "ctrl-shift-alt-down": "editor::AddSelectionBelow", // Insert Cursor Below
"ctrl-shift-k": "editor::DeleteLine", "ctrl-shift-k": "editor::DeleteLine",
"alt-up": "editor::MoveLineUp", "alt-up": "editor::MoveLineUp",
"alt-down": "editor::MoveLineDown", "alt-down": "editor::MoveLineDown",
@@ -536,15 +543,15 @@
"ctrl-k ctrl-l": "editor::ToggleFold", "ctrl-k ctrl-l": "editor::ToggleFold",
"ctrl-k ctrl-[": "editor::FoldRecursive", "ctrl-k ctrl-[": "editor::FoldRecursive",
"ctrl-k ctrl-]": "editor::UnfoldRecursive", "ctrl-k ctrl-]": "editor::UnfoldRecursive",
"ctrl-k ctrl-1": "editor::FoldAtLevel_1", "ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
"ctrl-k ctrl-2": "editor::FoldAtLevel_2", "ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
"ctrl-k ctrl-3": "editor::FoldAtLevel_3", "ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
"ctrl-k ctrl-4": "editor::FoldAtLevel_4", "ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
"ctrl-k ctrl-5": "editor::FoldAtLevel_5", "ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
"ctrl-k ctrl-6": "editor::FoldAtLevel_6", "ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
"ctrl-k ctrl-7": "editor::FoldAtLevel_7", "ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
"ctrl-k ctrl-8": "editor::FoldAtLevel_8", "ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
"ctrl-k ctrl-9": "editor::FoldAtLevel_9", "ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
"ctrl-k ctrl-0": "editor::FoldAll", "ctrl-k ctrl-0": "editor::FoldAll",
"ctrl-k ctrl-j": "editor::UnfoldAll", "ctrl-k ctrl-j": "editor::UnfoldAll",
"ctrl-space": "editor::ShowCompletions", "ctrl-space": "editor::ShowCompletions",
@@ -614,7 +621,7 @@
"ctrl-alt-b": "workspace::ToggleRightDock", "ctrl-alt-b": "workspace::ToggleRightDock",
"ctrl-b": "workspace::ToggleLeftDock", "ctrl-b": "workspace::ToggleLeftDock",
"ctrl-j": "workspace::ToggleBottomDock", "ctrl-j": "workspace::ToggleBottomDock",
"ctrl-shift-y": "workspace::ToggleAllDocks", "ctrl-shift-y": "workspace::CloseAllDocks",
"alt-r": "workspace::ResetActiveDockSize", "alt-r": "workspace::ResetActiveDockSize",
// For 0px parameter, uses UI font size value. // For 0px parameter, uses UI font size value.
"shift-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }], "shift-alt--": ["workspace::DecreaseActiveDockSize", { "px": 0 }],
@@ -623,7 +630,7 @@
"ctrl-shift-f": "pane::DeploySearch", "ctrl-shift-f": "pane::DeploySearch",
"ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }], "ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
"ctrl-shift-t": "pane::ReopenClosedItem", "ctrl-shift-t": "pane::ReopenClosedItem",
"ctrl-k ctrl-s": "zed::OpenKeymap", "ctrl-k ctrl-s": "zed::OpenKeymapEditor",
"ctrl-k ctrl-t": "theme_selector::Toggle", "ctrl-k ctrl-t": "theme_selector::Toggle",
"ctrl-alt-super-p": "settings_profile_selector::Toggle", "ctrl-alt-super-p": "settings_profile_selector::Toggle",
"ctrl-t": "project_symbols::Toggle", "ctrl-t": "project_symbols::Toggle",
@@ -736,14 +743,6 @@
"tab": "editor::ComposeCompletion" "tab": "editor::ComposeCompletion"
} }
}, },
{
"context": "Editor && in_snippet",
"use_key_equivalents": true,
"bindings": {
"alt-right": "editor::NextSnippetTabstop",
"alt-left": "editor::PreviousSnippetTabstop"
}
},
// Bindings for accepting edit predictions // Bindings for accepting edit predictions
// //
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is // alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
@@ -1038,8 +1037,7 @@
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"alt-up": "collab_panel::MoveChannelUp", "alt-up": "collab_panel::MoveChannelUp",
"alt-down": "collab_panel::MoveChannelDown", "alt-down": "collab_panel::MoveChannelDown"
"alt-enter": "collab_panel::OpenSelectedChannelNotes"
} }
}, },
{ {
@@ -1115,8 +1113,7 @@
"context": "StashList || (StashList > Picker > Editor)", "context": "StashList || (StashList > Picker > Editor)",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"ctrl-shift-backspace": "stash_picker::DropStashItem", "ctrl-shift-backspace": "stash_picker::DropStashItem"
"ctrl-shift-v": "stash_picker::ShowStashItem"
} }
}, },
{ {
@@ -1127,7 +1124,6 @@
"ctrl-insert": "terminal::Copy", "ctrl-insert": "terminal::Copy",
"ctrl-shift-c": "terminal::Copy", "ctrl-shift-c": "terminal::Copy",
"shift-insert": "terminal::Paste", "shift-insert": "terminal::Paste",
"ctrl-v": "terminal::Paste",
"ctrl-shift-v": "terminal::Paste", "ctrl-shift-v": "terminal::Paste",
"ctrl-enter": "assistant::InlineAssist", "ctrl-enter": "assistant::InlineAssist",
"alt-b": ["terminal::SendText", "\u001bb"], "alt-b": ["terminal::SendText", "\u001bb"],
@@ -1161,14 +1157,7 @@
"ctrl-shift-space": "terminal::ToggleViMode", "ctrl-shift-space": "terminal::ToggleViMode",
"ctrl-shift-r": "terminal::RerunTask", "ctrl-shift-r": "terminal::RerunTask",
"ctrl-alt-r": "terminal::RerunTask", "ctrl-alt-r": "terminal::RerunTask",
"alt-t": "terminal::RerunTask", "alt-t": "terminal::RerunTask"
"ctrl-shift-5": "pane::SplitRight"
}
},
{
"context": "Terminal && selection",
"bindings": {
"ctrl-c": "terminal::Copy"
} }
}, },
{ {
@@ -1275,74 +1264,12 @@
"context": "Onboarding", "context": "Onboarding",
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"ctrl-1": "onboarding::ActivateBasicsPage",
"ctrl-2": "onboarding::ActivateEditingPage",
"ctrl-3": "onboarding::ActivateAISetupPage",
"ctrl-enter": "onboarding::Finish", "ctrl-enter": "onboarding::Finish",
"alt-shift-l": "onboarding::SignIn", "alt-shift-l": "onboarding::SignIn",
"shift-alt-a": "onboarding::OpenAccount" "shift-alt-a": "onboarding::OpenAccount"
} }
},
{
"context": "SettingsWindow",
"use_key_equivalents": true,
"bindings": {
"ctrl-w": "workspace::CloseWindow",
"escape": "workspace::CloseWindow",
"ctrl-m": "settings_editor::Minimize",
"ctrl-f": "search::FocusSearch",
"left": "settings_editor::ToggleFocusNav",
"ctrl-shift-e": "settings_editor::ToggleFocusNav",
// todo(settings_ui): cut this down based on the max files and overflow UI
"ctrl-1": ["settings_editor::FocusFile", 0],
"ctrl-2": ["settings_editor::FocusFile", 1],
"ctrl-3": ["settings_editor::FocusFile", 2],
"ctrl-4": ["settings_editor::FocusFile", 3],
"ctrl-5": ["settings_editor::FocusFile", 4],
"ctrl-6": ["settings_editor::FocusFile", 5],
"ctrl-7": ["settings_editor::FocusFile", 6],
"ctrl-8": ["settings_editor::FocusFile", 7],
"ctrl-9": ["settings_editor::FocusFile", 8],
"ctrl-0": ["settings_editor::FocusFile", 9],
"ctrl-pageup": "settings_editor::FocusPreviousFile",
"ctrl-pagedown": "settings_editor::FocusNextFile"
}
},
{
"context": "StashDiff > Editor",
"use_key_equivalents": true,
"bindings": {
"ctrl-space": "git::ApplyCurrentStash",
"ctrl-shift-space": "git::PopCurrentStash",
"ctrl-shift-backspace": "git::DropCurrentStash"
}
},
{
"context": "SettingsWindow > NavigationMenu",
"use_key_equivalents": true,
"bindings": {
"up": "settings_editor::FocusPreviousNavEntry",
"shift-tab": "settings_editor::FocusPreviousNavEntry",
"down": "settings_editor::FocusNextNavEntry",
"tab": "settings_editor::FocusNextNavEntry",
"right": "settings_editor::ExpandNavEntry",
"left": "settings_editor::CollapseNavEntry",
"pageup": "settings_editor::FocusPreviousRootNavEntry",
"pagedown": "settings_editor::FocusNextRootNavEntry",
"home": "settings_editor::FocusFirstNavEntry",
"end": "settings_editor::FocusLastNavEntry"
}
},
{
"context": "Zeta2Feedback > Editor",
"bindings": {
"enter": "editor::Newline",
"ctrl-enter up": "dev::Zeta2RatePredictionPositive",
"ctrl-enter down": "dev::Zeta2RatePredictionNegative"
}
},
{
"context": "Zeta2Context > Editor",
"bindings": {
"alt-left": "dev::Zeta2ContextGoBack",
"alt-right": "dev::Zeta2ContextGoForward"
}
} }
] ]

View File

@@ -24,8 +24,8 @@
"ctrl-<": "editor::ScrollCursorCenter", // editor:scroll-to-cursor "ctrl-<": "editor::ScrollCursorCenter", // editor:scroll-to-cursor
"f3": ["editor::SelectNext", { "replace_newest": true }], // find-and-replace:find-next "f3": ["editor::SelectNext", { "replace_newest": true }], // find-and-replace:find-next
"shift-f3": ["editor::SelectPrevious", { "replace_newest": true }], //find-and-replace:find-previous "shift-f3": ["editor::SelectPrevious", { "replace_newest": true }], //find-and-replace:find-previous
"alt-shift-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], // editor:add-selection-below "alt-shift-down": "editor::AddSelectionBelow", // editor:add-selection-below
"alt-shift-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], // editor:add-selection-above "alt-shift-up": "editor::AddSelectionAbove", // editor:add-selection-above
"ctrl-j": "editor::JoinLines", // editor:join-lines "ctrl-j": "editor::JoinLines", // editor:join-lines
"ctrl-shift-d": "editor::DuplicateLineDown", // editor:duplicate-lines "ctrl-shift-d": "editor::DuplicateLineDown", // editor:duplicate-lines
"ctrl-up": "editor::MoveLineUp", // editor:move-line-up "ctrl-up": "editor::MoveLineUp", // editor:move-line-up

View File

@@ -17,8 +17,8 @@
"bindings": { "bindings": {
"ctrl-i": "agent::ToggleFocus", "ctrl-i": "agent::ToggleFocus",
"ctrl-shift-i": "agent::ToggleFocus", "ctrl-shift-i": "agent::ToggleFocus",
"ctrl-shift-l": "agent::AddSelectionToThread", // In cursor uses "Ask" mode "ctrl-shift-l": "agent::QuoteSelection", // In cursor uses "Ask" mode
"ctrl-l": "agent::AddSelectionToThread", // In cursor uses "Agent" mode "ctrl-l": "agent::QuoteSelection", // In cursor uses "Agent" mode
"ctrl-k": "assistant::InlineAssist", "ctrl-k": "assistant::InlineAssist",
"ctrl-shift-k": "assistant::InsertIntoEditor" "ctrl-shift-k": "assistant::InsertIntoEditor"
} }

View File

@@ -8,23 +8,11 @@
"ctrl-g": "menu::Cancel" "ctrl-g": "menu::Cancel"
} }
}, },
{
// Workaround to avoid falling back to default bindings.
// Unbind so Zed ignores these keys and lets emacs handle them.
// NOTE: must be declared before the `Editor` override.
// NOTE: in macos the 'ctrl-x' 'ctrl-p' and 'ctrl-n' rebindings are not needed, since they default to 'cmd'.
"context": "Editor",
"bindings": {
"ctrl-g": null, // currently activates `go_to_line::Toggle` when there is nothing to cancel
"ctrl-x": null, // currently activates `editor::Cut` if no following key is pressed for 1 second
"ctrl-p": null, // currently activates `file_finder::Toggle` when the cursor is on the first character of the buffer
"ctrl-n": null // currently activates `workspace::NewFile` when the cursor is on the last character of the buffer
}
},
{ {
"context": "Editor", "context": "Editor",
"bindings": { "bindings": {
"ctrl-g": "editor::Cancel", "ctrl-g": "editor::Cancel",
"ctrl-x b": "tab_switcher::Toggle", // switch-to-buffer
"alt-g g": "go_to_line::Toggle", // goto-line "alt-g g": "go_to_line::Toggle", // goto-line
"alt-g alt-g": "go_to_line::Toggle", // goto-line "alt-g alt-g": "go_to_line::Toggle", // goto-line
"ctrl-space": "editor::SetMark", // set-mark "ctrl-space": "editor::SetMark", // set-mark
@@ -41,10 +29,8 @@
"shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }], // move-beginning-of-line "shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }], // move-beginning-of-line
"shift-end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], // move-end-of-line "shift-end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], // move-end-of-line
"alt-m": ["editor::MoveToBeginningOfLine", { "stop_at_soft_wraps": false, "stop_at_indent": true }], // back-to-indentation "alt-m": ["editor::MoveToBeginningOfLine", { "stop_at_soft_wraps": false, "stop_at_indent": true }], // back-to-indentation
"alt-left": "editor::MoveToPreviousWordStart", // left-word "alt-f": "editor::MoveToNextSubwordEnd", // forward-word
"alt-right": "editor::MoveToNextWordEnd", // right-word "alt-b": "editor::MoveToPreviousSubwordStart", // backward-word
"alt-f": "editor::MoveToNextWordEnd", // forward-word
"alt-b": "editor::MoveToPreviousWordStart", // backward-word
"alt-u": "editor::ConvertToUpperCase", // upcase-word "alt-u": "editor::ConvertToUpperCase", // upcase-word
"alt-l": "editor::ConvertToLowerCase", // downcase-word "alt-l": "editor::ConvertToLowerCase", // downcase-word
"alt-c": "editor::ConvertToUpperCamelCase", // capitalize-word "alt-c": "editor::ConvertToUpperCamelCase", // capitalize-word
@@ -57,8 +43,6 @@
"ctrl-x h": "editor::SelectAll", // mark-whole-buffer "ctrl-x h": "editor::SelectAll", // mark-whole-buffer
"ctrl-d": "editor::Delete", // delete-char "ctrl-d": "editor::Delete", // delete-char
"alt-d": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }], // kill-word "alt-d": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }], // kill-word
"alt-backspace": "editor::DeleteToPreviousWordStart", // backward-kill-word
"alt-delete": "editor::DeleteToPreviousWordStart", // backward-kill-word
"ctrl-k": "editor::KillRingCut", // kill-line "ctrl-k": "editor::KillRingCut", // kill-line
"ctrl-w": "editor::Cut", // kill-region "ctrl-w": "editor::Cut", // kill-region
"alt-w": "editor::Copy", // kill-ring-save "alt-w": "editor::Copy", // kill-ring-save
@@ -68,19 +52,14 @@
"ctrl-x u": "editor::Undo", // undo "ctrl-x u": "editor::Undo", // undo
"alt-{": "editor::MoveToStartOfParagraph", // backward-paragraph "alt-{": "editor::MoveToStartOfParagraph", // backward-paragraph
"alt-}": "editor::MoveToEndOfParagraph", // forward-paragraph "alt-}": "editor::MoveToEndOfParagraph", // forward-paragraph
"ctrl-up": "editor::MoveToStartOfParagraph", // backward-paragraph
"ctrl-down": "editor::MoveToEndOfParagraph", // forward-paragraph
"ctrl-v": "editor::MovePageDown", // scroll-up "ctrl-v": "editor::MovePageDown", // scroll-up
"alt-v": "editor::MovePageUp", // scroll-down "alt-v": "editor::MovePageUp", // scroll-down
"ctrl-x [": "editor::MoveToBeginning", // beginning-of-buffer "ctrl-x [": "editor::MoveToBeginning", // beginning-of-buffer
"ctrl-x ]": "editor::MoveToEnd", // end-of-buffer "ctrl-x ]": "editor::MoveToEnd", // end-of-buffer
"alt-<": "editor::MoveToBeginning", // beginning-of-buffer "alt-<": "editor::MoveToBeginning", // beginning-of-buffer
"alt->": "editor::MoveToEnd", // end-of-buffer "alt->": "editor::MoveToEnd", // end-of-buffer
"ctrl-home": "editor::MoveToBeginning", // beginning-of-buffer
"ctrl-end": "editor::MoveToEnd", // end-of-buffer
"ctrl-l": "editor::ScrollCursorCenterTopBottom", // recenter-top-bottom "ctrl-l": "editor::ScrollCursorCenterTopBottom", // recenter-top-bottom
"ctrl-s": "buffer_search::Deploy", // isearch-forward "ctrl-s": "buffer_search::Deploy", // isearch-forward
"ctrl-r": "buffer_search::Deploy", // isearch-backward
"alt-^": "editor::JoinLines", // join-line "alt-^": "editor::JoinLines", // join-line
"alt-q": "editor::Rewrap" // fill-paragraph "alt-q": "editor::Rewrap" // fill-paragraph
} }
@@ -106,19 +85,10 @@
"end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], "end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }],
"ctrl-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }], "ctrl-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }],
"ctrl-e": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], "ctrl-e": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }],
"alt-m": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false, "stop_at_indent": true }],
"alt-f": "editor::SelectToNextWordEnd", "alt-f": "editor::SelectToNextWordEnd",
"alt-b": "editor::SelectToPreviousWordStart", "alt-b": "editor::SelectToPreviousSubwordStart",
"alt-{": "editor::SelectToStartOfParagraph",
"alt-}": "editor::SelectToEndOfParagraph",
"ctrl-up": "editor::SelectToStartOfParagraph",
"ctrl-down": "editor::SelectToEndOfParagraph",
"ctrl-x [": "editor::SelectToBeginning",
"ctrl-x ]": "editor::SelectToEnd",
"alt-<": "editor::SelectToBeginning", "alt-<": "editor::SelectToBeginning",
"alt->": "editor::SelectToEnd", "alt->": "editor::SelectToEnd",
"ctrl-home": "editor::SelectToBeginning",
"ctrl-end": "editor::SelectToEnd",
"ctrl-g": "editor::Cancel" "ctrl-g": "editor::Cancel"
} }
}, },
@@ -136,28 +106,15 @@
"ctrl-n": "editor::SignatureHelpNext" "ctrl-n": "editor::SignatureHelpNext"
} }
}, },
// Example setting for using emacs-style tab
// (i.e. indent the current line / selection or perform symbol completion depending on context)
// {
// "context": "Editor && !showing_code_actions && !showing_completions",
// "bindings": {
// "tab": "editor::AutoIndent" // indent-for-tab-command
// }
// },
{ {
"context": "Workspace", "context": "Workspace",
"bindings": { "bindings": {
"alt-x": "command_palette::Toggle", // execute-extended-command
"ctrl-x b": "tab_switcher::Toggle", // switch-to-buffer
"ctrl-x ctrl-b": "tab_switcher::Toggle", // list-buffers
// "ctrl-x ctrl-c": "workspace::CloseWindow" // in case you only want to exit the current Zed instance
"ctrl-x ctrl-c": "zed::Quit", // save-buffers-kill-terminal "ctrl-x ctrl-c": "zed::Quit", // save-buffers-kill-terminal
"ctrl-x 5 0": "workspace::CloseWindow", // delete-frame "ctrl-x 5 0": "workspace::CloseWindow", // delete-frame
"ctrl-x 5 2": "workspace::NewWindow", // make-frame-command "ctrl-x 5 2": "workspace::NewWindow", // make-frame-command
"ctrl-x o": "workspace::ActivateNextPane", // other-window "ctrl-x o": "workspace::ActivateNextPane", // other-window
"ctrl-x k": "pane::CloseActiveItem", // kill-buffer "ctrl-x k": "pane::CloseActiveItem", // kill-buffer
"ctrl-x 0": "pane::CloseActiveItem", // delete-window "ctrl-x 0": "pane::CloseActiveItem", // delete-window
// "ctrl-x 1": "pane::JoinAll", // in case you prefer to delete the splits but keep the buffers open
"ctrl-x 1": "pane::CloseOtherItems", // delete-other-windows "ctrl-x 1": "pane::CloseOtherItems", // delete-other-windows
"ctrl-x 2": "pane::SplitDown", // split-window-below "ctrl-x 2": "pane::SplitDown", // split-window-below
"ctrl-x 3": "pane::SplitRight", // split-window-right "ctrl-x 3": "pane::SplitRight", // split-window-right
@@ -168,19 +125,10 @@
} }
}, },
{ {
// Workaround to enable using native emacs from the Zed terminal. // Workaround to enable using emacs in the Zed terminal.
// Unbind so Zed ignores these keys and lets emacs handle them. // Unbind so Zed ignores these keys and lets emacs handle them.
// NOTE:
// "terminal::SendKeystroke" only works for a single key stroke (e.g. ctrl-x),
// so override with null for compound sequences (e.g. ctrl-x ctrl-c).
"context": "Terminal", "context": "Terminal",
"bindings": { "bindings": {
// If you want to perfect your emacs-in-zed setup, also consider the following.
// You may need to enable "option_as_meta" from the Zed settings for "alt-x" to work.
// "alt-x": ["terminal::SendKeystroke", "alt-x"],
// "ctrl-x": ["terminal::SendKeystroke", "ctrl-x"],
// "ctrl-n": ["terminal::SendKeystroke", "ctrl-n"],
// ...
"ctrl-x ctrl-c": null, // save-buffers-kill-terminal "ctrl-x ctrl-c": null, // save-buffers-kill-terminal
"ctrl-x ctrl-f": null, // find-file "ctrl-x ctrl-f": null, // find-file
"ctrl-x ctrl-s": null, // save-buffer "ctrl-x ctrl-s": null, // save-buffer

View File

@@ -1,7 +1,7 @@
[ [
{ {
"bindings": { "bindings": {
"ctrl-alt-s": "zed::OpenSettingsFile", "ctrl-alt-s": "zed::OpenSettings",
"ctrl-{": "pane::ActivatePreviousItem", "ctrl-{": "pane::ActivatePreviousItem",
"ctrl-}": "pane::ActivateNextItem", "ctrl-}": "pane::ActivateNextItem",
"shift-escape": null, // Unmap workspace::zoom "shift-escape": null, // Unmap workspace::zoom
@@ -91,7 +91,7 @@
{ {
"context": "Workspace", "context": "Workspace",
"bindings": { "bindings": {
"ctrl-shift-f12": "workspace::ToggleAllDocks", "ctrl-shift-f12": "workspace::CloseAllDocks",
"ctrl-shift-r": ["pane::DeploySearch", { "replace_enabled": true }], "ctrl-shift-r": ["pane::DeploySearch", { "replace_enabled": true }],
"alt-shift-f10": "task::Spawn", "alt-shift-f10": "task::Spawn",
"ctrl-e": "file_finder::Toggle", "ctrl-e": "file_finder::Toggle",

View File

@@ -28,8 +28,8 @@
{ {
"context": "Editor", "context": "Editor",
"bindings": { "bindings": {
"ctrl-alt-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": false }], "ctrl-alt-up": "editor::AddSelectionAbove",
"ctrl-alt-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": false }], "ctrl-alt-down": "editor::AddSelectionBelow",
"ctrl-shift-up": "editor::MoveLineUp", "ctrl-shift-up": "editor::MoveLineUp",
"ctrl-shift-down": "editor::MoveLineDown", "ctrl-shift-down": "editor::MoveLineDown",
"ctrl-shift-m": "editor::SelectLargerSyntaxNode", "ctrl-shift-m": "editor::SelectLargerSyntaxNode",

View File

@@ -25,8 +25,8 @@
"cmd-<": "editor::ScrollCursorCenter", "cmd-<": "editor::ScrollCursorCenter",
"cmd-g": ["editor::SelectNext", { "replace_newest": true }], "cmd-g": ["editor::SelectNext", { "replace_newest": true }],
"cmd-shift-g": ["editor::SelectPrevious", { "replace_newest": true }], "cmd-shift-g": ["editor::SelectPrevious", { "replace_newest": true }],
"ctrl-shift-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": true }], "ctrl-shift-down": "editor::AddSelectionBelow",
"ctrl-shift-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": true }], "ctrl-shift-up": "editor::AddSelectionAbove",
"alt-enter": "editor::Newline", "alt-enter": "editor::Newline",
"cmd-shift-d": "editor::DuplicateLineDown", "cmd-shift-d": "editor::DuplicateLineDown",
"ctrl-cmd-up": "editor::MoveLineUp", "ctrl-cmd-up": "editor::MoveLineUp",

View File

@@ -17,8 +17,8 @@
"bindings": { "bindings": {
"cmd-i": "agent::ToggleFocus", "cmd-i": "agent::ToggleFocus",
"cmd-shift-i": "agent::ToggleFocus", "cmd-shift-i": "agent::ToggleFocus",
"cmd-shift-l": "agent::AddSelectionToThread", // In cursor uses "Ask" mode "cmd-shift-l": "agent::QuoteSelection", // In cursor uses "Ask" mode
"cmd-l": "agent::AddSelectionToThread", // In cursor uses "Agent" mode "cmd-l": "agent::QuoteSelection", // In cursor uses "Agent" mode
"cmd-k": "assistant::InlineAssist", "cmd-k": "assistant::InlineAssist",
"cmd-shift-k": "assistant::InsertIntoEditor" "cmd-shift-k": "assistant::InsertIntoEditor"
} }

View File

@@ -9,19 +9,11 @@
"ctrl-g": "menu::Cancel" "ctrl-g": "menu::Cancel"
} }
}, },
{
// Workaround to avoid falling back to default bindings.
// Unbind so Zed ignores these keys and lets emacs handle them.
// NOTE: must be declared before the `Editor` override.
"context": "Editor",
"bindings": {
"ctrl-g": null // currently activates `go_to_line::Toggle` when there is nothing to cancel
}
},
{ {
"context": "Editor", "context": "Editor",
"bindings": { "bindings": {
"ctrl-g": "editor::Cancel", "ctrl-g": "editor::Cancel",
"ctrl-x b": "tab_switcher::Toggle", // switch-to-buffer
"alt-g g": "go_to_line::Toggle", // goto-line "alt-g g": "go_to_line::Toggle", // goto-line
"alt-g alt-g": "go_to_line::Toggle", // goto-line "alt-g alt-g": "go_to_line::Toggle", // goto-line
"ctrl-space": "editor::SetMark", // set-mark "ctrl-space": "editor::SetMark", // set-mark
@@ -38,10 +30,8 @@
"shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }], // move-beginning-of-line "shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }], // move-beginning-of-line
"shift-end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], // move-end-of-line "shift-end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], // move-end-of-line
"alt-m": ["editor::MoveToBeginningOfLine", { "stop_at_soft_wraps": false, "stop_at_indent": true }], // back-to-indentation "alt-m": ["editor::MoveToBeginningOfLine", { "stop_at_soft_wraps": false, "stop_at_indent": true }], // back-to-indentation
"alt-left": "editor::MoveToPreviousWordStart", // left-word "alt-f": "editor::MoveToNextSubwordEnd", // forward-word
"alt-right": "editor::MoveToNextWordEnd", // right-word "alt-b": "editor::MoveToPreviousSubwordStart", // backward-word
"alt-f": "editor::MoveToNextWordEnd", // forward-word
"alt-b": "editor::MoveToPreviousWordStart", // backward-word
"alt-u": "editor::ConvertToUpperCase", // upcase-word "alt-u": "editor::ConvertToUpperCase", // upcase-word
"alt-l": "editor::ConvertToLowerCase", // downcase-word "alt-l": "editor::ConvertToLowerCase", // downcase-word
"alt-c": "editor::ConvertToUpperCamelCase", // capitalize-word "alt-c": "editor::ConvertToUpperCamelCase", // capitalize-word
@@ -54,8 +44,6 @@
"ctrl-x h": "editor::SelectAll", // mark-whole-buffer "ctrl-x h": "editor::SelectAll", // mark-whole-buffer
"ctrl-d": "editor::Delete", // delete-char "ctrl-d": "editor::Delete", // delete-char
"alt-d": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }], // kill-word "alt-d": ["editor::DeleteToNextWordEnd", { "ignore_newlines": false, "ignore_brackets": false }], // kill-word
"alt-backspace": "editor::DeleteToPreviousWordStart", // backward-kill-word
"alt-delete": "editor::DeleteToPreviousWordStart", // backward-kill-word
"ctrl-k": "editor::KillRingCut", // kill-line "ctrl-k": "editor::KillRingCut", // kill-line
"ctrl-w": "editor::Cut", // kill-region "ctrl-w": "editor::Cut", // kill-region
"alt-w": "editor::Copy", // kill-ring-save "alt-w": "editor::Copy", // kill-ring-save
@@ -65,19 +53,14 @@
"ctrl-x u": "editor::Undo", // undo "ctrl-x u": "editor::Undo", // undo
"alt-{": "editor::MoveToStartOfParagraph", // backward-paragraph "alt-{": "editor::MoveToStartOfParagraph", // backward-paragraph
"alt-}": "editor::MoveToEndOfParagraph", // forward-paragraph "alt-}": "editor::MoveToEndOfParagraph", // forward-paragraph
"ctrl-up": "editor::MoveToStartOfParagraph", // backward-paragraph
"ctrl-down": "editor::MoveToEndOfParagraph", // forward-paragraph
"ctrl-v": "editor::MovePageDown", // scroll-up "ctrl-v": "editor::MovePageDown", // scroll-up
"alt-v": "editor::MovePageUp", // scroll-down "alt-v": "editor::MovePageUp", // scroll-down
"ctrl-x [": "editor::MoveToBeginning", // beginning-of-buffer "ctrl-x [": "editor::MoveToBeginning", // beginning-of-buffer
"ctrl-x ]": "editor::MoveToEnd", // end-of-buffer "ctrl-x ]": "editor::MoveToEnd", // end-of-buffer
"alt-<": "editor::MoveToBeginning", // beginning-of-buffer "alt-<": "editor::MoveToBeginning", // beginning-of-buffer
"alt->": "editor::MoveToEnd", // end-of-buffer "alt->": "editor::MoveToEnd", // end-of-buffer
"ctrl-home": "editor::MoveToBeginning", // beginning-of-buffer
"ctrl-end": "editor::MoveToEnd", // end-of-buffer
"ctrl-l": "editor::ScrollCursorCenterTopBottom", // recenter-top-bottom "ctrl-l": "editor::ScrollCursorCenterTopBottom", // recenter-top-bottom
"ctrl-s": "buffer_search::Deploy", // isearch-forward "ctrl-s": "buffer_search::Deploy", // isearch-forward
"ctrl-r": "buffer_search::Deploy", // isearch-backward
"alt-^": "editor::JoinLines", // join-line "alt-^": "editor::JoinLines", // join-line
"alt-q": "editor::Rewrap" // fill-paragraph "alt-q": "editor::Rewrap" // fill-paragraph
} }
@@ -103,19 +86,10 @@
"end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], "end": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }],
"ctrl-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }], "ctrl-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false }],
"ctrl-e": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }], "ctrl-e": ["editor::SelectToEndOfLine", { "stop_at_soft_wraps": false }],
"alt-m": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": false, "stop_at_indent": true }],
"alt-f": "editor::SelectToNextWordEnd", "alt-f": "editor::SelectToNextWordEnd",
"alt-b": "editor::SelectToPreviousWordStart", "alt-b": "editor::SelectToPreviousSubwordStart",
"alt-{": "editor::SelectToStartOfParagraph",
"alt-}": "editor::SelectToEndOfParagraph",
"ctrl-up": "editor::SelectToStartOfParagraph",
"ctrl-down": "editor::SelectToEndOfParagraph",
"ctrl-x [": "editor::SelectToBeginning",
"ctrl-x ]": "editor::SelectToEnd",
"alt-<": "editor::SelectToBeginning", "alt-<": "editor::SelectToBeginning",
"alt->": "editor::SelectToEnd", "alt->": "editor::SelectToEnd",
"ctrl-home": "editor::SelectToBeginning",
"ctrl-end": "editor::SelectToEnd",
"ctrl-g": "editor::Cancel" "ctrl-g": "editor::Cancel"
} }
}, },
@@ -133,28 +107,15 @@
"ctrl-n": "editor::SignatureHelpNext" "ctrl-n": "editor::SignatureHelpNext"
} }
}, },
// Example setting for using emacs-style tab
// (i.e. indent the current line / selection or perform symbol completion depending on context)
// {
// "context": "Editor && !showing_code_actions && !showing_completions",
// "bindings": {
// "tab": "editor::AutoIndent" // indent-for-tab-command
// }
// },
{ {
"context": "Workspace", "context": "Workspace",
"bindings": { "bindings": {
"alt-x": "command_palette::Toggle", // execute-extended-command
"ctrl-x b": "tab_switcher::Toggle", // switch-to-buffer
"ctrl-x ctrl-b": "tab_switcher::Toggle", // list-buffers
// "ctrl-x ctrl-c": "workspace::CloseWindow" // in case you only want to exit the current Zed instance
"ctrl-x ctrl-c": "zed::Quit", // save-buffers-kill-terminal "ctrl-x ctrl-c": "zed::Quit", // save-buffers-kill-terminal
"ctrl-x 5 0": "workspace::CloseWindow", // delete-frame "ctrl-x 5 0": "workspace::CloseWindow", // delete-frame
"ctrl-x 5 2": "workspace::NewWindow", // make-frame-command "ctrl-x 5 2": "workspace::NewWindow", // make-frame-command
"ctrl-x o": "workspace::ActivateNextPane", // other-window "ctrl-x o": "workspace::ActivateNextPane", // other-window
"ctrl-x k": "pane::CloseActiveItem", // kill-buffer "ctrl-x k": "pane::CloseActiveItem", // kill-buffer
"ctrl-x 0": "pane::CloseActiveItem", // delete-window "ctrl-x 0": "pane::CloseActiveItem", // delete-window
// "ctrl-x 1": "pane::JoinAll", // in case you prefer to delete the splits but keep the buffers open
"ctrl-x 1": "pane::CloseOtherItems", // delete-other-windows "ctrl-x 1": "pane::CloseOtherItems", // delete-other-windows
"ctrl-x 2": "pane::SplitDown", // split-window-below "ctrl-x 2": "pane::SplitDown", // split-window-below
"ctrl-x 3": "pane::SplitRight", // split-window-right "ctrl-x 3": "pane::SplitRight", // split-window-right
@@ -165,19 +126,10 @@
} }
}, },
{ {
// Workaround to enable using native emacs from the Zed terminal. // Workaround to enable using emacs in the Zed terminal.
// Unbind so Zed ignores these keys and lets emacs handle them. // Unbind so Zed ignores these keys and lets emacs handle them.
// NOTE:
// "terminal::SendKeystroke" only works for a single key stroke (e.g. ctrl-x),
// so override with null for compound sequences (e.g. ctrl-x ctrl-c).
"context": "Terminal", "context": "Terminal",
"bindings": { "bindings": {
// If you want to perfect your emacs-in-zed setup, also consider the following.
// You may need to enable "option_as_meta" from the Zed settings for "alt-x" to work.
// "alt-x": ["terminal::SendKeystroke", "alt-x"],
// "ctrl-x": ["terminal::SendKeystroke", "ctrl-x"],
// "ctrl-n": ["terminal::SendKeystroke", "ctrl-n"],
// ...
"ctrl-x ctrl-c": null, // save-buffers-kill-terminal "ctrl-x ctrl-c": null, // save-buffers-kill-terminal
"ctrl-x ctrl-f": null, // find-file "ctrl-x ctrl-f": null, // find-file
"ctrl-x ctrl-s": null, // save-buffer "ctrl-x ctrl-s": null, // save-buffer

View File

@@ -93,7 +93,7 @@
{ {
"context": "Workspace", "context": "Workspace",
"bindings": { "bindings": {
"cmd-shift-f12": "workspace::ToggleAllDocks", "cmd-shift-f12": "workspace::CloseAllDocks",
"cmd-shift-r": ["pane::DeploySearch", { "replace_enabled": true }], "cmd-shift-r": ["pane::DeploySearch", { "replace_enabled": true }],
"ctrl-alt-r": "task::Spawn", "ctrl-alt-r": "task::Spawn",
"cmd-e": "file_finder::Toggle", "cmd-e": "file_finder::Toggle",

View File

@@ -28,8 +28,8 @@
{ {
"context": "Editor", "context": "Editor",
"bindings": { "bindings": {
"ctrl-shift-up": ["editor::AddSelectionAbove", { "skip_soft_wrap": false }], "ctrl-shift-up": "editor::AddSelectionAbove",
"ctrl-shift-down": ["editor::AddSelectionBelow", { "skip_soft_wrap": false }], "ctrl-shift-down": "editor::AddSelectionBelow",
"cmd-ctrl-up": "editor::MoveLineUp", "cmd-ctrl-up": "editor::MoveLineUp",
"cmd-ctrl-down": "editor::MoveLineDown", "cmd-ctrl-down": "editor::MoveLineDown",
"cmd-shift-space": "editor::SelectAll", "cmd-shift-space": "editor::SelectAll",

View File

@@ -95,6 +95,8 @@
"g g": "vim::StartOfDocument", "g g": "vim::StartOfDocument",
"g h": "editor::Hover", "g h": "editor::Hover",
"g B": "editor::BlameHover", "g B": "editor::BlameHover",
"g t": "vim::GoToTab",
"g shift-t": "vim::GoToPreviousTab",
"g d": "editor::GoToDefinition", "g d": "editor::GoToDefinition",
"g shift-d": "editor::GoToDeclaration", "g shift-d": "editor::GoToDeclaration",
"g y": "editor::GoToTypeDefinition", "g y": "editor::GoToTypeDefinition",
@@ -220,8 +222,6 @@
"[ {": ["vim::UnmatchedBackward", { "char": "{" }], "[ {": ["vim::UnmatchedBackward", { "char": "{" }],
"] )": ["vim::UnmatchedForward", { "char": ")" }], "] )": ["vim::UnmatchedForward", { "char": ")" }],
"[ (": ["vim::UnmatchedBackward", { "char": "(" }], "[ (": ["vim::UnmatchedBackward", { "char": "(" }],
"[ r": "vim::GoToPreviousReference",
"] r": "vim::GoToNextReference",
// tree-sitter related commands // tree-sitter related commands
"[ x": "vim::SelectLargerSyntaxNode", "[ x": "vim::SelectLargerSyntaxNode",
"] x": "vim::SelectSmallerSyntaxNode" "] x": "vim::SelectSmallerSyntaxNode"
@@ -421,75 +421,59 @@
"ctrl-[": "editor::Cancel" "ctrl-[": "editor::Cancel"
} }
}, },
{
"context": "vim_mode == helix_select && !menu",
"bindings": {
"escape": "vim::SwitchToHelixNormalMode"
}
},
{ {
"context": "(vim_mode == helix_normal || vim_mode == helix_select) && !menu", "context": "(vim_mode == helix_normal || vim_mode == helix_select) && !menu",
"bindings": { "bindings": {
// Movement ";": "vim::HelixCollapseSelection",
"h": "vim::WrappingLeft", ":": "command_palette::Toggle",
"m": "vim::PushHelixMatch",
"s": "vim::HelixSelectRegex",
"]": ["vim::PushHelixNext", { "around": true }],
"[": ["vim::PushHelixPrevious", { "around": true }],
"left": "vim::WrappingLeft", "left": "vim::WrappingLeft",
"l": "vim::WrappingRight",
"right": "vim::WrappingRight", "right": "vim::WrappingRight",
"t": ["vim::PushFindForward", { "before": true, "multiline": true }], "h": "vim::WrappingLeft",
"f": ["vim::PushFindForward", { "before": false, "multiline": true }], "l": "vim::WrappingRight",
"shift-t": ["vim::PushFindBackward", { "after": true, "multiline": true }],
"shift-f": ["vim::PushFindBackward", { "after": false, "multiline": true }],
"alt-.": "vim::RepeatFind",
// Changes
"shift-r": "editor::Paste",
"`": "vim::ConvertToLowerCase",
"alt-`": "vim::ConvertToUpperCase",
"insert": "vim::InsertBefore",
"shift-u": "editor::Redo",
"ctrl-r": "vim::Redo",
"y": "vim::HelixYank", "y": "vim::HelixYank",
"p": "vim::HelixPaste", "p": "vim::HelixPaste",
"shift-p": ["vim::HelixPaste", { "before": true }], "shift-p": ["vim::HelixPaste", { "before": true }],
"alt-;": "vim::OtherEnd",
"ctrl-r": "vim::Redo",
"f": ["vim::PushFindForward", { "before": false, "multiline": true }],
"t": ["vim::PushFindForward", { "before": true, "multiline": true }],
"shift-f": ["vim::PushFindBackward", { "after": false, "multiline": true }],
"shift-t": ["vim::PushFindBackward", { "after": true, "multiline": true }],
">": "vim::Indent", ">": "vim::Indent",
"<": "vim::Outdent", "<": "vim::Outdent",
"=": "vim::AutoIndent", "=": "vim::AutoIndent",
"d": "vim::HelixDelete", "`": "vim::ConvertToLowerCase",
"c": "vim::HelixSubstitute", "alt-`": "vim::ConvertToUpperCase",
"alt-c": "vim::HelixSubstituteNoYank", "g q": "vim::PushRewrap",
"g w": "vim::PushRewrap",
// Selection manipulation "insert": "vim::InsertBefore",
"s": "vim::HelixSelectRegex", "alt-.": "vim::RepeatFind",
"alt-s": ["editor::SplitSelectionIntoLines", { "keep_selections": true }], "alt-s": ["editor::SplitSelectionIntoLines", { "keep_selections": true }],
";": "vim::HelixCollapseSelection",
"alt-;": "vim::OtherEnd",
",": "vim::HelixKeepNewestSelection",
"shift-c": "vim::HelixDuplicateBelow",
"alt-shift-c": "vim::HelixDuplicateAbove",
"%": "editor::SelectAll",
"x": "vim::HelixSelectLine",
"shift-x": "editor::SelectLine",
"ctrl-c": "editor::ToggleComments",
"alt-o": "editor::SelectLargerSyntaxNode",
"alt-i": "editor::SelectSmallerSyntaxNode",
"alt-p": "editor::SelectPreviousSyntaxNode",
"alt-n": "editor::SelectNextSyntaxNode",
// Goto mode // Goto mode
"g e": "vim::EndOfDocument", "g n": "pane::ActivateNextItem",
"g h": "vim::StartOfLine", "g p": "pane::ActivatePreviousItem",
// "tab": "pane::ActivateNextItem",
// "shift-tab": "pane::ActivatePrevItem",
"shift-h": "pane::ActivatePreviousItem",
"shift-l": "pane::ActivateNextItem",
"g l": "vim::EndOfLine", "g l": "vim::EndOfLine",
"g h": "vim::StartOfLine",
"g s": "vim::FirstNonWhitespace", // "g s" default behavior is "space s" "g s": "vim::FirstNonWhitespace", // "g s" default behavior is "space s"
"g e": "vim::EndOfDocument",
"g .": "vim::HelixGotoLastModification", // go to last modification
"g r": "editor::FindAllReferences", // zed specific
"g t": "vim::WindowTop", "g t": "vim::WindowTop",
"g c": "vim::WindowMiddle", "g c": "vim::WindowMiddle",
"g b": "vim::WindowBottom", "g b": "vim::WindowBottom",
"g r": "editor::FindAllReferences", // zed specific
"g n": "pane::ActivateNextItem",
"shift-l": "pane::ActivateNextItem",
"g p": "pane::ActivatePreviousItem",
"shift-h": "pane::ActivatePreviousItem",
"g .": "vim::HelixGotoLastModification", // go to last modification
"shift-r": "editor::Paste",
"x": "vim::HelixSelectLine",
"shift-x": "editor::SelectLine",
"%": "editor::SelectAll",
// Window mode // Window mode
"space w h": "workspace::ActivatePaneLeft", "space w h": "workspace::ActivatePaneLeft",
"space w l": "workspace::ActivatePaneRight", "space w l": "workspace::ActivatePaneRight",
@@ -500,7 +484,6 @@
"space w r": "pane::SplitRight", "space w r": "pane::SplitRight",
"space w v": "pane::SplitDown", "space w v": "pane::SplitDown",
"space w d": "pane::SplitDown", "space w d": "pane::SplitDown",
// Space mode // Space mode
"space f": "file_finder::Toggle", "space f": "file_finder::Toggle",
"space k": "editor::Hover", "space k": "editor::Hover",
@@ -511,18 +494,14 @@
"space a": "editor::ToggleCodeActions", "space a": "editor::ToggleCodeActions",
"space h": "editor::SelectAllMatches", "space h": "editor::SelectAllMatches",
"space c": "editor::ToggleComments", "space c": "editor::ToggleComments",
"space p": "editor::Paste",
"space y": "editor::Copy", "space y": "editor::Copy",
"space p": "editor::Paste",
// Other "shift-u": "editor::Redo",
":": "command_palette::Toggle", "ctrl-c": "editor::ToggleComments",
"m": "vim::PushHelixMatch", "d": "vim::HelixDelete",
"]": ["vim::PushHelixNext", { "around": true }], "c": "vim::Substitute",
"[": ["vim::PushHelixPrevious", { "around": true }], "shift-c": "editor::AddSelectionBelow",
"g q": "vim::PushRewrap", "alt-shift-c": "editor::AddSelectionAbove"
"g w": "vim::PushRewrap"
// "tab": "pane::ActivateNextItem",
// "shift-tab": "pane::ActivatePrevItem",
} }
}, },
{ {
@@ -601,18 +580,18 @@
// "q": "vim::AnyQuotes", // "q": "vim::AnyQuotes",
"q": "vim::MiniQuotes", "q": "vim::MiniQuotes",
"|": "vim::VerticalBars", "|": "vim::VerticalBars",
"(": ["vim::Parentheses", { "opening": true }], "(": "vim::Parentheses",
")": "vim::Parentheses", ")": "vim::Parentheses",
"b": "vim::Parentheses", "b": "vim::Parentheses",
// "b": "vim::AnyBrackets", // "b": "vim::AnyBrackets",
// "b": "vim::MiniBrackets", // "b": "vim::MiniBrackets",
"[": ["vim::SquareBrackets", { "opening": true }], "[": "vim::SquareBrackets",
"]": "vim::SquareBrackets", "]": "vim::SquareBrackets",
"r": "vim::SquareBrackets", "r": "vim::SquareBrackets",
"{": ["vim::CurlyBrackets", { "opening": true }], "{": "vim::CurlyBrackets",
"}": "vim::CurlyBrackets", "}": "vim::CurlyBrackets",
"shift-b": "vim::CurlyBrackets", "shift-b": "vim::CurlyBrackets",
"<": ["vim::AngleBrackets", { "opening": true }], "<": "vim::AngleBrackets",
">": "vim::AngleBrackets", ">": "vim::AngleBrackets",
"a": "vim::Argument", "a": "vim::Argument",
"i": "vim::IndentObj", "i": "vim::IndentObj",
@@ -832,7 +811,7 @@
} }
}, },
{ {
"context": "VimControl && !menu || !Editor && !Terminal", "context": "VimControl || !Editor && !Terminal",
"bindings": { "bindings": {
// window related commands (ctrl-w X) // window related commands (ctrl-w X)
"ctrl-w": null, "ctrl-w": null,
@@ -852,10 +831,10 @@
"ctrl-w shift-right": "workspace::SwapPaneRight", "ctrl-w shift-right": "workspace::SwapPaneRight",
"ctrl-w shift-up": "workspace::SwapPaneUp", "ctrl-w shift-up": "workspace::SwapPaneUp",
"ctrl-w shift-down": "workspace::SwapPaneDown", "ctrl-w shift-down": "workspace::SwapPaneDown",
"ctrl-w shift-h": "workspace::MovePaneLeft", "ctrl-w shift-h": "workspace::SwapPaneLeft",
"ctrl-w shift-l": "workspace::MovePaneRight", "ctrl-w shift-l": "workspace::SwapPaneRight",
"ctrl-w shift-k": "workspace::MovePaneUp", "ctrl-w shift-k": "workspace::SwapPaneUp",
"ctrl-w shift-j": "workspace::MovePaneDown", "ctrl-w shift-j": "workspace::SwapPaneDown",
"ctrl-w >": "vim::ResizePaneRight", "ctrl-w >": "vim::ResizePaneRight",
"ctrl-w <": "vim::ResizePaneLeft", "ctrl-w <": "vim::ResizePaneLeft",
"ctrl-w -": "vim::ResizePaneDown", "ctrl-w -": "vim::ResizePaneDown",
@@ -886,9 +865,7 @@
"ctrl-w ctrl-o": "workspace::CloseInactiveTabsAndPanes", "ctrl-w ctrl-o": "workspace::CloseInactiveTabsAndPanes",
"ctrl-w o": "workspace::CloseInactiveTabsAndPanes", "ctrl-w o": "workspace::CloseInactiveTabsAndPanes",
"ctrl-w ctrl-n": "workspace::NewFileSplitHorizontal", "ctrl-w ctrl-n": "workspace::NewFileSplitHorizontal",
"ctrl-w n": "workspace::NewFileSplitHorizontal", "ctrl-w n": "workspace::NewFileSplitHorizontal"
"g t": "vim::GoToTab",
"g shift-t": "vim::GoToPreviousTab"
} }
}, },
{ {
@@ -991,9 +968,7 @@
"bindings": { "bindings": {
"ctrl-h": "editor::Backspace", "ctrl-h": "editor::Backspace",
"ctrl-u": "editor::DeleteToBeginningOfLine", "ctrl-u": "editor::DeleteToBeginningOfLine",
"ctrl-w": "editor::DeleteToPreviousWordStart", "ctrl-w": "editor::DeleteToPreviousWordStart"
"ctrl-p": "menu::SelectPrevious",
"ctrl-n": "menu::SelectNext"
} }
}, },
{ {
@@ -1025,16 +1000,5 @@
// and Windows. // and Windows.
"alt-l": "editor::AcceptEditPrediction" "alt-l": "editor::AcceptEditPrediction"
} }
},
{
"context": "SettingsWindow > NavigationMenu && !search",
"bindings": {
"l": "settings_editor::ExpandNavEntry",
"h": "settings_editor::CollapseNavEntry",
"k": "settings_editor::FocusPreviousNavEntry",
"j": "settings_editor::FocusNextNavEntry",
"g g": "settings_editor::FocusFirstNavEntry",
"shift-g": "settings_editor::FocusLastNavEntry"
}
} }
] ]

View File

@@ -0,0 +1,179 @@
You are a highly skilled software engineer with extensive knowledge in many programming languages, frameworks, design patterns, and best practices.
## Communication
1. Be conversational but professional.
2. Refer to the user in the second person and yourself in the first person.
3. Format your responses in markdown. Use backticks to format file, directory, function, and class names.
4. NEVER lie or make things up.
5. Refrain from apologizing all the time when results are unexpected. Instead, just try your best to proceed or explain the circumstances to the user without apologizing.
{{#if has_tools}}
## Tool Use
1. Make sure to adhere to the tools schema.
2. Provide every required argument.
3. DO NOT use tools to access items that are already available in the context section.
4. Use only the tools that are currently available.
5. DO NOT use a tool that is not available just because it appears in the conversation. This means the user turned it off.
6. NEVER run commands that don't terminate on their own such as web servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers.
7. Avoid HTML entity escaping - use plain characters instead.
## Searching and Reading
If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions.
{{! TODO: If there are files, we should mention it but otherwise omit that fact }}
If appropriate, use tool calls to explore the current project, which contains the following root directories:
{{#each worktrees}}
- `{{abs_path}}`
{{/each}}
- Bias towards not asking the user for help if you can find the answer yourself.
- When providing paths to tools, the path should always start with the name of a project root directory listed above.
- Before you read or edit a file, you must first find the full path. DO NOT ever guess a file path!
{{# if (has_tool 'grep') }}
- When looking for symbols in the project, prefer the `grep` tool.
- As you learn about the structure of the project, use that information to scope `grep` searches to targeted subtrees of the project.
- The user might specify a partial file path. If you don't know the full path, use `find_path` (not `grep`) before you read the file.
{{/if}}
{{else}}
You are being tasked with providing a response, but you have no ability to use tools or to read or write any aspect of the user's system (other than any context the user might have provided to you).
As such, if you need the user to perform any actions for you, you must request them explicitly. Bias towards giving a response to the best of your ability, and then making requests for the user to take action (e.g. to give you more context) only optionally.
The one exception to this is if the user references something you don't know about - for example, the name of a source code file, function, type, or other piece of code that you have no awareness of. In this case, you MUST NOT MAKE SOMETHING UP, or assume you know what that thing is or how it works. Instead, you must ask the user for clarification rather than giving a response.
{{/if}}
## Code Block Formatting
Whenever you mention a code block, you MUST use ONLY the following format:
```path/to/Something.blah#L123-456
(code goes here)
```
The `#L123-456` means the line number range 123 through 456, and the path/to/Something.blah
is a path in the project. (If there is no valid path in the project, then you can use
/dev/null/path.extension for its path.) This is the ONLY valid way to format code blocks, because the Markdown parser
does not understand the more common ```language syntax, or bare ``` blocks. It only
understands this path-based syntax, and if the path is missing, then it will error and you will have to do it over again.
Just to be really clear about this, if you ever find yourself writing three backticks followed by a language name, STOP!
You have made a mistake. You can only ever put paths after triple backticks!
<example>
Based on all the information I've gathered, here's a summary of how this system works:
1. The README file is loaded into the system.
2. The system finds the first two headers, including everything in between. In this case, that would be:
```path/to/README.md#L8-12
# First Header
This is the info under the first header.
## Sub-header
```
3. Then the system finds the last header in the README:
```path/to/README.md#L27-29
## Last Header
This is the last header in the README.
```
4. Finally, it passes this information on to the next process.
</example>
<example>
In Markdown, hash marks signify headings. For example:
```/dev/null/example.md#L1-3
# Level 1 heading
## Level 2 heading
### Level 3 heading
```
</example>
Here are examples of ways you must never render code blocks:
<bad_example_do_not_do_this>
In Markdown, hash marks signify headings. For example:
```
# Level 1 heading
## Level 2 heading
### Level 3 heading
```
</bad_example_do_not_do_this>
This example is unacceptable because it does not include the path.
<bad_example_do_not_do_this>
In Markdown, hash marks signify headings. For example:
```markdown
# Level 1 heading
## Level 2 heading
### Level 3 heading
```
</bad_example_do_not_do_this>
This example is unacceptable because it has the language instead of the path.
<bad_example_do_not_do_this>
In Markdown, hash marks signify headings. For example:
# Level 1 heading
## Level 2 heading
### Level 3 heading
</bad_example_do_not_do_this>
This example is unacceptable because it uses indentation to mark the code block
instead of backticks with a path.
<bad_example_do_not_do_this>
In Markdown, hash marks signify headings. For example:
```markdown
/dev/null/example.md#L1-3
# Level 1 heading
## Level 2 heading
### Level 3 heading
```
</bad_example_do_not_do_this>
This example is unacceptable because the path is in the wrong place. The path must be directly after the opening backticks.
{{#if has_tools}}
## Fixing Diagnostics
1. Make 1-2 attempts at fixing diagnostics, then defer to the user.
2. Never simplify code you've written just to solve diagnostics. Complete, mostly correct code is more valuable than perfect code that doesn't solve the problem.
## Debugging
When debugging, only make code changes if you are certain that you can solve the problem.
Otherwise, follow debugging best practices:
1. Address the root cause instead of the symptoms.
2. Add descriptive logging statements and error messages to track variable and code state.
3. Add test functions and statements to isolate the problem.
{{/if}}
## Calling External APIs
1. Unless explicitly requested by the user, use the best suited external APIs and packages to solve the task. There is no need to ask the user for permission.
2. When selecting which version of an API or package to use, choose one that is compatible with the user's dependency management file(s). If no such file exists or if the package is not present, use the latest version that is in your training data.
3. If an external API requires an API key, be sure to point this out to the user. Adhere to best security practices (e.g. DO NOT hardcode an API key in a place where it can be exposed).
## System Information
Operating System: {{os}}
Default Shell: {{shell}}
{{#if (or has_rules has_user_rules)}}
## User's Custom Instructions
The following additional instructions are provided by the user, and should be followed to the best of your ability{{#if has_tools}} without interfering with the tool use guidelines{{/if}}.
{{#if has_rules}}
There are project rules that apply to these root directories:
{{#each worktrees}}
{{#if rules_file}}
`{{root_name}}/{{rules_file.path_in_worktree}}`:
``````
{{{rules_file.text}}}
``````
{{/if}}
{{/each}}
{{/if}}
{{#if has_user_rules}}
The user has specified the following rules that should be applied:
{{#each user_rules}}
{{#if title}}
Rules title: {{title}}
{{/if}}
``````
{{contents}}
``````
{{/each}}
{{/if}}
{{/if}}

View File

@@ -1,8 +1,7 @@
{ {
"$schema": "zed://schemas/settings", /// The displayed name of this project. If not set or empty, the root directory name
/// The displayed name of this project. If not set or null, the root directory name
/// will be displayed. /// will be displayed.
"project_name": null, "project_name": "",
// The name of the Zed theme to use for the UI. // The name of the Zed theme to use for the UI.
// //
// `mode` is one of: // `mode` is one of:
@@ -77,7 +76,7 @@
"ui_font_size": 16, "ui_font_size": 16,
// The default font size for agent responses in the agent panel. Falls back to the UI font size if unset. // The default font size for agent responses in the agent panel. Falls back to the UI font size if unset.
"agent_ui_font_size": null, "agent_ui_font_size": null,
// The default font size for user messages in the agent panel. // The default font size for user messages in the agent panel. Falls back to the buffer font size if unset.
"agent_buffer_font_size": 12, "agent_buffer_font_size": 12,
// How much to fade out unused code. // How much to fade out unused code.
"unnecessary_code_fade": 0.3, "unnecessary_code_fade": 0.3,
@@ -311,11 +310,11 @@
"use_on_type_format": true, "use_on_type_format": true,
// Whether to automatically add matching closing characters when typing // Whether to automatically add matching closing characters when typing
// opening parenthesis, bracket, brace, single or double quote characters. // opening parenthesis, bracket, brace, single or double quote characters.
// For example, when you type '(', Zed will add a closing ) at the correct position. // For example, when you type (, Zed will add a closing ) at the correct position.
"use_autoclose": true, "use_autoclose": true,
// Whether to automatically surround selected text when typing opening parenthesis, // Whether to automatically surround selected text when typing opening parenthesis,
// bracket, brace, single or double quote characters. // bracket, brace, single or double quote characters.
// For example, when you select text and type '(', Zed will surround the text with (). // For example, when you select text and type (, Zed will surround the text with ().
"use_auto_surround": true, "use_auto_surround": true,
// Whether indentation should be adjusted based on the context whilst typing. // Whether indentation should be adjusted based on the context whilst typing.
"auto_indent": true, "auto_indent": true,
@@ -722,11 +721,7 @@
// Whether to enable drag-and-drop operations in the project panel. // Whether to enable drag-and-drop operations in the project panel.
"drag_and_drop": true, "drag_and_drop": true,
// Whether to hide the root entry when only one folder is open in the window. // Whether to hide the root entry when only one folder is open in the window.
"hide_root": false, "hide_root": false
// Whether to hide the hidden entries in the project panel.
"hide_hidden": false,
// Whether to automatically open files when pasting them in the project panel.
"open_file_on_paste": true
}, },
"outline_panel": { "outline_panel": {
// Whether to show the outline panel button in the status bar // Whether to show the outline panel button in the status bar
@@ -884,6 +879,8 @@
// Note: This setting has no effect on external agents that support permission modes, such as Claude Code. // Note: This setting has no effect on external agents that support permission modes, such as Claude Code.
// You can set `agent_servers.claude.default_mode` to `bypassPermissions` to skip all permission requests. // You can set `agent_servers.claude.default_mode` to `bypassPermissions` to skip all permission requests.
"always_allow_tool_actions": false, "always_allow_tool_actions": false,
// When enabled, the agent will stream edits.
"stream_edits": false,
// When enabled, agent edits will be displayed in single-file editors for review // When enabled, agent edits will be displayed in single-file editors for review
"single_file_review": true, "single_file_review": true,
// When enabled, show voting thumbs for feedback on agent edits. // When enabled, show voting thumbs for feedback on agent edits.
@@ -906,7 +903,6 @@
"now": true, "now": true,
"find_path": true, "find_path": true,
"read_file": true, "read_file": true,
"open": true,
"grep": true, "grep": true,
"terminal": true, "terminal": true,
"thinking": true, "thinking": true,
@@ -918,6 +914,7 @@
// We don't know which of the context server tools are safe for the "Ask" profile, so we don't enable them by default. // We don't know which of the context server tools are safe for the "Ask" profile, so we don't enable them by default.
// "enable_all_context_servers": true, // "enable_all_context_servers": true,
"tools": { "tools": {
"contents": true,
"diagnostics": true, "diagnostics": true,
"fetch": true, "fetch": true,
"list_directory": true, "list_directory": true,
@@ -1091,10 +1088,10 @@
// Only the file Zed had indexed will be used, not necessary all the gitignored files. // Only the file Zed had indexed will be used, not necessary all the gitignored files.
// //
// Can accept 3 values: // Can accept 3 values:
// * "all": Use all gitignored files // * `true`: Use all gitignored files
// * "indexed": Use only the files Zed had indexed // * `false`: Use only the files Zed had indexed
// * "smart": Be smart and search for ignored when called from a gitignored worktree // * `null`: Be smart and search for ignored when called from a gitignored worktree
"include_ignored": "smart" "include_ignored": null
}, },
// Whether or not to remove any trailing whitespace from lines of a buffer // Whether or not to remove any trailing whitespace from lines of a buffer
// before saving it. // before saving it.
@@ -1104,31 +1101,25 @@
// Removes any lines containing only whitespace at the end of the file and // Removes any lines containing only whitespace at the end of the file and
// ensures just one newline at the end. // ensures just one newline at the end.
"ensure_final_newline_on_save": true, "ensure_final_newline_on_save": true,
// Whether or not to perform a buffer format before saving: [on, off] // Whether or not to perform a buffer format before saving: [on, off, prettier, language_server]
// Keep in mind, if the autosave with delay is enabled, format_on_save will be ignored // Keep in mind, if the autosave with delay is enabled, format_on_save will be ignored
"format_on_save": "on", "format_on_save": "on",
// How to perform a buffer format. This setting can take multiple values: // How to perform a buffer format. This setting can take 4 values:
// //
// 1. Default. Format files using Zed's Prettier integration (if applicable), // 1. Format code using the current language server:
// or falling back to formatting via language server:
// "formatter": "auto"
// 2. Format code using the current language server:
// "formatter": "language_server" // "formatter": "language_server"
// 3. Format code using a specific language server: // 2. Format code using an external command:
// "formatter": {"language_server": {"name": "ruff"}}
// 4. Format code using an external command:
// "formatter": { // "formatter": {
// "external": { // "external": {
// "command": "prettier", // "command": "prettier",
// "arguments": ["--stdin-filepath", "{buffer_path}"] // "arguments": ["--stdin-filepath", "{buffer_path}"]
// } // }
// } // }
// 5. Format code using Zed's Prettier integration: // 3. Format code using Zed's Prettier integration:
// "formatter": "prettier" // "formatter": "prettier"
// 6. Format code using a code action // 4. Default. Format files using Zed's Prettier integration (if applicable),
// "formatter": {"code_action": "source.fixAll.eslint"} // or falling back to formatting via language server:
// 7. An array of any format step specified above to apply in order // "formatter": "auto"
// "formatter": [{"code_action": "source.fixAll.eslint"}, "prettier"]
"formatter": "auto", "formatter": "auto",
// How to soft-wrap long lines of text. // How to soft-wrap long lines of text.
// Possible values: // Possible values:
@@ -1242,8 +1233,8 @@
"git_gutter": "tracked_files", "git_gutter": "tracked_files",
/// Sets the debounce threshold (in milliseconds) after which changes are reflected in the git gutter. /// Sets the debounce threshold (in milliseconds) after which changes are reflected in the git gutter.
/// ///
/// Default: 0 /// Default: null
"gutter_debounce": 0, "gutter_debounce": null,
// Control whether the git blame information is shown inline, // Control whether the git blame information is shown inline,
// in the currently focused line. // in the currently focused line.
"inline_blame": { "inline_blame": {
@@ -1320,18 +1311,15 @@
// "proxy": "", // "proxy": "",
// "proxy_no_verify": false // "proxy_no_verify": false
// }, // },
// Whether edit predictions are enabled when editing text threads.
// This setting has no effect if globally disabled.
"enabled_in_text_threads": true,
"copilot": { "copilot": {
"enterprise_uri": null, "enterprise_uri": null,
"proxy": null, "proxy": null,
"proxy_no_verify": null "proxy_no_verify": null
}, }
"codestral": {
"model": null,
"max_tokens": null
},
// Whether edit predictions are enabled when editing text threads in the agent panel.
// This setting has no effect if globally disabled.
"enabled_in_text_threads": true
}, },
// Settings specific to journaling // Settings specific to journaling
"journal": { "journal": {
@@ -1350,9 +1338,7 @@
// Whether to show the active language button in the status bar. // Whether to show the active language button in the status bar.
"active_language_button": true, "active_language_button": true,
// Whether to show the cursor position button in the status bar. // Whether to show the cursor position button in the status bar.
"cursor_position_button": true, "cursor_position_button": true
// Whether to show active line endings button in the status bar.
"line_endings_button": false
}, },
// Settings specific to the terminal // Settings specific to the terminal
"terminal": { "terminal": {
@@ -1415,8 +1401,8 @@
// 4. A box drawn around the following character // 4. A box drawn around the following character
// "hollow" // "hollow"
// //
// Default: "block" // Default: not set, defaults to "block"
"cursor_shape": "block", "cursor_shape": null,
// Set whether Alternate Scroll mode (code: ?1007) is active by default. // Set whether Alternate Scroll mode (code: ?1007) is active by default.
// Alternate Scroll mode converts mouse scroll events into up / down key // Alternate Scroll mode converts mouse scroll events into up / down key
// presses when in the alternate screen (e.g. when running applications // presses when in the alternate screen (e.g. when running applications
@@ -1438,8 +1424,8 @@
// Whether or not selecting text in the terminal will automatically // Whether or not selecting text in the terminal will automatically
// copy to the system clipboard. // copy to the system clipboard.
"copy_on_select": false, "copy_on_select": false,
// Whether to keep the text selection after copying it to the clipboard. // Whether to keep the text selection after copying it to the clipboard
"keep_selection_on_copy": true, "keep_selection_on_copy": false,
// Whether to show the terminal button in the status bar // Whether to show the terminal button in the status bar
"button": true, "button": true,
// Any key-value pairs added to this list will be added to the terminal's // Any key-value pairs added to this list will be added to the terminal's
@@ -1559,7 +1545,6 @@
// //
"file_types": { "file_types": {
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json", "tsconfig*.json"], "JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json", "tsconfig*.json"],
"Markdown": [".rules", ".cursorrules", ".windsurfrules", ".clinerules"],
"Shell Script": [".env.*"] "Shell Script": [".env.*"]
}, },
// Settings for which version of Node.js and NPM to use when installing // Settings for which version of Node.js and NPM to use when installing
@@ -1700,7 +1685,6 @@
"preferred_line_length": 72 "preferred_line_length": 72
}, },
"Go": { "Go": {
"hard_tabs": true,
"code_actions_on_format": { "code_actions_on_format": {
"source.organizeImports": true "source.organizeImports": true
}, },
@@ -1742,7 +1726,7 @@
} }
}, },
"Kotlin": { "Kotlin": {
"language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."] "language_servers": ["kotlin-language-server", "!kotlin-lsp", "..."]
}, },
"LaTeX": { "LaTeX": {
"formatter": "language_server", "formatter": "language_server",
@@ -1770,20 +1754,15 @@
} }
}, },
"Plain Text": { "Plain Text": {
"allow_rewrap": "anywhere", "allow_rewrap": "anywhere"
"soft_wrap": "editor_width"
}, },
"Python": { "Python": {
"code_actions_on_format": {
"source.organizeImports.ruff": true
},
"formatter": { "formatter": {
"language_server": { "language_server": {
"name": "ruff" "name": "ruff"
} }
}, },
"debuggers": ["Debugpy"], "debuggers": ["Debugpy"]
"language_servers": ["basedpyright", "ruff", "!ty", "!pyrefly", "!pyright", "!pylsp", "..."]
}, },
"Ruby": { "Ruby": {
"language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "..."] "language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "..."]
@@ -1825,11 +1804,10 @@
}, },
"SystemVerilog": { "SystemVerilog": {
"format_on_save": "off", "format_on_save": "off",
"language_servers": ["!slang", "..."],
"use_on_type_format": false "use_on_type_format": false
}, },
"Vue.js": { "Vue.js": {
"language_servers": ["vue-language-server", "vtsls", "..."], "language_servers": ["vue-language-server", "..."],
"prettier": { "prettier": {
"allowed": true "allowed": true
} }
@@ -1933,11 +1911,6 @@
// DAP Specific settings. // DAP Specific settings.
"dap": { "dap": {
// Specify the DAP name as a key here. // Specify the DAP name as a key here.
"CodeLLDB": {
"env": {
"RUST_LOG": "info"
}
}
}, },
// Common language server settings. // Common language server settings.
"global_lsp_settings": { "global_lsp_settings": {
@@ -2078,7 +2051,7 @@
// } // }
// } // }
// } // }
"profiles": {}, "profiles": [],
// A map of log scopes to the desired log level. // A map of log scopes to the desired log level.
// Useful for filtering out noisy logs or enabling more verbose logging. // Useful for filtering out noisy logs or enabling more verbose logging.

View File

@@ -6,8 +6,8 @@
{ {
"name": "Gruvbox Dark", "name": "Gruvbox Dark",
"appearance": "dark", "appearance": "dark",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": { "style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#5b534dff", "border": "#5b534dff",
"border.variant": "#494340ff", "border.variant": "#494340ff",
"border.focused": "#303a36ff", "border.focused": "#303a36ff",
@@ -49,9 +49,8 @@
"panel.background": "#3a3735ff", "panel.background": "#3a3735ff",
"panel.focused_border": "#83a598ff", "panel.focused_border": "#83a598ff",
"pane.focused_border": null, "pane.focused_border": null,
"scrollbar.thumb.active_background": "#83a598ac", "scrollbar.thumb.background": "#fbf1c74c",
"scrollbar.thumb.hover_background": "#fbf1c74c", "scrollbar.thumb.hover_background": "#494340ff",
"scrollbar.thumb.background": "#a899844c",
"scrollbar.thumb.border": "#494340ff", "scrollbar.thumb.border": "#494340ff",
"scrollbar.track.background": "#00000000", "scrollbar.track.background": "#00000000",
"scrollbar.track.border": "#373432ff", "scrollbar.track.border": "#373432ff",
@@ -412,8 +411,8 @@
{ {
"name": "Gruvbox Dark Hard", "name": "Gruvbox Dark Hard",
"appearance": "dark", "appearance": "dark",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": { "style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#5b534dff", "border": "#5b534dff",
"border.variant": "#494340ff", "border.variant": "#494340ff",
"border.focused": "#303a36ff", "border.focused": "#303a36ff",
@@ -455,9 +454,8 @@
"panel.background": "#393634ff", "panel.background": "#393634ff",
"panel.focused_border": "#83a598ff", "panel.focused_border": "#83a598ff",
"pane.focused_border": null, "pane.focused_border": null,
"scrollbar.thumb.active_background": "#83a598ac", "scrollbar.thumb.background": "#fbf1c74c",
"scrollbar.thumb.hover_background": "#fbf1c74c", "scrollbar.thumb.hover_background": "#494340ff",
"scrollbar.thumb.background": "#a899844c",
"scrollbar.thumb.border": "#494340ff", "scrollbar.thumb.border": "#494340ff",
"scrollbar.track.background": "#00000000", "scrollbar.track.background": "#00000000",
"scrollbar.track.border": "#343130ff", "scrollbar.track.border": "#343130ff",
@@ -818,8 +816,8 @@
{ {
"name": "Gruvbox Dark Soft", "name": "Gruvbox Dark Soft",
"appearance": "dark", "appearance": "dark",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": { "style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#5b534dff", "border": "#5b534dff",
"border.variant": "#494340ff", "border.variant": "#494340ff",
"border.focused": "#303a36ff", "border.focused": "#303a36ff",
@@ -861,9 +859,8 @@
"panel.background": "#3b3735ff", "panel.background": "#3b3735ff",
"panel.focused_border": null, "panel.focused_border": null,
"pane.focused_border": null, "pane.focused_border": null,
"scrollbar.thumb.active_background": "#83a598ac", "scrollbar.thumb.background": "#fbf1c74c",
"scrollbar.thumb.hover_background": "#fbf1c74c", "scrollbar.thumb.hover_background": "#494340ff",
"scrollbar.thumb.background": "#a899844c",
"scrollbar.thumb.border": "#494340ff", "scrollbar.thumb.border": "#494340ff",
"scrollbar.track.background": "#00000000", "scrollbar.track.background": "#00000000",
"scrollbar.track.border": "#393634ff", "scrollbar.track.border": "#393634ff",
@@ -1224,8 +1221,8 @@
{ {
"name": "Gruvbox Light", "name": "Gruvbox Light",
"appearance": "light", "appearance": "light",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": { "style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#c8b899ff", "border": "#c8b899ff",
"border.variant": "#ddcca7ff", "border.variant": "#ddcca7ff",
"border.focused": "#adc5ccff", "border.focused": "#adc5ccff",
@@ -1267,9 +1264,8 @@
"panel.background": "#ecddb4ff", "panel.background": "#ecddb4ff",
"panel.focused_border": null, "panel.focused_border": null,
"pane.focused_border": null, "pane.focused_border": null,
"scrollbar.thumb.active_background": "#458588ac", "scrollbar.thumb.background": "#2828284c",
"scrollbar.thumb.hover_background": "#2828284c", "scrollbar.thumb.hover_background": "#ddcca7ff",
"scrollbar.thumb.background": "#7c6f644c",
"scrollbar.thumb.border": "#ddcca7ff", "scrollbar.thumb.border": "#ddcca7ff",
"scrollbar.track.background": "#00000000", "scrollbar.track.background": "#00000000",
"scrollbar.track.border": "#eee0b7ff", "scrollbar.track.border": "#eee0b7ff",
@@ -1630,8 +1626,8 @@
{ {
"name": "Gruvbox Light Hard", "name": "Gruvbox Light Hard",
"appearance": "light", "appearance": "light",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": { "style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#c8b899ff", "border": "#c8b899ff",
"border.variant": "#ddcca7ff", "border.variant": "#ddcca7ff",
"border.focused": "#adc5ccff", "border.focused": "#adc5ccff",
@@ -1673,9 +1669,8 @@
"panel.background": "#ecddb5ff", "panel.background": "#ecddb5ff",
"panel.focused_border": null, "panel.focused_border": null,
"pane.focused_border": null, "pane.focused_border": null,
"scrollbar.thumb.active_background": "#458588ac", "scrollbar.thumb.background": "#2828284c",
"scrollbar.thumb.hover_background": "#2828284c", "scrollbar.thumb.hover_background": "#ddcca7ff",
"scrollbar.thumb.background": "#7c6f644c",
"scrollbar.thumb.border": "#ddcca7ff", "scrollbar.thumb.border": "#ddcca7ff",
"scrollbar.track.background": "#00000000", "scrollbar.track.background": "#00000000",
"scrollbar.track.border": "#eee1bbff", "scrollbar.track.border": "#eee1bbff",
@@ -2036,8 +2031,8 @@
{ {
"name": "Gruvbox Light Soft", "name": "Gruvbox Light Soft",
"appearance": "light", "appearance": "light",
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": { "style": {
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"border": "#c8b899ff", "border": "#c8b899ff",
"border.variant": "#ddcca7ff", "border.variant": "#ddcca7ff",
"border.focused": "#adc5ccff", "border.focused": "#adc5ccff",
@@ -2079,9 +2074,8 @@
"panel.background": "#ecdcb3ff", "panel.background": "#ecdcb3ff",
"panel.focused_border": null, "panel.focused_border": null,
"pane.focused_border": null, "pane.focused_border": null,
"scrollbar.thumb.active_background": "#458588ac", "scrollbar.thumb.background": "#2828284c",
"scrollbar.thumb.hover_background": "#2828284c", "scrollbar.thumb.hover_background": "#ddcca7ff",
"scrollbar.thumb.background": "#7c6f644c",
"scrollbar.thumb.border": "#ddcca7ff", "scrollbar.thumb.border": "#ddcca7ff",
"scrollbar.track.background": "#00000000", "scrollbar.track.background": "#00000000",
"scrollbar.track.border": "#eddeb5ff", "scrollbar.track.border": "#eddeb5ff",

View File

@@ -1,21 +0,0 @@
ARG NAMESPACE_BASE_IMAGE_REF=""
# Your image must build FROM NAMESPACE_BASE_IMAGE_REF
FROM ${NAMESPACE_BASE_IMAGE_REF} AS base
# Remove problematic git-lfs packagecloud source
RUN sudo rm -f /etc/apt/sources.list.d/*git-lfs*.list
# Install git and SSH for cloning private repositories
RUN sudo apt-get update && \
sudo apt-get install -y git openssh-client
# Clone the Zed repository
RUN git clone https://github.com/zed-industries/zed.git ~/zed
# Run the Linux installation script
WORKDIR /home/runner/zed
RUN ./script/linux
# Clean up unnecessary files to reduce image size
RUN sudo apt-get clean && sudo rm -rf \
/home/runner/zed

View File

@@ -3,17 +3,12 @@ avoid-breaking-exported-api = false
ignore-interior-mutability = [ ignore-interior-mutability = [
# Suppresses clippy::mutable_key_type, which is a false positive as the Eq # Suppresses clippy::mutable_key_type, which is a false positive as the Eq
# and Hash impls do not use fields with interior mutability. # and Hash impls do not use fields with interior mutability.
"agent_ui::context::AgentContextKey" "agent::context::AgentContextKey"
] ]
disallowed-methods = [ disallowed-methods = [
{ path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" }, { path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" },
{ path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" }, { path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" },
{ path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" }, { path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" },
{ path = "std::process::Command::stdin", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdin" },
{ path = "std::process::Command::stdout", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdout" },
{ path = "std::process::Command::stderr", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stderr" },
{ path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." },
{ path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." },
] ]
disallowed-types = [ disallowed-types = [
# { path = "std::collections::HashMap", replacement = "collections::HashMap" }, # { path = "std::collections::HashMap", replacement = "collections::HashMap" },

View File

@@ -33,6 +33,32 @@ services:
volumes: volumes:
- ./livekit.yaml:/livekit.yaml - ./livekit.yaml:/livekit.yaml
postgrest_app:
image: docker.io/postgrest/postgrest
container_name: postgrest_app
ports:
- 8081:8081
environment:
PGRST_DB_URI: postgres://postgres@postgres:5432/zed
volumes:
- ./crates/collab/postgrest_app.conf:/etc/postgrest.conf
command: postgrest /etc/postgrest.conf
depends_on:
- postgres
postgrest_llm:
image: docker.io/postgrest/postgrest
container_name: postgrest_llm
ports:
- 8082:8082
environment:
PGRST_DB_URI: postgres://postgres@postgres:5432/zed_llm
volumes:
- ./crates/collab/postgrest_llm.conf:/etc/postgrest.conf
command: postgrest /etc/postgrest.conf
depends_on:
- postgres
stripe-mock: stripe-mock:
image: docker.io/stripe/stripe-mock:v0.178.0 image: docker.io/stripe/stripe-mock:v0.178.0
ports: ports:

View File

@@ -45,6 +45,7 @@ url.workspace = true
util.workspace = true util.workspace = true
uuid.workspace = true uuid.workspace = true
watch.workspace = true watch.workspace = true
workspace-hack.workspace = true
[dev-dependencies] [dev-dependencies]
env_logger.workspace = true env_logger.workspace = true

View File

@@ -35,7 +35,7 @@ use std::rc::Rc;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use std::{fmt::Display, mem, path::PathBuf, sync::Arc}; use std::{fmt::Display, mem, path::PathBuf, sync::Arc};
use ui::App; use ui::App;
use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle}; use util::{ResultExt, get_default_system_shell_preferring_bash};
use uuid::Uuid; use uuid::Uuid;
#[derive(Debug)] #[derive(Debug)]
@@ -95,14 +95,9 @@ pub enum AssistantMessageChunk {
} }
impl AssistantMessageChunk { impl AssistantMessageChunk {
pub fn from_str( pub fn from_str(chunk: &str, language_registry: &Arc<LanguageRegistry>, cx: &mut App) -> Self {
chunk: &str,
language_registry: &Arc<LanguageRegistry>,
path_style: PathStyle,
cx: &mut App,
) -> Self {
Self::Message { Self::Message {
block: ContentBlock::new(chunk.into(), language_registry, path_style, cx), block: ContentBlock::new(chunk.into(), language_registry, cx),
} }
} }
@@ -191,7 +186,6 @@ impl ToolCall {
tool_call: acp::ToolCall, tool_call: acp::ToolCall,
status: ToolCallStatus, status: ToolCallStatus,
language_registry: Arc<LanguageRegistry>, language_registry: Arc<LanguageRegistry>,
path_style: PathStyle,
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>, terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
cx: &mut App, cx: &mut App,
) -> Result<Self> { ) -> Result<Self> {
@@ -205,7 +199,6 @@ impl ToolCall {
content.push(ToolCallContent::from_acp( content.push(ToolCallContent::from_acp(
item, item,
language_registry.clone(), language_registry.clone(),
path_style,
terminals, terminals,
cx, cx,
)?); )?);
@@ -230,7 +223,6 @@ impl ToolCall {
&mut self, &mut self,
fields: acp::ToolCallUpdateFields, fields: acp::ToolCallUpdateFields,
language_registry: Arc<LanguageRegistry>, language_registry: Arc<LanguageRegistry>,
path_style: PathStyle,
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>, terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
cx: &mut App, cx: &mut App,
) -> Result<()> { ) -> Result<()> {
@@ -268,13 +260,12 @@ impl ToolCall {
// Reuse existing content if we can // Reuse existing content if we can
for (old, new) in self.content.iter_mut().zip(content.by_ref()) { for (old, new) in self.content.iter_mut().zip(content.by_ref()) {
old.update_from_acp(new, language_registry.clone(), path_style, terminals, cx)?; old.update_from_acp(new, language_registry.clone(), terminals, cx)?;
} }
for new in content { for new in content {
self.content.push(ToolCallContent::from_acp( self.content.push(ToolCallContent::from_acp(
new, new,
language_registry.clone(), language_registry.clone(),
path_style,
terminals, terminals,
cx, cx,
)?) )?)
@@ -337,7 +328,7 @@ impl ToolCall {
location: acp::ToolCallLocation, location: acp::ToolCallLocation,
project: WeakEntity<Project>, project: WeakEntity<Project>,
cx: &mut AsyncApp, cx: &mut AsyncApp,
) -> Option<ResolvedLocation> { ) -> Option<AgentLocation> {
let buffer = project let buffer = project
.update(cx, |project, cx| { .update(cx, |project, cx| {
project project
@@ -359,14 +350,17 @@ impl ToolCall {
}) })
.ok()?; .ok()?;
Some(ResolvedLocation { buffer, position }) Some(AgentLocation {
buffer: buffer.downgrade(),
position,
})
} }
fn resolve_locations( fn resolve_locations(
&self, &self,
project: Entity<Project>, project: Entity<Project>,
cx: &mut App, cx: &mut App,
) -> Task<Vec<Option<ResolvedLocation>>> { ) -> Task<Vec<Option<AgentLocation>>> {
let locations = self.locations.clone(); let locations = self.locations.clone();
project.update(cx, |_, cx| { project.update(cx, |_, cx| {
cx.spawn(async move |project, cx| { cx.spawn(async move |project, cx| {
@@ -380,23 +374,6 @@ impl ToolCall {
} }
} }
// Separate so we can hold a strong reference to the buffer
// for saving on the thread
#[derive(Clone, Debug, PartialEq, Eq)]
struct ResolvedLocation {
buffer: Entity<Buffer>,
position: Anchor,
}
impl From<&ResolvedLocation> for AgentLocation {
fn from(value: &ResolvedLocation) -> Self {
Self {
buffer: value.buffer.downgrade(),
position: value.position,
}
}
}
#[derive(Debug)] #[derive(Debug)]
pub enum ToolCallStatus { pub enum ToolCallStatus {
/// The tool call hasn't started running yet, but we start showing it to /// The tool call hasn't started running yet, but we start showing it to
@@ -459,23 +436,21 @@ impl ContentBlock {
pub fn new( pub fn new(
block: acp::ContentBlock, block: acp::ContentBlock,
language_registry: &Arc<LanguageRegistry>, language_registry: &Arc<LanguageRegistry>,
path_style: PathStyle,
cx: &mut App, cx: &mut App,
) -> Self { ) -> Self {
let mut this = Self::Empty; let mut this = Self::Empty;
this.append(block, language_registry, path_style, cx); this.append(block, language_registry, cx);
this this
} }
pub fn new_combined( pub fn new_combined(
blocks: impl IntoIterator<Item = acp::ContentBlock>, blocks: impl IntoIterator<Item = acp::ContentBlock>,
language_registry: Arc<LanguageRegistry>, language_registry: Arc<LanguageRegistry>,
path_style: PathStyle,
cx: &mut App, cx: &mut App,
) -> Self { ) -> Self {
let mut this = Self::Empty; let mut this = Self::Empty;
for block in blocks { for block in blocks {
this.append(block, &language_registry, path_style, cx); this.append(block, &language_registry, cx);
} }
this this
} }
@@ -484,7 +459,6 @@ impl ContentBlock {
&mut self, &mut self,
block: acp::ContentBlock, block: acp::ContentBlock,
language_registry: &Arc<LanguageRegistry>, language_registry: &Arc<LanguageRegistry>,
path_style: PathStyle,
cx: &mut App, cx: &mut App,
) { ) {
if matches!(self, ContentBlock::Empty) if matches!(self, ContentBlock::Empty)
@@ -494,7 +468,7 @@ impl ContentBlock {
return; return;
} }
let new_content = self.block_string_contents(block, path_style); let new_content = self.block_string_contents(block);
match self { match self {
ContentBlock::Empty => { ContentBlock::Empty => {
@@ -504,7 +478,7 @@ impl ContentBlock {
markdown.update(cx, |markdown, cx| markdown.append(&new_content, cx)); markdown.update(cx, |markdown, cx| markdown.append(&new_content, cx));
} }
ContentBlock::ResourceLink { resource_link } => { ContentBlock::ResourceLink { resource_link } => {
let existing_content = Self::resource_link_md(&resource_link.uri, path_style); let existing_content = Self::resource_link_md(&resource_link.uri);
let combined = format!("{}\n{}", existing_content, new_content); let combined = format!("{}\n{}", existing_content, new_content);
*self = Self::create_markdown_block(combined, language_registry, cx); *self = Self::create_markdown_block(combined, language_registry, cx);
@@ -523,11 +497,11 @@ impl ContentBlock {
} }
} }
fn block_string_contents(&self, block: acp::ContentBlock, path_style: PathStyle) -> String { fn block_string_contents(&self, block: acp::ContentBlock) -> String {
match block { match block {
acp::ContentBlock::Text(text_content) => text_content.text, acp::ContentBlock::Text(text_content) => text_content.text,
acp::ContentBlock::ResourceLink(resource_link) => { acp::ContentBlock::ResourceLink(resource_link) => {
Self::resource_link_md(&resource_link.uri, path_style) Self::resource_link_md(&resource_link.uri)
} }
acp::ContentBlock::Resource(acp::EmbeddedResource { acp::ContentBlock::Resource(acp::EmbeddedResource {
resource: resource:
@@ -536,14 +510,14 @@ impl ContentBlock {
.. ..
}), }),
.. ..
}) => Self::resource_link_md(&uri, path_style), }) => Self::resource_link_md(&uri),
acp::ContentBlock::Image(image) => Self::image_md(&image), acp::ContentBlock::Image(image) => Self::image_md(&image),
acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => String::new(), acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => String::new(),
} }
} }
fn resource_link_md(uri: &str, path_style: PathStyle) -> String { fn resource_link_md(uri: &str) -> String {
if let Some(uri) = MentionUri::parse(uri, path_style).log_err() { if let Some(uri) = MentionUri::parse(uri).log_err() {
uri.as_link().to_string() uri.as_link().to_string()
} else { } else {
uri.to_string() uri.to_string()
@@ -589,7 +563,6 @@ impl ToolCallContent {
pub fn from_acp( pub fn from_acp(
content: acp::ToolCallContent, content: acp::ToolCallContent,
language_registry: Arc<LanguageRegistry>, language_registry: Arc<LanguageRegistry>,
path_style: PathStyle,
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>, terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
cx: &mut App, cx: &mut App,
) -> Result<Self> { ) -> Result<Self> {
@@ -597,7 +570,6 @@ impl ToolCallContent {
acp::ToolCallContent::Content { content } => Ok(Self::ContentBlock(ContentBlock::new( acp::ToolCallContent::Content { content } => Ok(Self::ContentBlock(ContentBlock::new(
content, content,
&language_registry, &language_registry,
path_style,
cx, cx,
))), ))),
acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| { acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| {
@@ -621,7 +593,6 @@ impl ToolCallContent {
&mut self, &mut self,
new: acp::ToolCallContent, new: acp::ToolCallContent,
language_registry: Arc<LanguageRegistry>, language_registry: Arc<LanguageRegistry>,
path_style: PathStyle,
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>, terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
cx: &mut App, cx: &mut App,
) -> Result<()> { ) -> Result<()> {
@@ -637,7 +608,7 @@ impl ToolCallContent {
}; };
if needs_update { if needs_update {
*self = Self::from_acp(new, language_registry, path_style, terminals, cx)?; *self = Self::from_acp(new, language_registry, terminals, cx)?;
} }
Ok(()) Ok(())
} }
@@ -1120,13 +1091,13 @@ impl AcpThread {
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Result<(), acp::Error> { ) -> Result<(), acp::Error> {
match update { match update {
acp::SessionUpdate::UserMessageChunk(acp::ContentChunk { content, .. }) => { acp::SessionUpdate::UserMessageChunk { content } => {
self.push_user_content_block(None, content, cx); self.push_user_content_block(None, content, cx);
} }
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk { content, .. }) => { acp::SessionUpdate::AgentMessageChunk { content } => {
self.push_assistant_content_block(content, false, cx); self.push_assistant_content_block(content, false, cx);
} }
acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk { content, .. }) => { acp::SessionUpdate::AgentThoughtChunk { content } => {
self.push_assistant_content_block(content, true, cx); self.push_assistant_content_block(content, true, cx);
} }
acp::SessionUpdate::ToolCall(tool_call) => { acp::SessionUpdate::ToolCall(tool_call) => {
@@ -1138,14 +1109,12 @@ impl AcpThread {
acp::SessionUpdate::Plan(plan) => { acp::SessionUpdate::Plan(plan) => {
self.update_plan(plan, cx); self.update_plan(plan, cx);
} }
acp::SessionUpdate::AvailableCommandsUpdate(acp::AvailableCommandsUpdate { acp::SessionUpdate::AvailableCommandsUpdate { available_commands } => {
available_commands, cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands))
.. }
}) => cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands)), acp::SessionUpdate::CurrentModeUpdate { current_mode_id } => {
acp::SessionUpdate::CurrentModeUpdate(acp::CurrentModeUpdate { cx.emit(AcpThreadEvent::ModeUpdated(current_mode_id))
current_mode_id, }
..
}) => cx.emit(AcpThreadEvent::ModeUpdated(current_mode_id)),
} }
Ok(()) Ok(())
} }
@@ -1157,7 +1126,6 @@ impl AcpThread {
cx: &mut Context<Self>, cx: &mut Context<Self>,
) { ) {
let language_registry = self.project.read(cx).languages().clone(); let language_registry = self.project.read(cx).languages().clone();
let path_style = self.project.read(cx).path_style(cx);
let entries_len = self.entries.len(); let entries_len = self.entries.len();
if let Some(last_entry) = self.entries.last_mut() if let Some(last_entry) = self.entries.last_mut()
@@ -1169,12 +1137,12 @@ impl AcpThread {
}) = last_entry }) = last_entry
{ {
*id = message_id.or(id.take()); *id = message_id.or(id.take());
content.append(chunk.clone(), &language_registry, path_style, cx); content.append(chunk.clone(), &language_registry, cx);
chunks.push(chunk); chunks.push(chunk);
let idx = entries_len - 1; let idx = entries_len - 1;
cx.emit(AcpThreadEvent::EntryUpdated(idx)); cx.emit(AcpThreadEvent::EntryUpdated(idx));
} else { } else {
let content = ContentBlock::new(chunk.clone(), &language_registry, path_style, cx); let content = ContentBlock::new(chunk.clone(), &language_registry, cx);
self.push_entry( self.push_entry(
AgentThreadEntry::UserMessage(UserMessage { AgentThreadEntry::UserMessage(UserMessage {
id: message_id, id: message_id,
@@ -1194,7 +1162,6 @@ impl AcpThread {
cx: &mut Context<Self>, cx: &mut Context<Self>,
) { ) {
let language_registry = self.project.read(cx).languages().clone(); let language_registry = self.project.read(cx).languages().clone();
let path_style = self.project.read(cx).path_style(cx);
let entries_len = self.entries.len(); let entries_len = self.entries.len();
if let Some(last_entry) = self.entries.last_mut() if let Some(last_entry) = self.entries.last_mut()
&& let AgentThreadEntry::AssistantMessage(AssistantMessage { chunks }) = last_entry && let AgentThreadEntry::AssistantMessage(AssistantMessage { chunks }) = last_entry
@@ -1204,10 +1171,10 @@ impl AcpThread {
match (chunks.last_mut(), is_thought) { match (chunks.last_mut(), is_thought) {
(Some(AssistantMessageChunk::Message { block }), false) (Some(AssistantMessageChunk::Message { block }), false)
| (Some(AssistantMessageChunk::Thought { block }), true) => { | (Some(AssistantMessageChunk::Thought { block }), true) => {
block.append(chunk, &language_registry, path_style, cx) block.append(chunk, &language_registry, cx)
} }
_ => { _ => {
let block = ContentBlock::new(chunk, &language_registry, path_style, cx); let block = ContentBlock::new(chunk, &language_registry, cx);
if is_thought { if is_thought {
chunks.push(AssistantMessageChunk::Thought { block }) chunks.push(AssistantMessageChunk::Thought { block })
} else { } else {
@@ -1216,7 +1183,7 @@ impl AcpThread {
} }
} }
} else { } else {
let block = ContentBlock::new(chunk, &language_registry, path_style, cx); let block = ContentBlock::new(chunk, &language_registry, cx);
let chunk = if is_thought { let chunk = if is_thought {
AssistantMessageChunk::Thought { block } AssistantMessageChunk::Thought { block }
} else { } else {
@@ -1268,7 +1235,6 @@ impl AcpThread {
) -> Result<()> { ) -> Result<()> {
let update = update.into(); let update = update.into();
let languages = self.project.read(cx).languages().clone(); let languages = self.project.read(cx).languages().clone();
let path_style = self.project.read(cx).path_style(cx);
let ix = match self.index_for_tool_call(update.id()) { let ix = match self.index_for_tool_call(update.id()) {
Some(ix) => ix, Some(ix) => ix,
@@ -1285,7 +1251,6 @@ impl AcpThread {
meta: None, meta: None,
}), }),
&languages, &languages,
path_style,
cx, cx,
))], ))],
status: ToolCallStatus::Failed, status: ToolCallStatus::Failed,
@@ -1305,7 +1270,7 @@ impl AcpThread {
match update { match update {
ToolCallUpdate::UpdateFields(update) => { ToolCallUpdate::UpdateFields(update) => {
let location_updated = update.fields.locations.is_some(); let location_updated = update.fields.locations.is_some();
call.update_fields(update.fields, languages, path_style, &self.terminals, cx)?; call.update_fields(update.fields, languages, &self.terminals, cx)?;
if location_updated { if location_updated {
self.resolve_locations(update.id, cx); self.resolve_locations(update.id, cx);
} }
@@ -1344,7 +1309,6 @@ impl AcpThread {
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Result<(), acp::Error> { ) -> Result<(), acp::Error> {
let language_registry = self.project.read(cx).languages().clone(); let language_registry = self.project.read(cx).languages().clone();
let path_style = self.project.read(cx).path_style(cx);
let id = update.id.clone(); let id = update.id.clone();
if let Some(ix) = self.index_for_tool_call(&id) { if let Some(ix) = self.index_for_tool_call(&id) {
@@ -1352,13 +1316,7 @@ impl AcpThread {
unreachable!() unreachable!()
}; };
call.update_fields( call.update_fields(update.fields, language_registry, &self.terminals, cx)?;
update.fields,
language_registry,
path_style,
&self.terminals,
cx,
)?;
call.status = status; call.status = status;
cx.emit(AcpThreadEvent::EntryUpdated(ix)); cx.emit(AcpThreadEvent::EntryUpdated(ix));
@@ -1367,7 +1325,6 @@ impl AcpThread {
update.try_into()?, update.try_into()?,
status, status,
language_registry, language_registry,
self.project.read(cx).path_style(cx),
&self.terminals, &self.terminals,
cx, cx,
)?; )?;
@@ -1436,46 +1393,35 @@ impl AcpThread {
let task = tool_call.resolve_locations(project, cx); let task = tool_call.resolve_locations(project, cx);
cx.spawn(async move |this, cx| { cx.spawn(async move |this, cx| {
let resolved_locations = task.await; let resolved_locations = task.await;
this.update(cx, |this, cx| { this.update(cx, |this, cx| {
let project = this.project.clone(); let project = this.project.clone();
for location in resolved_locations.iter().flatten() {
this.shared_buffers
.insert(location.buffer.clone(), location.buffer.read(cx).snapshot());
}
let Some((ix, tool_call)) = this.tool_call_mut(&id) else { let Some((ix, tool_call)) = this.tool_call_mut(&id) else {
return; return;
}; };
if let Some(Some(location)) = resolved_locations.last() { if let Some(Some(location)) = resolved_locations.last() {
project.update(cx, |project, cx| { project.update(cx, |project, cx| {
let should_ignore = if let Some(agent_location) = project if let Some(agent_location) = project.agent_location() {
.agent_location() let should_ignore = agent_location.buffer == location.buffer
.filter(|agent_location| agent_location.buffer == location.buffer) && location
{ .buffer
let snapshot = location.buffer.read(cx).snapshot(); .update(cx, |buffer, _| {
let old_position = agent_location.position.to_point(&snapshot); let snapshot = buffer.snapshot();
let new_position = location.position.to_point(&snapshot); let old_position =
agent_location.position.to_point(&snapshot);
// ignore this so that when we get updates from the edit tool let new_position = location.position.to_point(&snapshot);
// the position doesn't reset to the startof line // ignore this so that when we get updates from the edit tool
old_position.row == new_position.row // the position doesn't reset to the startof line
&& old_position.column > new_position.column old_position.row == new_position.row
} else { && old_position.column > new_position.column
false })
}; .ok()
if !should_ignore { .unwrap_or_default();
project.set_agent_location(Some(location.into()), cx); if !should_ignore {
project.set_agent_location(Some(location.clone()), cx);
}
} }
}); });
} }
let resolved_locations = resolved_locations
.iter()
.map(|l| l.as_ref().map(|l| AgentLocation::from(l)))
.collect::<Vec<_>>();
if tool_call.resolved_locations != resolved_locations { if tool_call.resolved_locations != resolved_locations {
tool_call.resolved_locations = resolved_locations; tool_call.resolved_locations = resolved_locations;
cx.emit(AcpThreadEvent::EntryUpdated(ix)); cx.emit(AcpThreadEvent::EntryUpdated(ix));
@@ -1647,7 +1593,6 @@ impl AcpThread {
let block = ContentBlock::new_combined( let block = ContentBlock::new_combined(
message.clone(), message.clone(),
self.project.read(cx).languages().clone(), self.project.read(cx).languages().clone(),
self.project.read(cx).path_style(cx),
cx, cx,
); );
let request = acp::PromptRequest { let request = acp::PromptRequest {
@@ -2167,7 +2112,6 @@ impl AcpThread {
let project = self.project.clone(); let project = self.project.clone();
let language_registry = project.read(cx).languages().clone(); let language_registry = project.read(cx).languages().clone();
let is_windows = project.read(cx).path_style(cx).is_windows();
let terminal_id = acp::TerminalId(Uuid::new_v4().to_string().into()); let terminal_id = acp::TerminalId(Uuid::new_v4().to_string().into());
let terminal_task = cx.spawn({ let terminal_task = cx.spawn({
@@ -2181,10 +2125,9 @@ impl AcpThread {
.and_then(|r| r.read(cx).default_system_shell()) .and_then(|r| r.read(cx).default_system_shell())
})? })?
.unwrap_or_else(|| get_default_system_shell_preferring_bash()); .unwrap_or_else(|| get_default_system_shell_preferring_bash());
let (task_command, task_args) = let (task_command, task_args) = ShellBuilder::new(&Shell::Program(shell))
ShellBuilder::new(&Shell::Program(shell), is_windows) .redirect_stdin_to_dev_null()
.redirect_stdin_to_dev_null() .build(Some(command.clone()), &args);
.build(Some(command.clone()), &args);
let terminal = project let terminal = project
.update(cx, |project, cx| { .update(cx, |project, cx| {
project.create_terminal_task( project.create_terminal_task(
@@ -2616,19 +2559,17 @@ mod tests {
thread.update(&mut cx, |thread, cx| { thread.update(&mut cx, |thread, cx| {
thread thread
.handle_session_update( .handle_session_update(
acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk { acp::SessionUpdate::AgentThoughtChunk {
content: "Thinking ".into(), content: "Thinking ".into(),
meta: None, },
}),
cx, cx,
) )
.unwrap(); .unwrap();
thread thread
.handle_session_update( .handle_session_update(
acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk { acp::SessionUpdate::AgentThoughtChunk {
content: "hard!".into(), content: "hard!".into(),
meta: None, },
}),
cx, cx,
) )
.unwrap(); .unwrap();
@@ -3127,10 +3068,9 @@ mod tests {
thread.update(&mut cx, |thread, cx| { thread.update(&mut cx, |thread, cx| {
thread thread
.handle_session_update( .handle_session_update(
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk { acp::SessionUpdate::AgentMessageChunk {
content: content.text.to_uppercase().into(), content: content.text.to_uppercase().into(),
meta: None, },
}),
cx, cx,
) )
.unwrap(); .unwrap();
@@ -3487,10 +3427,9 @@ mod tests {
thread.update(&mut cx, |thread, cx| { thread.update(&mut cx, |thread, cx| {
thread thread
.handle_session_update( .handle_session_update(
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk { acp::SessionUpdate::AgentMessageChunk {
content: content.text.to_uppercase().into(), content: content.text.to_uppercase().into(),
meta: None, },
}),
cx, cx,
) )
.unwrap(); .unwrap();

View File

@@ -236,21 +236,21 @@ impl PendingDiff {
fn finalize(&self, cx: &mut Context<Diff>) -> FinalizedDiff { fn finalize(&self, cx: &mut Context<Diff>) -> FinalizedDiff {
let ranges = self.excerpt_ranges(cx); let ranges = self.excerpt_ranges(cx);
let base_text = self.base_text.clone(); let base_text = self.base_text.clone();
let new_buffer = self.new_buffer.read(cx); let language_registry = self.new_buffer.read(cx).language_registry();
let language_registry = new_buffer.language_registry();
let path = new_buffer let path = self
.new_buffer
.read(cx)
.file() .file()
.map(|file| file.path().display(file.path_style(cx))) .map(|file| file.path().display(file.path_style(cx)))
.unwrap_or("untitled".into()) .unwrap_or("untitled".into())
.into(); .into();
let replica_id = new_buffer.replica_id();
// Replace the buffer in the multibuffer with the snapshot // Replace the buffer in the multibuffer with the snapshot
let buffer = cx.new(|cx| { let buffer = cx.new(|cx| {
let language = self.new_buffer.read(cx).language().cloned(); let language = self.new_buffer.read(cx).language().cloned();
let buffer = TextBuffer::new_normalized( let buffer = TextBuffer::new_normalized(
replica_id, 0,
cx.entity_id().as_non_zero_u64().into(), cx.entity_id().as_non_zero_u64().into(),
self.new_buffer.read(cx).line_ending(), self.new_buffer.read(cx).line_ending(),
self.new_buffer.read(cx).as_rope().clone(), self.new_buffer.read(cx).as_rope().clone(),
@@ -361,12 +361,10 @@ async fn build_buffer_diff(
) -> Result<Entity<BufferDiff>> { ) -> Result<Entity<BufferDiff>> {
let buffer = cx.update(|cx| buffer.read(cx).snapshot())?; let buffer = cx.update(|cx| buffer.read(cx).snapshot())?;
let executor = cx.background_executor().clone();
let old_text_rope = cx let old_text_rope = cx
.background_spawn({ .background_spawn({
let old_text = old_text.clone(); let old_text = old_text.clone();
let executor = executor.clone(); async move { Rope::from(old_text.as_str()) }
async move { Rope::from_str(old_text.as_str(), &executor) }
}) })
.await; .await;
let base_buffer = cx let base_buffer = cx

View File

@@ -7,10 +7,10 @@ use std::{
fmt, fmt,
ops::RangeInclusive, ops::RangeInclusive,
path::{Path, PathBuf}, path::{Path, PathBuf},
str::FromStr,
}; };
use ui::{App, IconName, SharedString}; use ui::{App, IconName, SharedString};
use url::Url; use url::Url;
use util::paths::PathStyle;
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub enum MentionUri { pub enum MentionUri {
@@ -49,7 +49,7 @@ pub enum MentionUri {
} }
impl MentionUri { impl MentionUri {
pub fn parse(input: &str, path_style: PathStyle) -> Result<Self> { pub fn parse(input: &str) -> Result<Self> {
fn parse_line_range(fragment: &str) -> Result<RangeInclusive<u32>> { fn parse_line_range(fragment: &str) -> Result<RangeInclusive<u32>> {
let range = fragment let range = fragment
.strip_prefix("L") .strip_prefix("L")
@@ -74,34 +74,25 @@ impl MentionUri {
let path = url.path(); let path = url.path();
match url.scheme() { match url.scheme() {
"file" => { "file" => {
let path = if path_style.is_windows() { let path = url.to_file_path().ok().context("Extracting file path")?;
path.trim_start_matches("/")
} else {
path
};
if let Some(fragment) = url.fragment() { if let Some(fragment) = url.fragment() {
let line_range = parse_line_range(fragment)?; let line_range = parse_line_range(fragment)?;
if let Some(name) = single_query_param(&url, "symbol")? { if let Some(name) = single_query_param(&url, "symbol")? {
Ok(Self::Symbol { Ok(Self::Symbol {
name, name,
abs_path: path.into(), abs_path: path,
line_range, line_range,
}) })
} else { } else {
Ok(Self::Selection { Ok(Self::Selection {
abs_path: Some(path.into()), abs_path: Some(path),
line_range, line_range,
}) })
} }
} else if input.ends_with("/") { } else if input.ends_with("/") {
Ok(Self::Directory { Ok(Self::Directory { abs_path: path })
abs_path: path.into(),
})
} else { } else {
Ok(Self::File { Ok(Self::File { abs_path: path })
abs_path: path.into(),
})
} }
} }
"zed" => { "zed" => {
@@ -222,14 +213,18 @@ impl MentionUri {
pub fn to_uri(&self) -> Url { pub fn to_uri(&self) -> Url {
match self { match self {
MentionUri::File { abs_path } => { MentionUri::File { abs_path } => {
let mut url = Url::parse("file:///").unwrap(); let mut url = Url::parse("zed:///").unwrap();
url.set_path(&abs_path.to_string_lossy()); url.set_path("/agent/file");
url.query_pairs_mut()
.append_pair("path", &abs_path.to_string_lossy());
url url
} }
MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(), MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(),
MentionUri::Directory { abs_path } => { MentionUri::Directory { abs_path } => {
let mut url = Url::parse("file:///").unwrap(); let mut url = Url::parse("zed:///").unwrap();
url.set_path(&abs_path.to_string_lossy()); url.set_path("/agent/directory");
url.query_pairs_mut()
.append_pair("path", &abs_path.to_string_lossy());
url url
} }
MentionUri::Symbol { MentionUri::Symbol {
@@ -237,9 +232,10 @@ impl MentionUri {
name, name,
line_range, line_range,
} => { } => {
let mut url = Url::parse("file:///").unwrap(); let mut url = Url::parse("zed:///").unwrap();
url.set_path(&abs_path.to_string_lossy()); url.set_path(&format!("/agent/symbol/{name}"));
url.query_pairs_mut().append_pair("symbol", name); url.query_pairs_mut()
.append_pair("path", &abs_path.to_string_lossy());
url.set_fragment(Some(&format!( url.set_fragment(Some(&format!(
"L{}:{}", "L{}:{}",
line_range.start() + 1, line_range.start() + 1,
@@ -251,14 +247,13 @@ impl MentionUri {
abs_path, abs_path,
line_range, line_range,
} => { } => {
let mut url = if let Some(path) = abs_path { let mut url = Url::parse("zed:///").unwrap();
let mut url = Url::parse("file:///").unwrap(); if let Some(abs_path) = abs_path {
url.set_path(&path.to_string_lossy()); url.set_path("/agent/selection");
url url.query_pairs_mut()
.append_pair("path", &abs_path.to_string_lossy());
} else { } else {
let mut url = Url::parse("zed:///").unwrap();
url.set_path("/agent/untitled-buffer"); url.set_path("/agent/untitled-buffer");
url
}; };
url.set_fragment(Some(&format!( url.set_fragment(Some(&format!(
"L{}:{}", "L{}:{}",
@@ -293,6 +288,14 @@ impl MentionUri {
} }
} }
impl FromStr for MentionUri {
type Err = anyhow::Error;
fn from_str(s: &str) -> anyhow::Result<Self> {
Self::parse(s)
}
}
pub struct MentionLink<'a>(&'a MentionUri); pub struct MentionLink<'a>(&'a MentionUri);
impl fmt::Display for MentionLink<'_> { impl fmt::Display for MentionLink<'_> {
@@ -335,81 +338,93 @@ mod tests {
#[test] #[test]
fn test_parse_file_uri() { fn test_parse_file_uri() {
let file_uri = uri!("file:///path/to/file.rs"); let old_uri = uri!("file:///path/to/file.rs");
let parsed = MentionUri::parse(file_uri, PathStyle::local()).unwrap(); let parsed = MentionUri::parse(old_uri).unwrap();
match &parsed { match &parsed {
MentionUri::File { abs_path } => { MentionUri::File { abs_path } => {
assert_eq!(abs_path, Path::new(path!("/path/to/file.rs"))); assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/file.rs"));
} }
_ => panic!("Expected File variant"), _ => panic!("Expected File variant"),
} }
assert_eq!(parsed.to_uri().to_string(), file_uri); let new_uri = parsed.to_uri().to_string();
assert!(new_uri.starts_with("zed:///agent/file"));
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
} }
#[test] #[test]
fn test_parse_directory_uri() { fn test_parse_directory_uri() {
let file_uri = uri!("file:///path/to/dir/"); let old_uri = uri!("file:///path/to/dir/");
let parsed = MentionUri::parse(file_uri, PathStyle::local()).unwrap(); let parsed = MentionUri::parse(old_uri).unwrap();
match &parsed { match &parsed {
MentionUri::Directory { abs_path } => { MentionUri::Directory { abs_path } => {
assert_eq!(abs_path, Path::new(path!("/path/to/dir/"))); assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/dir/"));
} }
_ => panic!("Expected Directory variant"), _ => panic!("Expected Directory variant"),
} }
assert_eq!(parsed.to_uri().to_string(), file_uri); let new_uri = parsed.to_uri().to_string();
assert!(new_uri.starts_with("zed:///agent/directory"));
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
} }
#[test] #[test]
fn test_to_directory_uri_without_slash() { fn test_to_directory_uri_without_slash() {
let uri = MentionUri::Directory { let uri = MentionUri::Directory {
abs_path: PathBuf::from(path!("/path/to/dir/")), abs_path: PathBuf::from(path!("/path/to/dir")),
}; };
let expected = uri!("file:///path/to/dir/"); let uri_string = uri.to_uri().to_string();
assert_eq!(uri.to_uri().to_string(), expected); assert!(uri_string.starts_with("zed:///agent/directory"));
assert_eq!(MentionUri::parse(&uri_string).unwrap(), uri);
} }
#[test] #[test]
fn test_parse_symbol_uri() { fn test_parse_symbol_uri() {
let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20"); let old_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
let parsed = MentionUri::parse(symbol_uri, PathStyle::local()).unwrap(); let parsed = MentionUri::parse(old_uri).unwrap();
match &parsed { match &parsed {
MentionUri::Symbol { MentionUri::Symbol {
abs_path: path, abs_path: path,
name, name,
line_range, line_range,
} => { } => {
assert_eq!(path, Path::new(path!("/path/to/file.rs"))); assert_eq!(path.to_str().unwrap(), path!("/path/to/file.rs"));
assert_eq!(name, "MySymbol"); assert_eq!(name, "MySymbol");
assert_eq!(line_range.start(), &9); assert_eq!(line_range.start(), &9);
assert_eq!(line_range.end(), &19); assert_eq!(line_range.end(), &19);
} }
_ => panic!("Expected Symbol variant"), _ => panic!("Expected Symbol variant"),
} }
assert_eq!(parsed.to_uri().to_string(), symbol_uri); let new_uri = parsed.to_uri().to_string();
assert!(new_uri.starts_with("zed:///agent/symbol/MySymbol"));
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
} }
#[test] #[test]
fn test_parse_selection_uri() { fn test_parse_selection_uri() {
let selection_uri = uri!("file:///path/to/file.rs#L5:15"); let old_uri = uri!("file:///path/to/file.rs#L5:15");
let parsed = MentionUri::parse(selection_uri, PathStyle::local()).unwrap(); let parsed = MentionUri::parse(old_uri).unwrap();
match &parsed { match &parsed {
MentionUri::Selection { MentionUri::Selection {
abs_path: path, abs_path: path,
line_range, line_range,
} => { } => {
assert_eq!(path.as_ref().unwrap(), Path::new(path!("/path/to/file.rs"))); assert_eq!(
path.as_ref().unwrap().to_str().unwrap(),
path!("/path/to/file.rs")
);
assert_eq!(line_range.start(), &4); assert_eq!(line_range.start(), &4);
assert_eq!(line_range.end(), &14); assert_eq!(line_range.end(), &14);
} }
_ => panic!("Expected Selection variant"), _ => panic!("Expected Selection variant"),
} }
assert_eq!(parsed.to_uri().to_string(), selection_uri); let new_uri = parsed.to_uri().to_string();
assert!(new_uri.starts_with("zed:///agent/selection"));
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
} }
#[test] #[test]
fn test_parse_untitled_selection_uri() { fn test_parse_untitled_selection_uri() {
let selection_uri = uri!("zed:///agent/untitled-buffer#L1:10"); let selection_uri = uri!("zed:///agent/untitled-buffer#L1:10");
let parsed = MentionUri::parse(selection_uri, PathStyle::local()).unwrap(); let parsed = MentionUri::parse(selection_uri).unwrap();
match &parsed { match &parsed {
MentionUri::Selection { MentionUri::Selection {
abs_path: None, abs_path: None,
@@ -426,7 +441,7 @@ mod tests {
#[test] #[test]
fn test_parse_thread_uri() { fn test_parse_thread_uri() {
let thread_uri = "zed:///agent/thread/session123?name=Thread+name"; let thread_uri = "zed:///agent/thread/session123?name=Thread+name";
let parsed = MentionUri::parse(thread_uri, PathStyle::local()).unwrap(); let parsed = MentionUri::parse(thread_uri).unwrap();
match &parsed { match &parsed {
MentionUri::Thread { MentionUri::Thread {
id: thread_id, id: thread_id,
@@ -443,7 +458,7 @@ mod tests {
#[test] #[test]
fn test_parse_rule_uri() { fn test_parse_rule_uri() {
let rule_uri = "zed:///agent/rule/d8694ff2-90d5-4b6f-be33-33c1763acd52?name=Some+rule"; let rule_uri = "zed:///agent/rule/d8694ff2-90d5-4b6f-be33-33c1763acd52?name=Some+rule";
let parsed = MentionUri::parse(rule_uri, PathStyle::local()).unwrap(); let parsed = MentionUri::parse(rule_uri).unwrap();
match &parsed { match &parsed {
MentionUri::Rule { id, name } => { MentionUri::Rule { id, name } => {
assert_eq!(id.to_string(), "d8694ff2-90d5-4b6f-be33-33c1763acd52"); assert_eq!(id.to_string(), "d8694ff2-90d5-4b6f-be33-33c1763acd52");
@@ -457,7 +472,7 @@ mod tests {
#[test] #[test]
fn test_parse_fetch_http_uri() { fn test_parse_fetch_http_uri() {
let http_uri = "http://example.com/path?query=value#fragment"; let http_uri = "http://example.com/path?query=value#fragment";
let parsed = MentionUri::parse(http_uri, PathStyle::local()).unwrap(); let parsed = MentionUri::parse(http_uri).unwrap();
match &parsed { match &parsed {
MentionUri::Fetch { url } => { MentionUri::Fetch { url } => {
assert_eq!(url.to_string(), http_uri); assert_eq!(url.to_string(), http_uri);
@@ -470,7 +485,7 @@ mod tests {
#[test] #[test]
fn test_parse_fetch_https_uri() { fn test_parse_fetch_https_uri() {
let https_uri = "https://example.com/api/endpoint"; let https_uri = "https://example.com/api/endpoint";
let parsed = MentionUri::parse(https_uri, PathStyle::local()).unwrap(); let parsed = MentionUri::parse(https_uri).unwrap();
match &parsed { match &parsed {
MentionUri::Fetch { url } => { MentionUri::Fetch { url } => {
assert_eq!(url.to_string(), https_uri); assert_eq!(url.to_string(), https_uri);
@@ -482,55 +497,40 @@ mod tests {
#[test] #[test]
fn test_invalid_scheme() { fn test_invalid_scheme() {
assert!(MentionUri::parse("ftp://example.com", PathStyle::local()).is_err()); assert!(MentionUri::parse("ftp://example.com").is_err());
assert!(MentionUri::parse("ssh://example.com", PathStyle::local()).is_err()); assert!(MentionUri::parse("ssh://example.com").is_err());
assert!(MentionUri::parse("unknown://example.com", PathStyle::local()).is_err()); assert!(MentionUri::parse("unknown://example.com").is_err());
} }
#[test] #[test]
fn test_invalid_zed_path() { fn test_invalid_zed_path() {
assert!(MentionUri::parse("zed:///invalid/path", PathStyle::local()).is_err()); assert!(MentionUri::parse("zed:///invalid/path").is_err());
assert!(MentionUri::parse("zed:///agent/unknown/test", PathStyle::local()).is_err()); assert!(MentionUri::parse("zed:///agent/unknown/test").is_err());
} }
#[test] #[test]
fn test_invalid_line_range_format() { fn test_invalid_line_range_format() {
// Missing L prefix // Missing L prefix
assert!( assert!(MentionUri::parse(uri!("file:///path/to/file.rs#10:20")).is_err());
MentionUri::parse(uri!("file:///path/to/file.rs#10:20"), PathStyle::local()).is_err()
);
// Missing colon separator // Missing colon separator
assert!( assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L1020")).is_err());
MentionUri::parse(uri!("file:///path/to/file.rs#L1020"), PathStyle::local()).is_err()
);
// Invalid numbers // Invalid numbers
assert!( assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L10:abc")).is_err());
MentionUri::parse(uri!("file:///path/to/file.rs#L10:abc"), PathStyle::local()).is_err() assert!(MentionUri::parse(uri!("file:///path/to/file.rs#Labc:20")).is_err());
);
assert!(
MentionUri::parse(uri!("file:///path/to/file.rs#Labc:20"), PathStyle::local()).is_err()
);
} }
#[test] #[test]
fn test_invalid_query_parameters() { fn test_invalid_query_parameters() {
// Invalid query parameter name // Invalid query parameter name
assert!( assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L10:20?invalid=test")).is_err());
MentionUri::parse(
uri!("file:///path/to/file.rs#L10:20?invalid=test"),
PathStyle::local()
)
.is_err()
);
// Too many query parameters // Too many query parameters
assert!( assert!(
MentionUri::parse( MentionUri::parse(uri!(
uri!("file:///path/to/file.rs#L10:20?symbol=test&another=param"), "file:///path/to/file.rs#L10:20?symbol=test&another=param"
PathStyle::local() ))
)
.is_err() .is_err()
); );
} }
@@ -538,14 +538,8 @@ mod tests {
#[test] #[test]
fn test_zero_based_line_numbers() { fn test_zero_based_line_numbers() {
// Test that 0-based line numbers are rejected (should be 1-based) // Test that 0-based line numbers are rejected (should be 1-based)
assert!( assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L0:10")).is_err());
MentionUri::parse(uri!("file:///path/to/file.rs#L0:10"), PathStyle::local()).is_err() assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L1:0")).is_err());
); assert!(MentionUri::parse(uri!("file:///path/to/file.rs#L0:0")).is_err());
assert!(
MentionUri::parse(uri!("file:///path/to/file.rs#L1:0"), PathStyle::local()).is_err()
);
assert!(
MentionUri::parse(uri!("file:///path/to/file.rs#L0:0"), PathStyle::local()).is_err()
);
} }
} }

View File

@@ -1,15 +1,10 @@
use agent_client_protocol as acp; use agent_client_protocol as acp;
use anyhow::Result;
use futures::{FutureExt as _, future::Shared}; use futures::{FutureExt as _, future::Shared};
use gpui::{App, AppContext, AsyncApp, Context, Entity, Task}; use gpui::{App, AppContext, Context, Entity, Task};
use language::LanguageRegistry; use language::LanguageRegistry;
use markdown::Markdown; use markdown::Markdown;
use project::Project;
use settings::{Settings as _, SettingsLocation};
use std::{path::PathBuf, process::ExitStatus, sync::Arc, time::Instant}; use std::{path::PathBuf, process::ExitStatus, sync::Arc, time::Instant};
use task::Shell;
use terminal::terminal_settings::TerminalSettings;
use util::get_default_system_shell_preferring_bash;
pub struct Terminal { pub struct Terminal {
id: acp::TerminalId, id: acp::TerminalId,
@@ -175,68 +170,3 @@ impl Terminal {
) )
} }
} }
pub async fn create_terminal_entity(
command: String,
args: &[String],
env_vars: Vec<(String, String)>,
cwd: Option<PathBuf>,
project: &Entity<Project>,
cx: &mut AsyncApp,
) -> Result<Entity<terminal::Terminal>> {
let mut env = if let Some(dir) = &cwd {
project
.update(cx, |project, cx| {
let worktree = project.find_worktree(dir.as_path(), cx);
let shell = TerminalSettings::get(
worktree.as_ref().map(|(worktree, path)| SettingsLocation {
worktree_id: worktree.read(cx).id(),
path: &path,
}),
cx,
)
.shell
.clone();
project.directory_environment(&shell, dir.clone().into(), cx)
})?
.await
.unwrap_or_default()
} else {
Default::default()
};
// Disables paging for `git` and hopefully other commands
env.insert("PAGER".into(), "".into());
env.extend(env_vars);
// Use remote shell or default system shell, as appropriate
let shell = project
.update(cx, |project, cx| {
project
.remote_client()
.and_then(|r| r.read(cx).default_system_shell())
.map(Shell::Program)
})?
.unwrap_or_else(|| Shell::Program(get_default_system_shell_preferring_bash()));
let is_windows = project
.read_with(cx, |project, cx| project.path_style(cx).is_windows())
.unwrap_or(cfg!(windows));
let (task_command, task_args) = task::ShellBuilder::new(&shell, is_windows)
.redirect_stdin_to_dev_null()
.build(Some(command.clone()), &args);
project
.update(cx, |project, cx| {
project.create_terminal_task(
task::SpawnInTerminal {
command: Some(task_command),
args: task_args,
cwd,
env,
..Default::default()
},
cx,
)
})?
.await
}

View File

@@ -26,4 +26,5 @@ settings.workspace = true
theme.workspace = true theme.workspace = true
ui.workspace = true ui.workspace = true
util.workspace = true util.workspace = true
workspace-hack.workspace = true
workspace.workspace = true workspace.workspace = true

View File

@@ -4,26 +4,22 @@ use std::{
fmt::Display, fmt::Display,
rc::{Rc, Weak}, rc::{Rc, Weak},
sync::Arc, sync::Arc,
time::Duration,
}; };
use agent_client_protocol as acp; use agent_client_protocol as acp;
use collections::HashMap; use collections::HashMap;
use gpui::{ use gpui::{
App, ClipboardItem, Empty, Entity, EventEmitter, FocusHandle, Focusable, Global, ListAlignment, App, Empty, Entity, EventEmitter, FocusHandle, Focusable, Global, ListAlignment, ListState,
ListState, StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, prelude::*,
prelude::*,
}; };
use language::LanguageRegistry; use language::LanguageRegistry;
use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle}; use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle};
use project::Project; use project::Project;
use settings::Settings; use settings::Settings;
use theme::ThemeSettings; use theme::ThemeSettings;
use ui::{Tooltip, prelude::*}; use ui::prelude::*;
use util::ResultExt as _; use util::ResultExt as _;
use workspace::{ use workspace::{Item, Workspace};
Item, ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
};
actions!(dev, [OpenAcpLogs]); actions!(dev, [OpenAcpLogs]);
@@ -93,8 +89,8 @@ struct WatchedConnection {
messages: Vec<WatchedConnectionMessage>, messages: Vec<WatchedConnectionMessage>,
list_state: ListState, list_state: ListState,
connection: Weak<acp::ClientSideConnection>, connection: Weak<acp::ClientSideConnection>,
incoming_request_methods: HashMap<acp::RequestId, Arc<str>>, incoming_request_methods: HashMap<i32, Arc<str>>,
outgoing_request_methods: HashMap<acp::RequestId, Arc<str>>, outgoing_request_methods: HashMap<i32, Arc<str>>,
_task: Task<()>, _task: Task<()>,
} }
@@ -175,7 +171,7 @@ impl AcpTools {
} }
}; };
method_map.insert(id.clone(), method.clone()); method_map.insert(id, method.clone());
(Some(id), method.into(), MessageType::Request, Ok(params)) (Some(id), method.into(), MessageType::Request, Ok(params))
} }
acp::StreamMessageContent::Response { id, result } => { acp::StreamMessageContent::Response { id, result } => {
@@ -231,43 +227,6 @@ impl AcpTools {
cx.notify(); cx.notify();
} }
fn serialize_observed_messages(&self) -> Option<String> {
let connection = self.watched_connection.as_ref()?;
let messages: Vec<serde_json::Value> = connection
.messages
.iter()
.filter_map(|message| {
let params = match &message.params {
Ok(Some(params)) => params.clone(),
Ok(None) => serde_json::Value::Null,
Err(err) => serde_json::to_value(err).ok()?,
};
Some(serde_json::json!({
"_direction": match message.direction {
acp::StreamMessageDirection::Incoming => "incoming",
acp::StreamMessageDirection::Outgoing => "outgoing",
},
"_type": message.message_type.to_string().to_lowercase(),
"id": message.request_id,
"method": message.name.to_string(),
"params": params,
}))
})
.collect();
serde_json::to_string_pretty(&messages).ok()
}
fn clear_messages(&mut self, cx: &mut Context<Self>) {
if let Some(connection) = self.watched_connection.as_mut() {
connection.messages.clear();
connection.list_state.reset(0);
self.expanded.clear();
cx.notify();
}
}
fn render_message( fn render_message(
&mut self, &mut self,
index: usize, index: usize,
@@ -347,7 +306,6 @@ impl AcpTools {
.children( .children(
message message
.request_id .request_id
.as_ref()
.map(|req_id| div().child(ui::Chip::new(req_id.to_string()))), .map(|req_id| div().child(ui::Chip::new(req_id.to_string()))),
), ),
) )
@@ -399,7 +357,7 @@ impl AcpTools {
struct WatchedConnectionMessage { struct WatchedConnectionMessage {
name: SharedString, name: SharedString,
request_id: Option<acp::RequestId>, request_id: Option<i32>,
direction: acp::StreamMessageDirection, direction: acp::StreamMessageDirection,
message_type: MessageType, message_type: MessageType,
params: Result<Option<serde_json::Value>, acp::Error>, params: Result<Option<serde_json::Value>, acp::Error>,
@@ -534,103 +492,3 @@ impl Render for AcpTools {
}) })
} }
} }
pub struct AcpToolsToolbarItemView {
acp_tools: Option<Entity<AcpTools>>,
just_copied: bool,
}
impl AcpToolsToolbarItemView {
pub fn new() -> Self {
Self {
acp_tools: None,
just_copied: false,
}
}
}
impl Render for AcpToolsToolbarItemView {
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let Some(acp_tools) = self.acp_tools.as_ref() else {
return Empty.into_any_element();
};
let acp_tools = acp_tools.clone();
let has_messages = acp_tools
.read(cx)
.watched_connection
.as_ref()
.is_some_and(|connection| !connection.messages.is_empty());
h_flex()
.gap_2()
.child({
let acp_tools = acp_tools.clone();
IconButton::new(
"copy_all_messages",
if self.just_copied {
IconName::Check
} else {
IconName::Copy
},
)
.icon_size(IconSize::Small)
.tooltip(Tooltip::text(if self.just_copied {
"Copied!"
} else {
"Copy All Messages"
}))
.disabled(!has_messages)
.on_click(cx.listener(move |this, _, _window, cx| {
if let Some(content) = acp_tools.read(cx).serialize_observed_messages() {
cx.write_to_clipboard(ClipboardItem::new_string(content));
this.just_copied = true;
cx.spawn(async move |this, cx| {
cx.background_executor().timer(Duration::from_secs(2)).await;
this.update(cx, |this, cx| {
this.just_copied = false;
cx.notify();
})
})
.detach();
}
}))
})
.child(
IconButton::new("clear_messages", IconName::Trash)
.icon_size(IconSize::Small)
.tooltip(Tooltip::text("Clear Messages"))
.disabled(!has_messages)
.on_click(cx.listener(move |_this, _, _window, cx| {
acp_tools.update(cx, |acp_tools, cx| {
acp_tools.clear_messages(cx);
});
})),
)
.into_any()
}
}
impl EventEmitter<ToolbarItemEvent> for AcpToolsToolbarItemView {}
impl ToolbarItemView for AcpToolsToolbarItemView {
fn set_active_pane_item(
&mut self,
active_pane_item: Option<&dyn ItemHandle>,
_window: &mut Window,
cx: &mut Context<Self>,
) -> ToolbarItemLocation {
if let Some(item) = active_pane_item
&& let Some(acp_tools) = item.downcast::<AcpTools>()
{
self.acp_tools = Some(acp_tools);
cx.notify();
return ToolbarItemLocation::PrimaryRight;
}
if self.acp_tools.take().is_some() {
cx.notify();
}
ToolbarItemLocation::Hidden
}
}

View File

@@ -23,6 +23,7 @@ project.workspace = true
text.workspace = true text.workspace = true
util.workspace = true util.workspace = true
watch.workspace = true watch.workspace = true
workspace-hack.workspace = true
[dev-dependencies] [dev-dependencies]

View File

@@ -3,9 +3,7 @@ use buffer_diff::BufferDiff;
use clock; use clock;
use collections::BTreeMap; use collections::BTreeMap;
use futures::{FutureExt, StreamExt, channel::mpsc}; use futures::{FutureExt, StreamExt, channel::mpsc};
use gpui::{ use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
App, AppContext, AsyncApp, BackgroundExecutor, Context, Entity, Subscription, Task, WeakEntity,
};
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint}; use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle}; use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
use std::{cmp, ops::Range, sync::Arc}; use std::{cmp, ops::Range, sync::Arc};
@@ -323,7 +321,6 @@ impl ActionLog {
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone(); let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
let edits = diff_snapshots(&old_snapshot, &new_snapshot); let edits = diff_snapshots(&old_snapshot, &new_snapshot);
let mut has_user_changes = false; let mut has_user_changes = false;
let executor = cx.background_executor().clone();
async move { async move {
if let ChangeAuthor::User = author { if let ChangeAuthor::User = author {
has_user_changes = apply_non_conflicting_edits( has_user_changes = apply_non_conflicting_edits(
@@ -331,7 +328,6 @@ impl ActionLog {
edits, edits,
&mut base_text, &mut base_text,
new_snapshot.as_rope(), new_snapshot.as_rope(),
&executor,
); );
} }
@@ -386,7 +382,6 @@ impl ActionLog {
let agent_diff_base = tracked_buffer.diff_base.clone(); let agent_diff_base = tracked_buffer.diff_base.clone();
let git_diff_base = git_diff.read(cx).base_text().as_rope().clone(); let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
let buffer_text = tracked_buffer.snapshot.as_rope().clone(); let buffer_text = tracked_buffer.snapshot.as_rope().clone();
let executor = cx.background_executor().clone();
anyhow::Ok(cx.background_spawn(async move { anyhow::Ok(cx.background_spawn(async move {
let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable(); let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
let committed_edits = language::line_diff( let committed_edits = language::line_diff(
@@ -421,11 +416,8 @@ impl ActionLog {
), ),
new_agent_diff_base.max_point(), new_agent_diff_base.max_point(),
)); ));
new_agent_diff_base.replace( new_agent_diff_base
old_byte_start..old_byte_end, .replace(old_byte_start..old_byte_end, &unreviewed_new);
&unreviewed_new,
&executor,
);
row_delta += row_delta +=
unreviewed.new_len() as i32 - unreviewed.old_len() as i32; unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
} }
@@ -619,7 +611,6 @@ impl ActionLog {
.snapshot .snapshot
.text_for_range(new_range) .text_for_range(new_range)
.collect::<String>(), .collect::<String>(),
cx.background_executor(),
); );
delta += edit.new_len() as i32 - edit.old_len() as i32; delta += edit.new_len() as i32 - edit.old_len() as i32;
false false
@@ -833,7 +824,6 @@ fn apply_non_conflicting_edits(
edits: Vec<Edit<u32>>, edits: Vec<Edit<u32>>,
old_text: &mut Rope, old_text: &mut Rope,
new_text: &Rope, new_text: &Rope,
executor: &BackgroundExecutor,
) -> bool { ) -> bool {
let mut old_edits = patch.edits().iter().cloned().peekable(); let mut old_edits = patch.edits().iter().cloned().peekable();
let mut new_edits = edits.into_iter().peekable(); let mut new_edits = edits.into_iter().peekable();
@@ -887,7 +877,6 @@ fn apply_non_conflicting_edits(
old_text.replace( old_text.replace(
old_bytes, old_bytes,
&new_text.chunks_in_range(new_bytes).collect::<String>(), &new_text.chunks_in_range(new_bytes).collect::<String>(),
executor,
); );
applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32; applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
has_made_changes = true; has_made_changes = true;
@@ -2293,7 +2282,6 @@ mod tests {
old_text.replace( old_text.replace(
old_start..old_end, old_start..old_end,
&new_text.slice_rows(edit.new.clone()).to_string(), &new_text.slice_rows(edit.new.clone()).to_string(),
cx.background_executor(),
); );
} }
pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string()); pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());

View File

@@ -25,6 +25,7 @@ proto.workspace = true
smallvec.workspace = true smallvec.workspace = true
ui.workspace = true ui.workspace = true
util.workspace = true util.workspace = true
workspace-hack.workspace = true
workspace.workspace = true workspace.workspace = true
[dev-dependencies] [dev-dependencies]

View File

@@ -11,7 +11,8 @@ use language::{
LanguageServerStatusUpdate, ServerHealth, LanguageServerStatusUpdate, ServerHealth,
}; };
use project::{ use project::{
LanguageServerProgress, LspStoreEvent, ProgressToken, Project, ProjectEnvironmentEvent, EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project,
ProjectEnvironmentEvent,
git_store::{GitStoreEvent, Repository}, git_store::{GitStoreEvent, Repository},
}; };
use smallvec::SmallVec; use smallvec::SmallVec;
@@ -19,6 +20,7 @@ use std::{
cmp::Reverse, cmp::Reverse,
collections::HashSet, collections::HashSet,
fmt::Write, fmt::Write,
path::Path,
sync::Arc, sync::Arc,
time::{Duration, Instant}, time::{Duration, Instant},
}; };
@@ -61,7 +63,7 @@ struct ServerStatus {
struct PendingWork<'a> { struct PendingWork<'a> {
language_server_id: LanguageServerId, language_server_id: LanguageServerId,
progress_token: &'a ProgressToken, progress_token: &'a str,
progress: &'a LanguageServerProgress, progress: &'a LanguageServerProgress,
} }
@@ -313,9 +315,9 @@ impl ActivityIndicator {
let mut pending_work = status let mut pending_work = status
.pending_work .pending_work
.iter() .iter()
.map(|(progress_token, progress)| PendingWork { .map(|(token, progress)| PendingWork {
language_server_id: server_id, language_server_id: server_id,
progress_token, progress_token: token.as_str(),
progress, progress,
}) })
.collect::<SmallVec<[_; 4]>>(); .collect::<SmallVec<[_; 4]>>();
@@ -326,23 +328,27 @@ impl ActivityIndicator {
.flatten() .flatten()
} }
fn pending_environment_error<'a>(&'a self, cx: &'a App) -> Option<&'a String> { fn pending_environment_errors<'a>(
self.project.read(cx).peek_environment_error(cx) &'a self,
cx: &'a App,
) -> impl Iterator<Item = (&'a Arc<Path>, &'a EnvironmentErrorMessage)> {
self.project.read(cx).shell_environment_errors(cx)
} }
fn content_to_render(&mut self, cx: &mut Context<Self>) -> Option<Content> { fn content_to_render(&mut self, cx: &mut Context<Self>) -> Option<Content> {
// Show if any direnv calls failed // Show if any direnv calls failed
if let Some(message) = self.pending_environment_error(cx) { if let Some((abs_path, error)) = self.pending_environment_errors(cx).next() {
let abs_path = abs_path.clone();
return Some(Content { return Some(Content {
icon: Some( icon: Some(
Icon::new(IconName::Warning) Icon::new(IconName::Warning)
.size(IconSize::Small) .size(IconSize::Small)
.into_any_element(), .into_any_element(),
), ),
message: message.clone(), message: error.0.clone(),
on_click: Some(Arc::new(move |this, window, cx| { on_click: Some(Arc::new(move |this, window, cx| {
this.project.update(cx, |project, cx| { this.project.update(cx, |project, cx| {
project.pop_environment_error(cx); project.remove_environment_error(&abs_path, cx);
}); });
window.dispatch_action(Box::new(workspace::OpenLog), cx); window.dispatch_action(Box::new(workspace::OpenLog), cx);
})), })),
@@ -358,7 +364,11 @@ impl ActivityIndicator {
.. ..
}) = pending_work.next() }) = pending_work.next()
{ {
let mut message = progress.title.clone().unwrap_or(progress_token.to_string()); let mut message = progress
.title
.as_deref()
.unwrap_or(progress_token)
.to_string();
if let Some(percentage) = progress.percentage { if let Some(percentage) = progress.percentage {
write!(&mut message, " ({}%)", percentage).unwrap(); write!(&mut message, " ({}%)", percentage).unwrap();
@@ -769,7 +779,7 @@ impl Render for ActivityIndicator {
let Some(content) = self.content_to_render(cx) else { let Some(content) = self.content_to_render(cx) else {
return result; return result;
}; };
let activity_indicator = cx.entity().downgrade(); let this = cx.entity().downgrade();
let truncate_content = content.message.len() > MAX_MESSAGE_LEN; let truncate_content = content.message.len() > MAX_MESSAGE_LEN;
result.gap_2().child( result.gap_2().child(
PopoverMenu::new("activity-indicator-popover") PopoverMenu::new("activity-indicator-popover")
@@ -811,21 +821,22 @@ impl Render for ActivityIndicator {
) )
.anchor(gpui::Corner::BottomLeft) .anchor(gpui::Corner::BottomLeft)
.menu(move |window, cx| { .menu(move |window, cx| {
let strong_this = activity_indicator.upgrade()?; let strong_this = this.upgrade()?;
let mut has_work = false; let mut has_work = false;
let menu = ContextMenu::build(window, cx, |mut menu, _, cx| { let menu = ContextMenu::build(window, cx, |mut menu, _, cx| {
for work in strong_this.read(cx).pending_language_server_work(cx) { for work in strong_this.read(cx).pending_language_server_work(cx) {
has_work = true; has_work = true;
let activity_indicator = activity_indicator.clone(); let this = this.clone();
let mut title = work let mut title = work
.progress .progress
.title .title
.clone() .as_deref()
.unwrap_or(work.progress_token.to_string()); .unwrap_or(work.progress_token)
.to_owned();
if work.progress.is_cancellable { if work.progress.is_cancellable {
let language_server_id = work.language_server_id; let language_server_id = work.language_server_id;
let token = work.progress_token.clone(); let token = work.progress_token.to_string();
let title = SharedString::from(title); let title = SharedString::from(title);
menu = menu.custom_entry( menu = menu.custom_entry(
move |_, _| { move |_, _| {
@@ -837,23 +848,18 @@ impl Render for ActivityIndicator {
.into_any_element() .into_any_element()
}, },
move |_, cx| { move |_, cx| {
let token = token.clone(); this.update(cx, |this, cx| {
activity_indicator this.project.update(cx, |project, cx| {
.update(cx, |activity_indicator, cx| { project.cancel_language_server_work(
activity_indicator.project.update( language_server_id,
Some(token.clone()),
cx, cx,
|project, cx| {
project.cancel_language_server_work(
language_server_id,
Some(token),
cx,
);
},
); );
activity_indicator.context_menu_handle.hide(cx); });
cx.notify(); this.context_menu_handle.hide(cx);
}) cx.notify();
.ok(); })
.ok();
}, },
); );
} else { } else {

View File

@@ -5,101 +5,75 @@ edition.workspace = true
publish.workspace = true publish.workspace = true
license = "GPL-3.0-or-later" license = "GPL-3.0-or-later"
[lib]
path = "src/agent.rs"
[features]
test-support = ["db/test-support"]
eval = []
unit-eval = []
e2e = []
[lints] [lints]
workspace = true workspace = true
[lib]
path = "src/agent.rs"
doctest = false
[features]
test-support = [
"gpui/test-support",
"language/test-support",
]
[dependencies] [dependencies]
acp_thread.workspace = true
action_log.workspace = true action_log.workspace = true
agent-client-protocol.workspace = true
agent_servers.workspace = true
agent_settings.workspace = true agent_settings.workspace = true
anyhow.workspace = true anyhow.workspace = true
assistant_text_thread.workspace = true assistant_context.workspace = true
assistant_tool.workspace = true
chrono.workspace = true chrono.workspace = true
client.workspace = true client.workspace = true
cloud_llm_client.workspace = true cloud_llm_client.workspace = true
collections.workspace = true collections.workspace = true
component.workspace = true
context_server.workspace = true context_server.workspace = true
db.workspace = true convert_case.workspace = true
derive_more.workspace = true
fs.workspace = true fs.workspace = true
futures.workspace = true futures.workspace = true
git.workspace = true git.workspace = true
gpui.workspace = true gpui.workspace = true
handlebars = { workspace = true, features = ["rust-embed"] } heed.workspace = true
html_to_markdown.workspace = true
http_client.workspace = true http_client.workspace = true
icons.workspace = true
indoc.workspace = true indoc.workspace = true
itertools.workspace = true itertools.workspace = true
language.workspace = true language.workspace = true
language_model.workspace = true language_model.workspace = true
language_models.workspace = true
log.workspace = true log.workspace = true
open.workspace = true
parking_lot.workspace = true
paths.workspace = true paths.workspace = true
postage.workspace = true
project.workspace = true project.workspace = true
prompt_store.workspace = true prompt_store.workspace = true
regex.workspace = true ref-cast.workspace = true
rust-embed.workspace = true rope.workspace = true
schemars.workspace = true schemars.workspace = true
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true
settings.workspace = true settings.workspace = true
smallvec.workspace = true
smol.workspace = true smol.workspace = true
sqlez.workspace = true sqlez.workspace = true
streaming_diff.workspace = true
strsim.workspace = true
task.workspace = true
telemetry.workspace = true telemetry.workspace = true
terminal.workspace = true
text.workspace = true text.workspace = true
theme.workspace = true
thiserror.workspace = true thiserror.workspace = true
ui.workspace = true time.workspace = true
util.workspace = true util.workspace = true
uuid.workspace = true uuid.workspace = true
watch.workspace = true workspace-hack.workspace = true
web_search.workspace = true
zed_env_vars.workspace = true zed_env_vars.workspace = true
zstd.workspace = true zstd.workspace = true
[dev-dependencies] [dev-dependencies]
agent_servers = { workspace = true, "features" = ["test-support"] } assistant_tools.workspace = true
assistant_text_thread = { workspace = true, "features" = ["test-support"] }
client = { workspace = true, "features" = ["test-support"] }
clock = { workspace = true, "features" = ["test-support"] }
context_server = { workspace = true, "features" = ["test-support"] }
ctor.workspace = true
db = { workspace = true, "features" = ["test-support"] }
editor = { workspace = true, "features" = ["test-support"] }
env_logger.workspace = true
fs = { workspace = true, "features" = ["test-support"] }
git = { workspace = true, "features" = ["test-support"] }
gpui = { workspace = true, "features" = ["test-support"] } gpui = { workspace = true, "features" = ["test-support"] }
gpui_tokio.workspace = true indoc.workspace = true
language = { workspace = true, "features" = ["test-support"] } language = { workspace = true, "features" = ["test-support"] }
language_model = { workspace = true, "features" = ["test-support"] } language_model = { workspace = true, "features" = ["test-support"] }
lsp = { workspace = true, "features" = ["test-support"] } parking_lot.workspace = true
pretty_assertions.workspace = true pretty_assertions.workspace = true
project = { workspace = true, "features" = ["test-support"] } project = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
rand.workspace = true rand.workspace = true
reqwest_client.workspace = true
settings = { workspace = true, "features" = ["test-support"] }
tempfile.workspace = true
terminal = { workspace = true, "features" = ["test-support"] }
theme = { workspace = true, "features" = ["test-support"] }
tree-sitter-rust.workspace = true
unindent = { workspace = true }
worktree = { workspace = true, "features" = ["test-support"] }
zlog.workspace = true

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,341 @@
use std::sync::Arc;
use agent_settings::{AgentProfileId, AgentProfileSettings, AgentSettings};
use assistant_tool::{Tool, ToolSource, ToolWorkingSet, UniqueToolName};
use collections::IndexMap;
use convert_case::{Case, Casing};
use fs::Fs;
use gpui::{App, Entity, SharedString};
use settings::{Settings, update_settings_file};
use util::ResultExt;
/// A named tool-enablement configuration for the agent.
///
/// Holds only a profile id and a handle to the shared tool working set; the
/// actual per-profile settings are looked up on demand from
/// `AgentSettings::get_global(cx).profiles` using the id.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct AgentProfile {
    // Key into `AgentSettings::profiles`; may stop resolving if the profile
    // is removed from settings after this handle was created.
    id: AgentProfileId,
    // All registered tools; this profile filters them via its settings.
    tool_set: Entity<ToolWorkingSet>,
}
/// Maps each profile's id to its human-readable display name.
pub type AvailableProfiles = IndexMap<AgentProfileId, SharedString>;
impl AgentProfile {
    /// Wraps an existing profile id together with the working set of tools it
    /// filters.
    pub fn new(id: AgentProfileId, tool_set: Entity<ToolWorkingSet>) -> Self {
        Self { id, tool_set }
    }

    /// Saves a new profile to the settings.
    ///
    /// The new profile's id is derived by kebab-casing `name`. When
    /// `base_profile_id` resolves to an existing profile, its tool and context
    /// server configuration is copied over; otherwise the new profile starts
    /// from defaults. Returns the id of the newly created profile.
    pub fn create(
        name: String,
        base_profile_id: Option<AgentProfileId>,
        fs: Arc<dyn Fs>,
        cx: &App,
    ) -> AgentProfileId {
        let id = AgentProfileId(name.to_case(Case::Kebab).into());
        let base_profile =
            base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned());
        let profile_settings = AgentProfileSettings {
            name: name.into(),
            tools: base_profile
                .as_ref()
                .map(|profile| profile.tools.clone())
                .unwrap_or_default(),
            enable_all_context_servers: base_profile
                .as_ref()
                .map(|profile| profile.enable_all_context_servers)
                .unwrap_or_default(),
            context_servers: base_profile
                .map(|profile| profile.context_servers)
                .unwrap_or_default(),
        };
        update_settings_file(fs, cx, {
            let id = id.clone();
            move |settings, _cx| {
                // Persisting is best-effort: failures are logged, not surfaced.
                profile_settings.save_to_settings(id, settings).log_err();
            }
        });
        id
    }

    /// Returns a map of AgentProfileIds to their names
    pub fn available_profiles(cx: &App) -> AvailableProfiles {
        let mut profiles = AvailableProfiles::default();
        for (id, profile) in AgentSettings::get_global(cx).profiles.iter() {
            profiles.insert(id.clone(), profile.name.clone());
        }
        profiles
    }

    pub fn id(&self) -> &AgentProfileId {
        &self.id
    }

    /// Returns every tool from the working set that this profile enables.
    ///
    /// Returns an empty `Vec` when the profile id no longer exists in the
    /// global settings.
    pub fn enabled_tools(&self, cx: &App) -> Vec<(UniqueToolName, Arc<dyn Tool>)> {
        let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else {
            return Vec::new();
        };
        self.tool_set
            .read(cx)
            .tools(cx)
            .into_iter()
            .filter(|(_, tool)| Self::is_enabled(settings, tool.source(), &tool.name()))
            .collect()
    }

    /// Whether this profile enables the given tool. Returns `false` when the
    /// profile id no longer exists in the global settings.
    pub fn is_tool_enabled(&self, source: ToolSource, tool_name: String, cx: &App) -> bool {
        let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else {
            return false;
        };
        Self::is_enabled(settings, source, &tool_name)
    }

    // Native tools must be explicitly enabled in the profile's tool map;
    // context server (MCP) tools fall back to the profile-wide
    // `enable_all_context_servers` flag when the server's preset doesn't
    // mention the tool. Takes `&str` since only a borrowed lookup key is
    // needed (the map keys borrow as `str`, as the original `as_str()` calls
    // relied on).
    fn is_enabled(settings: &AgentProfileSettings, source: ToolSource, name: &str) -> bool {
        match source {
            ToolSource::Native => *settings.tools.get(name).unwrap_or(&false),
            ToolSource::ContextServer { id } => settings
                .context_servers
                .get(id.as_ref())
                .and_then(|preset| preset.tools.get(name).copied())
                .unwrap_or(settings.enable_all_context_servers),
        }
    }
}
#[cfg(test)]
mod tests {
    use agent_settings::ContextServerPreset;
    use assistant_tool::ToolRegistry;
    use collections::IndexMap;
    use gpui::SharedString;
    use gpui::{AppContext, TestAppContext};
    use http_client::FakeHttpClient;
    use project::Project;
    use settings::{Settings, SettingsStore};

    use super::*;

    // The default profile should enable the built-in tools marked enabled in
    // its settings (minus provider-dependent ones) plus all registered MCP
    // tools, since the default profile enables all context servers.
    #[gpui::test]
    async fn test_enabled_built_in_tools_for_profile(cx: &mut TestAppContext) {
        init_test_settings(cx);

        let id = AgentProfileId::default();
        let profile_settings = cx.read(|cx| {
            AgentSettings::get_global(cx)
                .profiles
                .get(&id)
                .unwrap()
                .clone()
        });
        let tool_set = default_tool_set(cx);
        let profile = AgentProfile::new(id, tool_set);

        let mut enabled_tools = cx
            .read(|cx| profile.enabled_tools(cx))
            .into_iter()
            .map(|(_, tool)| tool.name())
            .collect::<Vec<_>>();
        enabled_tools.sort();

        let mut expected_tools = profile_settings
            .tools
            .into_iter()
            .filter_map(|(tool, enabled)| enabled.then_some(tool.to_string()))
            // Provider dependent
            .filter(|tool| tool != "web_search")
            .collect::<Vec<_>>();
        // Plus all registered MCP tools
        expected_tools.extend(["enabled_mcp_tool".into(), "disabled_mcp_tool".into()]);
        expected_tools.sort();

        assert_eq!(enabled_tools, expected_tools);
    }

    // A profile with a per-server preset and `enable_all_context_servers:
    // false` should enable exactly the MCP tools the preset marks enabled.
    #[gpui::test]
    async fn test_custom_mcp_settings(cx: &mut TestAppContext) {
        init_test_settings(cx);

        let id = AgentProfileId("custom_mcp".into());
        let profile_settings = cx.read(|cx| {
            AgentSettings::get_global(cx)
                .profiles
                .get(&id)
                .unwrap()
                .clone()
        });
        let tool_set = default_tool_set(cx);
        let profile = AgentProfile::new(id, tool_set);

        let mut enabled_tools = cx
            .read(|cx| profile.enabled_tools(cx))
            .into_iter()
            .map(|(_, tool)| tool.name())
            .collect::<Vec<_>>();
        enabled_tools.sort();

        let mut expected_tools = profile_settings.context_servers["mcp"]
            .tools
            .iter()
            .filter_map(|(key, enabled)| enabled.then(|| key.to_string()))
            .collect::<Vec<_>>();
        expected_tools.sort();

        assert_eq!(enabled_tools, expected_tools);
    }

    // With all context servers disabled and no presets, only the enabled
    // built-in tools from the profile settings should remain.
    #[gpui::test]
    async fn test_only_built_in(cx: &mut TestAppContext) {
        init_test_settings(cx);

        let id = AgentProfileId("write_minus_mcp".into());
        let profile_settings = cx.read(|cx| {
            AgentSettings::get_global(cx)
                .profiles
                .get(&id)
                .unwrap()
                .clone()
        });
        let tool_set = default_tool_set(cx);
        let profile = AgentProfile::new(id, tool_set);

        let mut enabled_tools = cx
            .read(|cx| profile.enabled_tools(cx))
            .into_iter()
            .map(|(_, tool)| tool.name())
            .collect::<Vec<_>>();
        enabled_tools.sort();

        let mut expected_tools = profile_settings
            .tools
            .into_iter()
            .filter_map(|(tool, enabled)| enabled.then_some(tool.to_string()))
            // Provider dependent
            .filter(|tool| tool != "web_search")
            .collect::<Vec<_>>();
        expected_tools.sort();

        assert_eq!(enabled_tools, expected_tools);
    }

    // Installs global test settings and registers the two extra profiles the
    // tests above rely on: `write_minus_mcp` (default tools, MCP disabled) and
    // `custom_mcp` (no built-in tools, one MCP preset).
    fn init_test_settings(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            Project::init_settings(cx);
            AgentSettings::register(cx);
            language_model::init_settings(cx);
            ToolRegistry::default_global(cx);
            assistant_tools::init(FakeHttpClient::with_404_response(), cx);
        });
        cx.update(|cx| {
            let mut agent_settings = AgentSettings::get_global(cx).clone();
            agent_settings.profiles.insert(
                AgentProfileId("write_minus_mcp".into()),
                AgentProfileSettings {
                    name: "write_minus_mcp".into(),
                    enable_all_context_servers: false,
                    ..agent_settings.profiles[&AgentProfileId::default()].clone()
                },
            );
            agent_settings.profiles.insert(
                AgentProfileId("custom_mcp".into()),
                AgentProfileSettings {
                    name: "mcp".into(),
                    tools: IndexMap::default(),
                    enable_all_context_servers: false,
                    context_servers: IndexMap::from_iter([("mcp".into(), context_server_preset())]),
                },
            );
            AgentSettings::override_global(agent_settings, cx);
        })
    }

    // One enabled and one disabled tool, to exercise per-tool preset overrides.
    fn context_server_preset() -> ContextServerPreset {
        ContextServerPreset {
            tools: IndexMap::from_iter([
                ("enabled_mcp_tool".into(), true),
                ("disabled_mcp_tool".into(), false),
            ]),
        }
    }

    // A working set containing two fake tools, both served by an MCP server
    // named "mcp".
    fn default_tool_set(cx: &mut TestAppContext) -> Entity<ToolWorkingSet> {
        cx.new(|cx| {
            let mut tool_set = ToolWorkingSet::default();
            tool_set.insert(Arc::new(FakeTool::new("enabled_mcp_tool", "mcp")), cx);
            tool_set.insert(Arc::new(FakeTool::new("disabled_mcp_tool", "mcp")), cx);
            tool_set
        })
    }

    // Minimal `Tool` impl: only `name` and `source` are callable; every other
    // method is `unimplemented!()` because these tests never run the tool.
    struct FakeTool {
        name: String,
        source: SharedString,
    }

    impl FakeTool {
        fn new(name: impl Into<String>, source: impl Into<SharedString>) -> Self {
            Self {
                name: name.into(),
                source: source.into(),
            }
        }
    }

    impl Tool for FakeTool {
        fn name(&self) -> String {
            self.name.clone()
        }

        fn source(&self) -> ToolSource {
            ToolSource::ContextServer {
                id: self.source.clone(),
            }
        }

        fn description(&self) -> String {
            unimplemented!()
        }

        fn icon(&self) -> icons::IconName {
            unimplemented!()
        }

        fn needs_confirmation(
            &self,
            _input: &serde_json::Value,
            _project: &Entity<Project>,
            _cx: &App,
        ) -> bool {
            unimplemented!()
        }

        fn ui_text(&self, _input: &serde_json::Value) -> String {
            unimplemented!()
        }

        fn run(
            self: Arc<Self>,
            _input: serde_json::Value,
            _request: Arc<language_model::LanguageModelRequest>,
            _project: Entity<Project>,
            _action_log: Entity<action_log::ActionLog>,
            _model: Arc<dyn language_model::LanguageModel>,
            _window: Option<gpui::AnyWindowHandle>,
            _cx: &mut App,
        ) -> assistant_tool::ToolResult {
            unimplemented!()
        }

        fn may_perform_edits(&self) -> bool {
            unimplemented!()
        }
    }
}

View File

@@ -1,8 +1,11 @@
use agent::outline; use crate::thread::Thread;
use assistant_text_thread::TextThread; use assistant_context::AssistantContext;
use assistant_tool::outline;
use collections::HashSet;
use futures::future; use futures::future;
use futures::{FutureExt, future::Shared}; use futures::{FutureExt, future::Shared};
use gpui::{App, AppContext as _, ElementId, Entity, SharedString, Task}; use gpui::{App, AppContext as _, ElementId, Entity, SharedString, Task};
use icons::IconName;
use language::Buffer; use language::Buffer;
use language_model::{LanguageModelImage, LanguageModelRequestMessage, MessageContent}; use language_model::{LanguageModelImage, LanguageModelRequestMessage, MessageContent};
use project::{Project, ProjectEntryId, ProjectPath, Worktree}; use project::{Project, ProjectEntryId, ProjectPath, Worktree};
@@ -14,7 +17,6 @@ use std::hash::{Hash, Hasher};
use std::path::PathBuf; use std::path::PathBuf;
use std::{ops::Range, path::Path, sync::Arc}; use std::{ops::Range, path::Path, sync::Arc};
use text::{Anchor, OffsetRangeExt as _}; use text::{Anchor, OffsetRangeExt as _};
use ui::IconName;
use util::markdown::MarkdownCodeBlock; use util::markdown::MarkdownCodeBlock;
use util::rel_path::RelPath; use util::rel_path::RelPath;
use util::{ResultExt as _, post_inc}; use util::{ResultExt as _, post_inc};
@@ -179,7 +181,7 @@ impl FileContextHandle {
}) })
} }
fn load(self, cx: &App) -> Task<Option<AgentContext>> { fn load(self, cx: &App) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
let buffer_ref = self.buffer.read(cx); let buffer_ref = self.buffer.read(cx);
let Some(file) = buffer_ref.file() else { let Some(file) = buffer_ref.file() else {
log::error!("file context missing path"); log::error!("file context missing path");
@@ -204,7 +206,7 @@ impl FileContextHandle {
text: buffer_content.text.into(), text: buffer_content.text.into(),
is_outline: buffer_content.is_outline, is_outline: buffer_content.is_outline,
}); });
Some(context) Some((context, vec![buffer]))
}) })
} }
} }
@@ -254,7 +256,11 @@ impl DirectoryContextHandle {
self.entry_id.hash(state) self.entry_id.hash(state)
} }
fn load(self, project: Entity<Project>, cx: &mut App) -> Task<Option<AgentContext>> { fn load(
self,
project: Entity<Project>,
cx: &mut App,
) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
let Some(worktree) = project.read(cx).worktree_for_entry(self.entry_id, cx) else { let Some(worktree) = project.read(cx).worktree_for_entry(self.entry_id, cx) else {
return Task::ready(None); return Task::ready(None);
}; };
@@ -301,7 +307,7 @@ impl DirectoryContextHandle {
}); });
cx.background_spawn(async move { cx.background_spawn(async move {
let (rope, _buffer) = rope_task.await?; let (rope, buffer) = rope_task.await?;
let fenced_codeblock = MarkdownCodeBlock { let fenced_codeblock = MarkdownCodeBlock {
tag: &codeblock_tag(&full_path, None), tag: &codeblock_tag(&full_path, None),
text: &rope.to_string(), text: &rope.to_string(),
@@ -312,22 +318,18 @@ impl DirectoryContextHandle {
rel_path, rel_path,
fenced_codeblock, fenced_codeblock,
}; };
Some(descendant) Some((descendant, buffer))
}) })
})); }));
cx.background_spawn(async move { cx.background_spawn(async move {
let descendants = descendants_future let (descendants, buffers) = descendants_future.await.into_iter().flatten().unzip();
.await
.into_iter()
.flatten()
.collect::<Vec<_>>();
let context = AgentContext::Directory(DirectoryContext { let context = AgentContext::Directory(DirectoryContext {
handle: self, handle: self,
full_path: directory_full_path, full_path: directory_full_path,
descendants, descendants,
}); });
Some(context) Some((context, buffers))
}) })
} }
} }
@@ -395,7 +397,7 @@ impl SymbolContextHandle {
.into() .into()
} }
fn load(self, cx: &App) -> Task<Option<AgentContext>> { fn load(self, cx: &App) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
let buffer_ref = self.buffer.read(cx); let buffer_ref = self.buffer.read(cx);
let Some(file) = buffer_ref.file() else { let Some(file) = buffer_ref.file() else {
log::error!("symbol context's file has no path"); log::error!("symbol context's file has no path");
@@ -404,13 +406,14 @@ impl SymbolContextHandle {
let full_path = file.full_path(cx).to_string_lossy().into_owned(); let full_path = file.full_path(cx).to_string_lossy().into_owned();
let line_range = self.enclosing_range.to_point(&buffer_ref.snapshot()); let line_range = self.enclosing_range.to_point(&buffer_ref.snapshot());
let text = self.text(cx); let text = self.text(cx);
let buffer = self.buffer.clone();
let context = AgentContext::Symbol(SymbolContext { let context = AgentContext::Symbol(SymbolContext {
handle: self, handle: self,
full_path, full_path,
line_range, line_range,
text, text,
}); });
Task::ready(Some(context)) Task::ready(Some((context, vec![buffer])))
} }
} }
@@ -465,12 +468,13 @@ impl SelectionContextHandle {
.into() .into()
} }
fn load(self, cx: &App) -> Task<Option<AgentContext>> { fn load(self, cx: &App) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
let Some(full_path) = self.full_path(cx) else { let Some(full_path) = self.full_path(cx) else {
log::error!("selection context's file has no path"); log::error!("selection context's file has no path");
return Task::ready(None); return Task::ready(None);
}; };
let text = self.text(cx); let text = self.text(cx);
let buffer = self.buffer.clone();
let context = AgentContext::Selection(SelectionContext { let context = AgentContext::Selection(SelectionContext {
full_path: full_path.to_string_lossy().into_owned(), full_path: full_path.to_string_lossy().into_owned(),
line_range: self.line_range(cx), line_range: self.line_range(cx),
@@ -478,7 +482,7 @@ impl SelectionContextHandle {
handle: self, handle: self,
}); });
Task::ready(Some(context)) Task::ready(Some((context, vec![buffer])))
} }
} }
@@ -519,8 +523,8 @@ impl FetchedUrlContext {
})) }))
} }
pub fn load(self) -> Task<Option<AgentContext>> { pub fn load(self) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
Task::ready(Some(AgentContext::FetchedUrl(self))) Task::ready(Some((AgentContext::FetchedUrl(self), vec![])))
} }
} }
@@ -533,7 +537,7 @@ impl Display for FetchedUrlContext {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ThreadContextHandle { pub struct ThreadContextHandle {
pub thread: Entity<agent::Thread>, pub thread: Entity<Thread>,
pub context_id: ContextId, pub context_id: ContextId,
} }
@@ -554,20 +558,22 @@ impl ThreadContextHandle {
} }
pub fn title(&self, cx: &App) -> SharedString { pub fn title(&self, cx: &App) -> SharedString {
self.thread.read(cx).title() self.thread.read(cx).summary().or_default()
} }
fn load(self, cx: &mut App) -> Task<Option<AgentContext>> { fn load(self, cx: &App) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
let task = self.thread.update(cx, |thread, cx| thread.summary(cx)); cx.spawn(async move |cx| {
let title = self.title(cx); let text = Thread::wait_for_detailed_summary_or_text(&self.thread, cx).await?;
cx.background_spawn(async move { let title = self
let text = task.await?; .thread
.read_with(cx, |thread, _cx| thread.summary().or_default())
.ok()?;
let context = AgentContext::Thread(ThreadContext { let context = AgentContext::Thread(ThreadContext {
title, title,
text, text,
handle: self, handle: self,
}); });
Some(context) Some((context, vec![]))
}) })
} }
} }
@@ -581,7 +587,7 @@ impl Display for ThreadContext {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct TextThreadContextHandle { pub struct TextThreadContextHandle {
pub text_thread: Entity<TextThread>, pub context: Entity<AssistantContext>,
pub context_id: ContextId, pub context_id: ContextId,
} }
@@ -595,26 +601,26 @@ pub struct TextThreadContext {
impl TextThreadContextHandle { impl TextThreadContextHandle {
// pub fn lookup_key() -> // pub fn lookup_key() ->
pub fn eq_for_key(&self, other: &Self) -> bool { pub fn eq_for_key(&self, other: &Self) -> bool {
self.text_thread == other.text_thread self.context == other.context
} }
pub fn hash_for_key<H: Hasher>(&self, state: &mut H) { pub fn hash_for_key<H: Hasher>(&self, state: &mut H) {
self.text_thread.hash(state) self.context.hash(state)
} }
pub fn title(&self, cx: &App) -> SharedString { pub fn title(&self, cx: &App) -> SharedString {
self.text_thread.read(cx).summary().or_default() self.context.read(cx).summary().or_default()
} }
fn load(self, cx: &App) -> Task<Option<AgentContext>> { fn load(self, cx: &App) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
let title = self.title(cx); let title = self.title(cx);
let text = self.text_thread.read(cx).to_xml(cx); let text = self.context.read(cx).to_xml(cx);
let context = AgentContext::TextThread(TextThreadContext { let context = AgentContext::TextThread(TextThreadContext {
title, title,
text: text.into(), text: text.into(),
handle: self, handle: self,
}); });
Task::ready(Some(context)) Task::ready(Some((context, vec![])))
} }
} }
@@ -660,7 +666,7 @@ impl RulesContextHandle {
self, self,
prompt_store: &Option<Entity<PromptStore>>, prompt_store: &Option<Entity<PromptStore>>,
cx: &App, cx: &App,
) -> Task<Option<AgentContext>> { ) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
let Some(prompt_store) = prompt_store.as_ref() else { let Some(prompt_store) = prompt_store.as_ref() else {
return Task::ready(None); return Task::ready(None);
}; };
@@ -679,7 +685,7 @@ impl RulesContextHandle {
title, title,
text, text,
}); });
Some(context) Some((context, vec![]))
}) })
} }
} }
@@ -742,21 +748,32 @@ impl ImageContext {
} }
} }
pub fn load(self, cx: &App) -> Task<Option<AgentContext>> { pub fn load(self, cx: &App) -> Task<Option<(AgentContext, Vec<Entity<Buffer>>)>> {
cx.background_spawn(async move { cx.background_spawn(async move {
self.image_task.clone().await; self.image_task.clone().await;
Some(AgentContext::Image(self)) Some((AgentContext::Image(self), vec![]))
}) })
} }
} }
#[derive(Debug, Clone, Default)]
pub struct ContextLoadResult {
pub loaded_context: LoadedContext,
pub referenced_buffers: HashSet<Entity<Buffer>>,
}
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
pub struct LoadedContext { pub struct LoadedContext {
pub contexts: Vec<AgentContext>,
pub text: String, pub text: String,
pub images: Vec<LanguageModelImage>, pub images: Vec<LanguageModelImage>,
} }
impl LoadedContext { impl LoadedContext {
pub fn is_empty(&self) -> bool {
self.text.is_empty() && self.images.is_empty()
}
pub fn add_to_request_message(&self, request_message: &mut LanguageModelRequestMessage) { pub fn add_to_request_message(&self, request_message: &mut LanguageModelRequestMessage) {
if !self.text.is_empty() { if !self.text.is_empty() {
request_message request_message
@@ -787,7 +804,7 @@ pub fn load_context(
project: &Entity<Project>, project: &Entity<Project>,
prompt_store: &Option<Entity<PromptStore>>, prompt_store: &Option<Entity<PromptStore>>,
cx: &mut App, cx: &mut App,
) -> Task<LoadedContext> { ) -> Task<ContextLoadResult> {
let load_tasks: Vec<_> = contexts let load_tasks: Vec<_> = contexts
.into_iter() .into_iter()
.map(|context| match context { .map(|context| match context {
@@ -806,7 +823,16 @@ pub fn load_context(
cx.background_spawn(async move { cx.background_spawn(async move {
let load_results = future::join_all(load_tasks).await; let load_results = future::join_all(load_tasks).await;
let mut contexts = Vec::new();
let mut text = String::new(); let mut text = String::new();
let mut referenced_buffers = HashSet::default();
for context in load_results {
let Some((context, buffers)) = context else {
continue;
};
contexts.push(context);
referenced_buffers.extend(buffers);
}
let mut file_context = Vec::new(); let mut file_context = Vec::new();
let mut directory_context = Vec::new(); let mut directory_context = Vec::new();
@@ -817,7 +843,7 @@ pub fn load_context(
let mut text_thread_context = Vec::new(); let mut text_thread_context = Vec::new();
let mut rules_context = Vec::new(); let mut rules_context = Vec::new();
let mut images = Vec::new(); let mut images = Vec::new();
for context in load_results.into_iter().flatten() { for context in &contexts {
match context { match context {
AgentContext::File(context) => file_context.push(context), AgentContext::File(context) => file_context.push(context),
AgentContext::Directory(context) => directory_context.push(context), AgentContext::Directory(context) => directory_context.push(context),
@@ -842,7 +868,14 @@ pub fn load_context(
&& text_thread_context.is_empty() && text_thread_context.is_empty()
&& rules_context.is_empty() && rules_context.is_empty()
{ {
return LoadedContext { text, images }; return ContextLoadResult {
loaded_context: LoadedContext {
contexts,
text,
images,
},
referenced_buffers,
};
} }
text.push_str( text.push_str(
@@ -928,7 +961,14 @@ pub fn load_context(
text.push_str("</context>\n"); text.push_str("</context>\n");
LoadedContext { text, images } ContextLoadResult {
loaded_context: LoadedContext {
contexts,
text,
images,
},
referenced_buffers,
}
}) })
} }
@@ -1091,13 +1131,11 @@ mod tests {
assert!(content_len > outline::AUTO_OUTLINE_SIZE); assert!(content_len > outline::AUTO_OUTLINE_SIZE);
let file_context = load_context_for("file.txt", large_content, cx).await; let file_context = file_context_for(large_content, cx).await;
assert!( assert!(
file_context file_context.is_outline,
.text "Large file should use outline format"
.contains(&format!("# File outline for {}", path!("test/file.txt"))),
"Large files should not get an outline"
); );
assert!( assert!(
@@ -1115,38 +1153,29 @@ mod tests {
assert!(content_len < outline::AUTO_OUTLINE_SIZE); assert!(content_len < outline::AUTO_OUTLINE_SIZE);
let file_context = load_context_for("file.txt", small_content.to_string(), cx).await; let file_context = file_context_for(small_content.to_string(), cx).await;
assert!( assert!(
!file_context !file_context.is_outline,
.text
.contains(&format!("# File outline for {}", path!("test/file.txt"))),
"Small files should not get an outline" "Small files should not get an outline"
); );
assert!( assert_eq!(file_context.text, small_content);
file_context.text.contains(small_content),
"Small files should use full content"
);
} }
async fn load_context_for( async fn file_context_for(content: String, cx: &mut TestAppContext) -> FileContext {
filename: &str,
content: String,
cx: &mut TestAppContext,
) -> LoadedContext {
// Create a test project with the file // Create a test project with the file
let project = create_test_project( let project = create_test_project(
cx, cx,
json!({ json!({
filename: content, "file.txt": content,
}), }),
) )
.await; .await;
// Open the buffer // Open the buffer
let buffer_path = project let buffer_path = project
.read_with(cx, |project, cx| project.find_project_path(filename, cx)) .read_with(cx, |project, cx| project.find_project_path("file.txt", cx))
.unwrap(); .unwrap();
let buffer = project let buffer = project
@@ -1161,5 +1190,16 @@ mod tests {
cx.update(|cx| load_context(vec![context_handle], &project, &None, cx)) cx.update(|cx| load_context(vec![context_handle], &project, &None, cx))
.await .await
.loaded_context
.contexts
.into_iter()
.find_map(|ctx| {
if let AgentContext::File(file_ctx) = ctx {
Some(file_ctx)
} else {
None
}
})
.expect("Should have found a file context")
} }
} }

View File

@@ -0,0 +1,140 @@
use std::sync::Arc;
use action_log::ActionLog;
use anyhow::{Result, anyhow, bail};
use assistant_tool::{Tool, ToolResult, ToolSource};
use context_server::{ContextServerId, types};
use gpui::{AnyWindowHandle, App, Entity, Task};
use icons::IconName;
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
use project::{Project, context_server_store::ContextServerStore};
pub struct ContextServerTool {
store: Entity<ContextServerStore>,
server_id: ContextServerId,
tool: types::Tool,
}
impl ContextServerTool {
pub fn new(
store: Entity<ContextServerStore>,
server_id: ContextServerId,
tool: types::Tool,
) -> Self {
Self {
store,
server_id,
tool,
}
}
}
impl Tool for ContextServerTool {
fn name(&self) -> String {
self.tool.name.clone()
}
fn description(&self) -> String {
self.tool.description.clone().unwrap_or_default()
}
fn icon(&self) -> IconName {
IconName::ToolHammer
}
fn source(&self) -> ToolSource {
ToolSource::ContextServer {
id: self.server_id.clone().0.into(),
}
}
fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity<Project>, _: &App) -> bool {
true
}
fn may_perform_edits(&self) -> bool {
true
}
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
let mut schema = self.tool.input_schema.clone();
assistant_tool::adapt_schema_to_format(&mut schema, format)?;
Ok(match schema {
serde_json::Value::Null => {
serde_json::json!({ "type": "object", "properties": [] })
}
serde_json::Value::Object(map) if map.is_empty() => {
serde_json::json!({ "type": "object", "properties": [] })
}
_ => schema,
})
}
fn ui_text(&self, _input: &serde_json::Value) -> String {
format!("Run MCP tool `{}`", self.tool.name)
}
fn run(
self: Arc<Self>,
input: serde_json::Value,
_request: Arc<LanguageModelRequest>,
_project: Entity<Project>,
_action_log: Entity<ActionLog>,
_model: Arc<dyn LanguageModel>,
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
if let Some(server) = self.store.read(cx).get_running_server(&self.server_id) {
let tool_name = self.tool.name.clone();
cx.spawn(async move |_cx| {
let Some(protocol) = server.client() else {
bail!("Context server not initialized");
};
let arguments = if let serde_json::Value::Object(map) = input {
Some(map.into_iter().collect())
} else {
None
};
log::trace!(
"Running tool: {} with arguments: {:?}",
tool_name,
arguments
);
let response = protocol
.request::<context_server::types::requests::CallTool>(
context_server::types::CallToolParams {
name: tool_name,
arguments,
meta: None,
},
)
.await?;
let mut result = String::new();
for content in response.content {
match content {
types::ToolResponseContent::Text { text } => {
result.push_str(&text);
}
types::ToolResponseContent::Image { .. } => {
log::warn!("Ignoring image content from tool response");
}
types::ToolResponseContent::Audio { .. } => {
log::warn!("Ignoring audio content from tool response");
}
types::ToolResponseContent::Resource { .. } => {
log::warn!("Ignoring resource content from tool response");
}
}
}
Ok(result.into())
})
.into()
} else {
Task::ready(Err(anyhow!("Context server not found"))).into()
}
}
}

View File

@@ -1,11 +1,14 @@
use crate::context::{ use crate::{
AgentContextHandle, AgentContextKey, ContextId, ContextKind, DirectoryContextHandle, context::{
FetchedUrlContext, FileContextHandle, ImageContext, RulesContextHandle, SelectionContextHandle, AgentContextHandle, AgentContextKey, ContextId, ContextKind, DirectoryContextHandle,
SymbolContextHandle, TextThreadContextHandle, ThreadContextHandle, FetchedUrlContext, FileContextHandle, ImageContext, RulesContextHandle,
SelectionContextHandle, SymbolContextHandle, TextThreadContextHandle, ThreadContextHandle,
},
thread::{MessageId, Thread, ThreadId},
thread_store::ThreadStore,
}; };
use agent_client_protocol as acp;
use anyhow::{Context as _, Result, anyhow}; use anyhow::{Context as _, Result, anyhow};
use assistant_text_thread::TextThread; use assistant_context::AssistantContext;
use collections::{HashSet, IndexSet}; use collections::{HashSet, IndexSet};
use futures::{self, FutureExt}; use futures::{self, FutureExt};
use gpui::{App, Context, Entity, EventEmitter, Image, SharedString, Task, WeakEntity}; use gpui::{App, Context, Entity, EventEmitter, Image, SharedString, Task, WeakEntity};
@@ -26,9 +29,10 @@ use text::{Anchor, OffsetRangeExt};
pub struct ContextStore { pub struct ContextStore {
project: WeakEntity<Project>, project: WeakEntity<Project>,
thread_store: Option<WeakEntity<ThreadStore>>,
next_context_id: ContextId, next_context_id: ContextId,
context_set: IndexSet<AgentContextKey>, context_set: IndexSet<AgentContextKey>,
context_thread_ids: HashSet<acp::SessionId>, context_thread_ids: HashSet<ThreadId>,
context_text_thread_paths: HashSet<Arc<Path>>, context_text_thread_paths: HashSet<Arc<Path>>,
} }
@@ -39,9 +43,13 @@ pub enum ContextStoreEvent {
impl EventEmitter<ContextStoreEvent> for ContextStore {} impl EventEmitter<ContextStoreEvent> for ContextStore {}
impl ContextStore { impl ContextStore {
pub fn new(project: WeakEntity<Project>) -> Self { pub fn new(
project: WeakEntity<Project>,
thread_store: Option<WeakEntity<ThreadStore>>,
) -> Self {
Self { Self {
project, project,
thread_store,
next_context_id: ContextId::zero(), next_context_id: ContextId::zero(),
context_set: IndexSet::default(), context_set: IndexSet::default(),
context_thread_ids: HashSet::default(), context_thread_ids: HashSet::default(),
@@ -59,6 +67,29 @@ impl ContextStore {
cx.notify(); cx.notify();
} }
pub fn new_context_for_thread(
&self,
thread: &Thread,
exclude_messages_from_id: Option<MessageId>,
) -> Vec<AgentContextHandle> {
let existing_context = thread
.messages()
.take_while(|message| exclude_messages_from_id.is_none_or(|id| message.id != id))
.flat_map(|message| {
message
.loaded_context
.contexts
.iter()
.map(|context| AgentContextKey(context.handle()))
})
.collect::<HashSet<_>>();
self.context_set
.iter()
.filter(|context| !existing_context.contains(context))
.map(|entry| entry.0.clone())
.collect::<Vec<_>>()
}
pub fn add_file_from_path( pub fn add_file_from_path(
&mut self, &mut self,
project_path: ProjectPath, project_path: ProjectPath,
@@ -178,7 +209,7 @@ impl ContextStore {
pub fn add_thread( pub fn add_thread(
&mut self, &mut self,
thread: Entity<agent::Thread>, thread: Entity<Thread>,
remove_if_exists: bool, remove_if_exists: bool,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Option<AgentContextHandle> { ) -> Option<AgentContextHandle> {
@@ -200,13 +231,13 @@ impl ContextStore {
pub fn add_text_thread( pub fn add_text_thread(
&mut self, &mut self,
text_thread: Entity<TextThread>, context: Entity<AssistantContext>,
remove_if_exists: bool, remove_if_exists: bool,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Option<AgentContextHandle> { ) -> Option<AgentContextHandle> {
let context_id = self.next_context_id.post_inc(); let context_id = self.next_context_id.post_inc();
let context = AgentContextHandle::TextThread(TextThreadContextHandle { let context = AgentContextHandle::TextThread(TextThreadContextHandle {
text_thread, context,
context_id, context_id,
}); });
@@ -353,15 +384,21 @@ impl ContextStore {
); );
}; };
} }
SuggestedContext::TextThread { SuggestedContext::Thread { thread, name: _ } => {
text_thread, if let Some(thread) = thread.upgrade() {
name: _, let context_id = self.next_context_id.post_inc();
} => { self.insert_context(
if let Some(text_thread) = text_thread.upgrade() { AgentContextHandle::Thread(ThreadContextHandle { thread, context_id }),
cx,
);
}
}
SuggestedContext::TextThread { context, name: _ } => {
if let Some(context) = context.upgrade() {
let context_id = self.next_context_id.post_inc(); let context_id = self.next_context_id.post_inc();
self.insert_context( self.insert_context(
AgentContextHandle::TextThread(TextThreadContextHandle { AgentContextHandle::TextThread(TextThreadContextHandle {
text_thread, context,
context_id, context_id,
}), }),
cx, cx,
@@ -373,20 +410,20 @@ impl ContextStore {
fn insert_context(&mut self, context: AgentContextHandle, cx: &mut Context<Self>) -> bool { fn insert_context(&mut self, context: AgentContextHandle, cx: &mut Context<Self>) -> bool {
match &context { match &context {
// AgentContextHandle::Thread(thread_context) => { AgentContextHandle::Thread(thread_context) => {
// if let Some(thread_store) = self.thread_store.clone() { if let Some(thread_store) = self.thread_store.clone() {
// thread_context.thread.update(cx, |thread, cx| { thread_context.thread.update(cx, |thread, cx| {
// thread.start_generating_detailed_summary_if_needed(thread_store, cx); thread.start_generating_detailed_summary_if_needed(thread_store, cx);
// }); });
// self.context_thread_ids self.context_thread_ids
// .insert(thread_context.thread.read(cx).id().clone()); .insert(thread_context.thread.read(cx).id().clone());
// } else { } else {
// return false; return false;
// } }
// } }
AgentContextHandle::TextThread(text_thread_context) => { AgentContextHandle::TextThread(text_thread_context) => {
self.context_text_thread_paths self.context_text_thread_paths
.extend(text_thread_context.text_thread.read(cx).path().cloned()); .extend(text_thread_context.context.read(cx).path().cloned());
} }
_ => {} _ => {}
} }
@@ -408,7 +445,7 @@ impl ContextStore {
.remove(thread_context.thread.read(cx).id()); .remove(thread_context.thread.read(cx).id());
} }
AgentContextHandle::TextThread(text_thread_context) => { AgentContextHandle::TextThread(text_thread_context) => {
if let Some(path) = text_thread_context.text_thread.read(cx).path() { if let Some(path) = text_thread_context.context.read(cx).path() {
self.context_text_thread_paths.remove(path); self.context_text_thread_paths.remove(path);
} }
} }
@@ -477,7 +514,7 @@ impl ContextStore {
}) })
} }
pub fn includes_thread(&self, thread_id: &acp::SessionId) -> bool { pub fn includes_thread(&self, thread_id: &ThreadId) -> bool {
self.context_thread_ids.contains(thread_id) self.context_thread_ids.contains(thread_id)
} }
@@ -510,9 +547,9 @@ impl ContextStore {
} }
AgentContextHandle::Directory(_) AgentContextHandle::Directory(_)
| AgentContextHandle::Symbol(_) | AgentContextHandle::Symbol(_)
| AgentContextHandle::Thread(_)
| AgentContextHandle::Selection(_) | AgentContextHandle::Selection(_)
| AgentContextHandle::FetchedUrl(_) | AgentContextHandle::FetchedUrl(_)
| AgentContextHandle::Thread(_)
| AgentContextHandle::TextThread(_) | AgentContextHandle::TextThread(_)
| AgentContextHandle::Rules(_) | AgentContextHandle::Rules(_)
| AgentContextHandle::Image(_) => None, | AgentContextHandle::Image(_) => None,
@@ -520,7 +557,7 @@ impl ContextStore {
.collect() .collect()
} }
pub fn thread_ids(&self) -> &HashSet<acp::SessionId> { pub fn thread_ids(&self) -> &HashSet<ThreadId> {
&self.context_thread_ids &self.context_thread_ids
} }
} }
@@ -532,9 +569,13 @@ pub enum SuggestedContext {
icon_path: Option<SharedString>, icon_path: Option<SharedString>,
buffer: WeakEntity<Buffer>, buffer: WeakEntity<Buffer>,
}, },
Thread {
name: SharedString,
thread: WeakEntity<Thread>,
},
TextThread { TextThread {
name: SharedString, name: SharedString,
text_thread: WeakEntity<TextThread>, context: WeakEntity<AssistantContext>,
}, },
} }
@@ -542,6 +583,7 @@ impl SuggestedContext {
pub fn name(&self) -> &SharedString { pub fn name(&self) -> &SharedString {
match self { match self {
Self::File { name, .. } => name, Self::File { name, .. } => name,
Self::Thread { name, .. } => name,
Self::TextThread { name, .. } => name, Self::TextThread { name, .. } => name,
} }
} }
@@ -549,6 +591,7 @@ impl SuggestedContext {
pub fn icon_path(&self) -> Option<SharedString> { pub fn icon_path(&self) -> Option<SharedString> {
match self { match self {
Self::File { icon_path, .. } => icon_path.clone(), Self::File { icon_path, .. } => icon_path.clone(),
Self::Thread { .. } => None,
Self::TextThread { .. } => None, Self::TextThread { .. } => None,
} }
} }
@@ -556,6 +599,7 @@ impl SuggestedContext {
pub fn kind(&self) -> ContextKind { pub fn kind(&self) -> ContextKind {
match self { match self {
Self::File { .. } => ContextKind::File, Self::File { .. } => ContextKind::File,
Self::Thread { .. } => ContextKind::Thread,
Self::TextThread { .. } => ContextKind::TextThread, Self::TextThread { .. } => ContextKind::TextThread,
} }
} }

View File

@@ -1,128 +1,64 @@
use crate::{DbThread, DbThreadMetadata, ThreadsDatabase}; use crate::{ThreadId, thread_store::SerializedThreadMetadata};
use acp_thread::MentionUri; use anyhow::{Context as _, Result};
use agent_client_protocol as acp; use assistant_context::SavedContextMetadata;
use anyhow::{Context as _, Result, anyhow};
use assistant_text_thread::{SavedTextThreadMetadata, TextThread};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use db::kvp::KEY_VALUE_STORE;
use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::*}; use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::*};
use itertools::Itertools; use itertools::Itertools;
use paths::text_threads_dir; use paths::contexts_dir;
use project::Project;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{collections::VecDeque, path::Path, rc::Rc, sync::Arc, time::Duration}; use std::{collections::VecDeque, path::Path, sync::Arc, time::Duration};
use ui::ElementId;
use util::ResultExt as _; use util::ResultExt as _;
const MAX_RECENTLY_OPENED_ENTRIES: usize = 6; const MAX_RECENTLY_OPENED_ENTRIES: usize = 6;
const RECENTLY_OPENED_THREADS_KEY: &str = "recent-agent-threads"; const NAVIGATION_HISTORY_PATH: &str = "agent-navigation-history.json";
const SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE: Duration = Duration::from_millis(50); const SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE: Duration = Duration::from_millis(50);
const DEFAULT_TITLE: &SharedString = &SharedString::new_static("New Thread");
//todo: We should remove this function once we support loading all acp thread
pub fn load_agent_thread(
session_id: acp::SessionId,
history_store: Entity<HistoryStore>,
project: Entity<Project>,
cx: &mut App,
) -> Task<Result<Entity<crate::Thread>>> {
use agent_servers::{AgentServer, AgentServerDelegate};
let server = Rc::new(crate::NativeAgentServer::new(
project.read(cx).fs().clone(),
history_store,
));
let delegate = AgentServerDelegate::new(
project.read(cx).agent_server_store().clone(),
project.clone(),
None,
None,
);
let connection = server.connect(None, delegate, cx);
cx.spawn(async move |cx| {
let (agent, _) = connection.await?;
let agent = agent.downcast::<crate::NativeAgentConnection>().unwrap();
cx.update(|cx| agent.load_thread(session_id, cx))?.await
})
}
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum HistoryEntry { pub enum HistoryEntry {
AcpThread(DbThreadMetadata), Thread(SerializedThreadMetadata),
TextThread(SavedTextThreadMetadata), Context(SavedContextMetadata),
} }
impl HistoryEntry { impl HistoryEntry {
pub fn updated_at(&self) -> DateTime<Utc> { pub fn updated_at(&self) -> DateTime<Utc> {
match self { match self {
HistoryEntry::AcpThread(thread) => thread.updated_at, HistoryEntry::Thread(thread) => thread.updated_at,
HistoryEntry::TextThread(text_thread) => text_thread.mtime.to_utc(), HistoryEntry::Context(context) => context.mtime.to_utc(),
} }
} }
pub fn id(&self) -> HistoryEntryId { pub fn id(&self) -> HistoryEntryId {
match self { match self {
HistoryEntry::AcpThread(thread) => HistoryEntryId::AcpThread(thread.id.clone()), HistoryEntry::Thread(thread) => HistoryEntryId::Thread(thread.id.clone()),
HistoryEntry::TextThread(text_thread) => { HistoryEntry::Context(context) => HistoryEntryId::Context(context.path.clone()),
HistoryEntryId::TextThread(text_thread.path.clone())
}
}
}
pub fn mention_uri(&self) -> MentionUri {
match self {
HistoryEntry::AcpThread(thread) => MentionUri::Thread {
id: thread.id.clone(),
name: thread.title.to_string(),
},
HistoryEntry::TextThread(text_thread) => MentionUri::TextThread {
path: text_thread.path.as_ref().to_owned(),
name: text_thread.title.to_string(),
},
} }
} }
pub fn title(&self) -> &SharedString { pub fn title(&self) -> &SharedString {
match self { match self {
HistoryEntry::AcpThread(thread) => { HistoryEntry::Thread(thread) => &thread.summary,
if thread.title.is_empty() { HistoryEntry::Context(context) => &context.title,
DEFAULT_TITLE
} else {
&thread.title
}
}
HistoryEntry::TextThread(text_thread) => &text_thread.title,
} }
} }
} }
/// Generic identifier for a history entry. /// Generic identifier for a history entry.
#[derive(Clone, PartialEq, Eq, Debug, Hash)] #[derive(Clone, PartialEq, Eq, Debug)]
pub enum HistoryEntryId { pub enum HistoryEntryId {
AcpThread(acp::SessionId), Thread(ThreadId),
TextThread(Arc<Path>), Context(Arc<Path>),
} }
impl Into<ElementId> for HistoryEntryId { #[derive(Serialize, Deserialize)]
fn into(self) -> ElementId {
match self {
HistoryEntryId::AcpThread(session_id) => ElementId::Name(session_id.0.into()),
HistoryEntryId::TextThread(path) => ElementId::Path(path),
}
}
}
#[derive(Serialize, Deserialize, Debug)]
enum SerializedRecentOpen { enum SerializedRecentOpen {
AcpThread(String), Thread(String),
TextThread(String), ContextName(String),
/// Old format which stores the full path
Context(String),
} }
pub struct HistoryStore { pub struct HistoryStore {
threads: Vec<DbThreadMetadata>, context_store: Entity<assistant_context::ContextStore>,
entries: Vec<HistoryEntry>,
text_thread_store: Entity<assistant_text_thread::TextThreadStore>,
recently_opened_entries: VecDeque<HistoryEntryId>, recently_opened_entries: VecDeque<HistoryEntryId>,
_subscriptions: Vec<gpui::Subscription>, _subscriptions: Vec<gpui::Subscription>,
_save_recently_opened_entries_task: Task<()>, _save_recently_opened_entries_task: Task<()>,
@@ -130,133 +66,57 @@ pub struct HistoryStore {
impl HistoryStore { impl HistoryStore {
pub fn new( pub fn new(
text_thread_store: Entity<assistant_text_thread::TextThreadStore>, context_store: Entity<assistant_context::ContextStore>,
initial_recent_entries: impl IntoIterator<Item = HistoryEntryId>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Self { ) -> Self {
let subscriptions = let subscriptions = vec![cx.observe(&context_store, |_, _, cx| cx.notify())];
vec![cx.observe(&text_thread_store, |this, _, cx| this.update_entries(cx))];
cx.spawn(async move |this, cx| { cx.spawn(async move |this, cx| {
let entries = Self::load_recently_opened_entries(cx).await; let entries = Self::load_recently_opened_entries(cx).await.log_err()?;
this.update(cx, |this, cx| { this.update(cx, |this, _| {
if let Some(entries) = entries.log_err() { this.recently_opened_entries
this.recently_opened_entries = entries; .extend(
} entries.into_iter().take(
MAX_RECENTLY_OPENED_ENTRIES
this.reload(cx); .saturating_sub(this.recently_opened_entries.len()),
),
);
}) })
.ok(); .ok()
}) })
.detach(); .detach();
Self { Self {
text_thread_store, context_store,
recently_opened_entries: VecDeque::default(), recently_opened_entries: initial_recent_entries.into_iter().collect(),
threads: Vec::default(),
entries: Vec::default(),
_subscriptions: subscriptions, _subscriptions: subscriptions,
_save_recently_opened_entries_task: Task::ready(()), _save_recently_opened_entries_task: Task::ready(()),
} }
} }
pub fn thread_from_session_id(&self, session_id: &acp::SessionId) -> Option<&DbThreadMetadata> { pub fn entries(&self, cx: &mut Context<Self>) -> Vec<HistoryEntry> {
self.threads.iter().find(|thread| &thread.id == session_id) let mut history_entries = Vec::new();
}
pub fn load_thread(
&mut self,
id: acp::SessionId,
cx: &mut Context<Self>,
) -> Task<Result<Option<DbThread>>> {
let database_future = ThreadsDatabase::connect(cx);
cx.background_spawn(async move {
let database = database_future.await.map_err(|err| anyhow!(err))?;
database.load_thread(id).await
})
}
pub fn delete_thread(
&mut self,
id: acp::SessionId,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let database_future = ThreadsDatabase::connect(cx);
cx.spawn(async move |this, cx| {
let database = database_future.await.map_err(|err| anyhow!(err))?;
database.delete_thread(id.clone()).await?;
this.update(cx, |this, cx| this.reload(cx))
})
}
pub fn delete_text_thread(
&mut self,
path: Arc<Path>,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
self.text_thread_store
.update(cx, |store, cx| store.delete_local(path, cx))
}
pub fn load_text_thread(
&self,
path: Arc<Path>,
cx: &mut Context<Self>,
) -> Task<Result<Entity<TextThread>>> {
self.text_thread_store
.update(cx, |store, cx| store.open_local(path, cx))
}
pub fn reload(&self, cx: &mut Context<Self>) {
let database_future = ThreadsDatabase::connect(cx);
cx.spawn(async move |this, cx| {
let threads = database_future
.await
.map_err(|err| anyhow!(err))?
.list_threads()
.await?;
this.update(cx, |this, cx| {
if this.recently_opened_entries.len() < MAX_RECENTLY_OPENED_ENTRIES {
for thread in threads
.iter()
.take(MAX_RECENTLY_OPENED_ENTRIES - this.recently_opened_entries.len())
.rev()
{
this.push_recently_opened_entry(
HistoryEntryId::AcpThread(thread.id.clone()),
cx,
)
}
}
this.threads = threads;
this.update_entries(cx);
})
})
.detach_and_log_err(cx);
}
fn update_entries(&mut self, cx: &mut Context<Self>) {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() { if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
return; return history_entries;
} }
let mut history_entries = Vec::new();
history_entries.extend(self.threads.iter().cloned().map(HistoryEntry::AcpThread));
history_entries.extend( history_entries.extend(
self.text_thread_store self.context_store
.read(cx) .read(cx)
.unordered_text_threads() .unordered_contexts()
.cloned() .cloned()
.map(HistoryEntry::TextThread), .map(HistoryEntry::Context),
); );
history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at())); history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at()));
self.entries = history_entries; history_entries
cx.notify()
} }
pub fn is_empty(&self, _cx: &App) -> bool { pub fn recent_entries(&self, limit: usize, cx: &mut Context<Self>) -> Vec<HistoryEntry> {
self.entries.is_empty() self.entries(cx).into_iter().take(limit).collect()
} }
pub fn recently_opened_entries(&self, cx: &App) -> Vec<HistoryEntry> { pub fn recently_opened_entries(&self, cx: &App) -> Vec<HistoryEntry> {
@@ -265,36 +125,23 @@ impl HistoryStore {
return Vec::new(); return Vec::new();
} }
let thread_entries = self.threads.iter().flat_map(|thread| { let context_entries =
self.recently_opened_entries self.context_store
.iter() .read(cx)
.enumerate() .unordered_contexts()
.flat_map(|(index, entry)| match entry { .flat_map(|context| {
HistoryEntryId::AcpThread(id) if &thread.id == id => { self.recently_opened_entries
Some((index, HistoryEntry::AcpThread(thread.clone()))) .iter()
} .enumerate()
_ => None, .flat_map(|(index, entry)| match entry {
}) HistoryEntryId::Context(path) if &context.path == path => {
}); Some((index, HistoryEntry::Context(context.clone())))
}
_ => None,
})
});
let context_entries = self context_entries
.text_thread_store
.read(cx)
.unordered_text_threads()
.flat_map(|text_thread| {
self.recently_opened_entries
.iter()
.enumerate()
.flat_map(|(index, entry)| match entry {
HistoryEntryId::TextThread(path) if &text_thread.path == path => {
Some((index, HistoryEntry::TextThread(text_thread.clone())))
}
_ => None,
})
});
thread_entries
.chain(context_entries)
// optimization to halt iteration early // optimization to halt iteration early
.take(self.recently_opened_entries.len()) .take(self.recently_opened_entries.len())
.sorted_unstable_by_key(|(index, _)| *index) .sorted_unstable_by_key(|(index, _)| *index)
@@ -307,52 +154,59 @@ impl HistoryStore {
.recently_opened_entries .recently_opened_entries
.iter() .iter()
.filter_map(|entry| match entry { .filter_map(|entry| match entry {
HistoryEntryId::TextThread(path) => path.file_name().map(|file| { HistoryEntryId::Context(path) => path.file_name().map(|file| {
SerializedRecentOpen::TextThread(file.to_string_lossy().into_owned()) SerializedRecentOpen::ContextName(file.to_string_lossy().into_owned())
}), }),
HistoryEntryId::AcpThread(id) => { HistoryEntryId::Thread(id) => Some(SerializedRecentOpen::Thread(id.to_string())),
Some(SerializedRecentOpen::AcpThread(id.to_string()))
}
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
self._save_recently_opened_entries_task = cx.spawn(async move |_, cx| { self._save_recently_opened_entries_task = cx.spawn(async move |_, cx| {
let content = serde_json::to_string(&serialized_entries).unwrap();
cx.background_executor() cx.background_executor()
.timer(SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE) .timer(SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE)
.await; .await;
cx.background_spawn(async move {
if cfg!(any(feature = "test-support", test)) { let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
return; let content = serde_json::to_string(&serialized_entries)?;
} std::fs::write(path, content)?;
KEY_VALUE_STORE anyhow::Ok(())
.write_kvp(RECENTLY_OPENED_THREADS_KEY.to_owned(), content) })
.await .await
.log_err(); .log_err();
}); });
} }
fn load_recently_opened_entries(cx: &AsyncApp) -> Task<Result<VecDeque<HistoryEntryId>>> { fn load_recently_opened_entries(cx: &AsyncApp) -> Task<Result<Vec<HistoryEntryId>>> {
cx.background_spawn(async move { cx.background_spawn(async move {
if cfg!(any(feature = "test-support", test)) { let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
anyhow::bail!("history store does not persist in tests"); let contents = match smol::fs::read_to_string(path).await {
} Ok(it) => it,
let json = KEY_VALUE_STORE Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
.read_kvp(RECENTLY_OPENED_THREADS_KEY)? return Ok(Vec::new());
.unwrap_or("[]".to_string()); }
let entries = serde_json::from_str::<Vec<SerializedRecentOpen>>(&json) Err(e) => {
return Err(e)
.context("deserializing persisted agent panel navigation history");
}
};
let entries = serde_json::from_str::<Vec<SerializedRecentOpen>>(&contents)
.context("deserializing persisted agent panel navigation history")? .context("deserializing persisted agent panel navigation history")?
.into_iter() .into_iter()
.take(MAX_RECENTLY_OPENED_ENTRIES) .take(MAX_RECENTLY_OPENED_ENTRIES)
.flat_map(|entry| match entry { .flat_map(|entry| match entry {
SerializedRecentOpen::AcpThread(id) => Some(HistoryEntryId::AcpThread( SerializedRecentOpen::Thread(id) => {
acp::SessionId(id.as_str().into()), Some(HistoryEntryId::Thread(id.as_str().into()))
}
SerializedRecentOpen::ContextName(file_name) => Some(HistoryEntryId::Context(
contexts_dir().join(file_name).into(),
)), )),
SerializedRecentOpen::TextThread(file_name) => Some( SerializedRecentOpen::Context(path) => {
HistoryEntryId::TextThread(text_threads_dir().join(file_name).into()), Path::new(&path).file_name().map(|file_name| {
), HistoryEntryId::Context(contexts_dir().join(file_name).into())
})
}
}) })
.collect(); .collect::<Vec<_>>();
Ok(entries) Ok(entries)
}) })
} }
@@ -366,9 +220,9 @@ impl HistoryStore {
self.save_recently_opened_entries(cx); self.save_recently_opened_entries(cx);
} }
pub fn remove_recently_opened_thread(&mut self, id: acp::SessionId, cx: &mut Context<Self>) { pub fn remove_recently_opened_thread(&mut self, id: ThreadId, cx: &mut Context<Self>) {
self.recently_opened_entries.retain( self.recently_opened_entries.retain(
|entry| !matches!(entry, HistoryEntryId::AcpThread(thread_id) if thread_id == &id), |entry| !matches!(entry, HistoryEntryId::Thread(thread_id) if thread_id == &id),
); );
self.save_recently_opened_entries(cx); self.save_recently_opened_entries(cx);
} }
@@ -381,8 +235,8 @@ impl HistoryStore {
) { ) {
for entry in &mut self.recently_opened_entries { for entry in &mut self.recently_opened_entries {
match entry { match entry {
HistoryEntryId::TextThread(path) if path.as_ref() == old_path => { HistoryEntryId::Context(path) if path.as_ref() == old_path => {
*entry = HistoryEntryId::TextThread(new_path.clone()); *entry = HistoryEntryId::Context(new_path.clone());
break; break;
} }
_ => {} _ => {}
@@ -396,8 +250,4 @@ impl HistoryStore {
.retain(|old_entry| old_entry != entry); .retain(|old_entry| old_entry != entry);
self.save_recently_opened_entries(cx); self.save_recently_opened_entries(cx);
} }
pub fn entries(&self) -> impl Iterator<Item = HistoryEntry> {
self.entries.iter().cloned()
}
} }

View File

@@ -1,402 +0,0 @@
use crate::ProjectSnapshot;
use agent_settings::{AgentProfileId, CompletionMode};
use anyhow::Result;
use chrono::{DateTime, Utc};
use gpui::SharedString;
use language_model::{LanguageModelToolResultContent, LanguageModelToolUseId, Role, TokenUsage};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
/// Lifecycle of a thread's detailed summary.
///
/// Persisted with the thread, so older saves without the field fall back to
/// [`DetailedSummaryState::NotGenerated`] via `#[serde(default)]` at the use site.
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum DetailedSummaryState {
    /// No detailed summary has been produced yet.
    #[default]
    NotGenerated,
    /// A summary is currently being generated.
    Generating,
    /// A summary was produced and cached.
    Generated {
        text: SharedString,
    },
}
/// Identifier of a message within a thread (wraps a `usize`).
// NOTE(review): presumably assigned in message order — not shown here; confirm
// against the code that allocates these ids before relying on ordering.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
pub struct MessageId(pub usize);
/// On-disk representation of an agent thread in the current schema version.
///
/// Fields marked `#[serde(default)]` were added after the format was first
/// shipped, so older saved threads deserialize with default values.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct SerializedThread {
    /// Schema version string; see [`SerializedThread::VERSION`].
    pub version: String,
    /// Short summary/title of the thread (e.g. "Test conversation").
    pub summary: SharedString,
    pub updated_at: DateTime<Utc>,
    pub messages: Vec<SerializedMessage>,
    /// Project snapshot captured when the thread started, if any.
    #[serde(default)]
    pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
    /// Token usage accumulated across the whole thread.
    #[serde(default)]
    pub cumulative_token_usage: TokenUsage,
    /// Per-request token usage history.
    #[serde(default)]
    pub request_token_usage: Vec<TokenUsage>,
    #[serde(default)]
    pub detailed_summary_state: DetailedSummaryState,
    /// Model that was selected when the thread was saved, if recorded.
    #[serde(default)]
    pub model: Option<SerializedLanguageModel>,
    #[serde(default)]
    pub completion_mode: Option<CompletionMode>,
    #[serde(default)]
    pub tool_use_limit_reached: bool,
    #[serde(default)]
    pub profile: Option<AgentProfileId>,
}
/// Provider/model pair identifying the language model a thread was using.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct SerializedLanguageModel {
    pub provider: String,
    pub model: String,
}
impl SerializedThread {
pub const VERSION: &'static str = "0.2.0";
pub fn from_json(json: &[u8]) -> Result<Self> {
let saved_thread_json = serde_json::from_slice::<serde_json::Value>(json)?;
match saved_thread_json.get("version") {
Some(serde_json::Value::String(version)) => match version.as_str() {
SerializedThreadV0_1_0::VERSION => {
let saved_thread =
serde_json::from_value::<SerializedThreadV0_1_0>(saved_thread_json)?;
Ok(saved_thread.upgrade())
}
SerializedThread::VERSION => Ok(serde_json::from_value::<SerializedThread>(
saved_thread_json,
)?),
_ => anyhow::bail!("unrecognized serialized thread version: {version:?}"),
},
None => {
let saved_thread =
serde_json::from_value::<LegacySerializedThread>(saved_thread_json)?;
Ok(saved_thread.upgrade())
}
version => anyhow::bail!("unrecognized serialized thread version: {version:?}"),
}
}
}
/// The 0.1.0 on-disk thread format; upgraded via [`SerializedThreadV0_1_0::upgrade`].
#[derive(Serialize, Deserialize, Debug)]
pub struct SerializedThreadV0_1_0(
    // The structure did not change, so we are reusing the latest SerializedThread.
    // When making the next version, make sure this points to SerializedThreadV0_2_0
    SerializedThread,
);
impl SerializedThreadV0_1_0 {
pub const VERSION: &'static str = "0.1.0";
pub fn upgrade(self) -> SerializedThread {
debug_assert_eq!(SerializedThread::VERSION, "0.2.0");
let mut messages: Vec<SerializedMessage> = Vec::with_capacity(self.0.messages.len());
for message in self.0.messages {
if message.role == Role::User
&& !message.tool_results.is_empty()
&& let Some(last_message) = messages.last_mut()
{
debug_assert!(last_message.role == Role::Assistant);
last_message.tool_results = message.tool_results;
continue;
}
messages.push(message);
}
SerializedThread {
messages,
version: SerializedThread::VERSION.to_string(),
..self.0
}
}
}
/// A single persisted message within a [`SerializedThread`].
#[derive(Debug, Serialize, Deserialize, PartialEq)]
pub struct SerializedMessage {
    pub id: MessageId,
    pub role: Role,
    /// Ordered content segments (text, thinking, redacted thinking).
    #[serde(default)]
    pub segments: Vec<SerializedMessageSegment>,
    /// Tool calls requested by this (assistant) message.
    #[serde(default)]
    pub tool_uses: Vec<SerializedToolUse>,
    /// Results for tool calls requested by this message.
    #[serde(default)]
    pub tool_results: Vec<SerializedToolResult>,
    #[serde(default)]
    pub context: String,
    /// Folded regions in the editor representation of the message.
    #[serde(default)]
    pub creases: Vec<SerializedCrease>,
    #[serde(default)]
    pub is_hidden: bool,
}
/// One piece of a message's content, serialized with an internal `"type"` tag.
///
/// NOTE(review): `Text` and `Thinking` are renamed to lowercase tags, but
/// `RedactedThinking` keeps its default `"RedactedThinking"` tag. Confirm this
/// asymmetry is intentional before changing it — it affects persisted data.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(tag = "type")]
pub enum SerializedMessageSegment {
    /// Plain text content.
    #[serde(rename = "text")]
    Text {
        text: String,
    },
    /// Model "thinking" content, optionally carrying a provider signature.
    #[serde(rename = "thinking")]
    Thinking {
        text: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        signature: Option<String>,
    },
    /// Provider-redacted thinking, kept as opaque data.
    RedactedThinking {
        data: String,
    },
}
/// A persisted tool call: the id/name the model used plus its JSON input.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
pub struct SerializedToolUse {
    pub id: LanguageModelToolUseId,
    pub name: SharedString,
    pub input: serde_json::Value,
}
/// A persisted tool result, keyed back to its tool use by `tool_use_id`.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
pub struct SerializedToolResult {
    pub tool_use_id: LanguageModelToolUseId,
    /// Whether the tool run failed; the failure text lives in `content`.
    pub is_error: bool,
    pub content: LanguageModelToolResultContent,
    /// Tool-specific structured output, if the tool produced any.
    pub output: Option<serde_json::Value>,
}
/// Pre-versioning thread format (no `version` field); read-only, upgraded on load.
#[derive(Serialize, Deserialize)]
struct LegacySerializedThread {
    pub summary: SharedString,
    pub updated_at: DateTime<Utc>,
    pub messages: Vec<LegacySerializedMessage>,
    #[serde(default)]
    pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
}
impl LegacySerializedThread {
pub fn upgrade(self) -> SerializedThread {
SerializedThread {
version: SerializedThread::VERSION.to_string(),
summary: self.summary,
updated_at: self.updated_at,
messages: self.messages.into_iter().map(|msg| msg.upgrade()).collect(),
initial_project_snapshot: self.initial_project_snapshot,
cumulative_token_usage: TokenUsage::default(),
request_token_usage: Vec::new(),
detailed_summary_state: DetailedSummaryState::default(),
model: None,
completion_mode: None,
tool_use_limit_reached: false,
profile: None,
}
}
}
/// Pre-versioning message format: plain `text` instead of content segments.
#[derive(Debug, Serialize, Deserialize)]
struct LegacySerializedMessage {
    pub id: MessageId,
    pub role: Role,
    pub text: String,
    #[serde(default)]
    pub tool_uses: Vec<SerializedToolUse>,
    #[serde(default)]
    pub tool_results: Vec<SerializedToolResult>,
}
impl LegacySerializedMessage {
    /// Converts a legacy message into the current format by wrapping its
    /// plain text in a single `Text` segment and defaulting newer fields.
    fn upgrade(self) -> SerializedMessage {
        let text_segment = SerializedMessageSegment::Text { text: self.text };
        SerializedMessage {
            id: self.id,
            role: self.role,
            segments: vec![text_segment],
            tool_uses: self.tool_uses,
            tool_results: self.tool_results,
            context: String::new(),
            creases: Vec::new(),
            is_hidden: false,
        }
    }
}
/// A folded region of a message, stored as a byte/offset range plus the
/// icon and label shown while folded.
// NOTE(review): whether start/end are byte or char offsets is not visible
// here — confirm against the code that creates creases.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
pub struct SerializedCrease {
    pub start: usize,
    pub end: usize,
    pub icon_path: SharedString,
    pub label: SharedString,
}
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::Utc;
    use language_model::{Role, TokenUsage};
    use pretty_assertions::assert_eq;

    // A pre-versioning thread upgrades to the current schema: its plain text
    // becomes a single `Text` segment and every field added since then takes
    // its default value.
    #[test]
    fn test_legacy_serialized_thread_upgrade() {
        let updated_at = Utc::now();
        let legacy_thread = LegacySerializedThread {
            summary: "Test conversation".into(),
            updated_at,
            messages: vec![LegacySerializedMessage {
                id: MessageId(1),
                role: Role::User,
                text: "Hello, world!".to_string(),
                tool_uses: vec![],
                tool_results: vec![],
            }],
            initial_project_snapshot: None,
        };

        let upgraded = legacy_thread.upgrade();

        assert_eq!(
            upgraded,
            SerializedThread {
                summary: "Test conversation".into(),
                updated_at,
                messages: vec![SerializedMessage {
                    id: MessageId(1),
                    role: Role::User,
                    segments: vec![SerializedMessageSegment::Text {
                        text: "Hello, world!".to_string()
                    }],
                    tool_uses: vec![],
                    tool_results: vec![],
                    context: "".to_string(),
                    creases: vec![],
                    is_hidden: false
                }],
                version: SerializedThread::VERSION.to_string(),
                initial_project_snapshot: None,
                cumulative_token_usage: TokenUsage::default(),
                request_token_usage: vec![],
                detailed_summary_state: DetailedSummaryState::default(),
                model: None,
                completion_mode: None,
                tool_use_limit_reached: false,
                profile: None
            }
        )
    }

    // The 0.1.0 -> 0.2.0 upgrade folds a user message that only carries tool
    // results into the preceding assistant message, dropping the carrier
    // message itself.
    #[test]
    fn test_serialized_threadv0_1_0_upgrade() {
        let updated_at = Utc::now();
        let thread_v0_1_0 = SerializedThreadV0_1_0(SerializedThread {
            summary: "Test conversation".into(),
            updated_at,
            messages: vec![
                SerializedMessage {
                    id: MessageId(1),
                    role: Role::User,
                    segments: vec![SerializedMessageSegment::Text {
                        text: "Use tool_1".to_string(),
                    }],
                    tool_uses: vec![],
                    tool_results: vec![],
                    context: "".to_string(),
                    creases: vec![],
                    is_hidden: false,
                },
                SerializedMessage {
                    id: MessageId(2),
                    role: Role::Assistant,
                    segments: vec![SerializedMessageSegment::Text {
                        text: "I want to use a tool".to_string(),
                    }],
                    tool_uses: vec![SerializedToolUse {
                        id: "abc".into(),
                        name: "tool_1".into(),
                        input: serde_json::Value::Null,
                    }],
                    tool_results: vec![],
                    context: "".to_string(),
                    creases: vec![],
                    is_hidden: false,
                },
                // In 0.1.0, tool results arrived on a follow-up user message.
                SerializedMessage {
                    id: MessageId(1),
                    role: Role::User,
                    segments: vec![SerializedMessageSegment::Text {
                        text: "Here is the tool result".to_string(),
                    }],
                    tool_uses: vec![],
                    tool_results: vec![SerializedToolResult {
                        tool_use_id: "abc".into(),
                        is_error: false,
                        content: LanguageModelToolResultContent::Text("abcdef".into()),
                        output: Some(serde_json::Value::Null),
                    }],
                    context: "".to_string(),
                    creases: vec![],
                    is_hidden: false,
                },
            ],
            version: SerializedThreadV0_1_0::VERSION.to_string(),
            initial_project_snapshot: None,
            cumulative_token_usage: TokenUsage::default(),
            request_token_usage: vec![],
            detailed_summary_state: DetailedSummaryState::default(),
            model: None,
            completion_mode: None,
            tool_use_limit_reached: false,
            profile: None,
        });
        let upgraded = thread_v0_1_0.upgrade();

        assert_eq!(
            upgraded,
            SerializedThread {
                summary: "Test conversation".into(),
                updated_at,
                messages: vec![
                    SerializedMessage {
                        id: MessageId(1),
                        role: Role::User,
                        segments: vec![SerializedMessageSegment::Text {
                            text: "Use tool_1".to_string()
                        }],
                        tool_uses: vec![],
                        tool_results: vec![],
                        context: "".to_string(),
                        creases: vec![],
                        is_hidden: false
                    },
                    // The tool result now lives on the assistant message.
                    SerializedMessage {
                        id: MessageId(2),
                        role: Role::Assistant,
                        segments: vec![SerializedMessageSegment::Text {
                            text: "I want to use a tool".to_string(),
                        }],
                        tool_uses: vec![SerializedToolUse {
                            id: "abc".into(),
                            name: "tool_1".into(),
                            input: serde_json::Value::Null,
                        }],
                        tool_results: vec![SerializedToolResult {
                            tool_use_id: "abc".into(),
                            is_error: false,
                            content: LanguageModelToolResultContent::Text("abcdef".into()),
                            output: Some(serde_json::Value::Null),
                        }],
                        context: "".to_string(),
                        creases: vec![],
                        is_hidden: false,
                    },
                ],
                version: SerializedThread::VERSION.to_string(),
                initial_project_snapshot: None,
                cumulative_token_usage: TokenUsage::default(),
                request_token_usage: vec![],
                detailed_summary_state: DetailedSummaryState::default(),
                model: None,
                completion_mode: None,
                tool_use_limit_reached: false,
                profile: None
            }
        )
    }
}

View File

@@ -0,0 +1,3 @@
[The following is an auto-generated notification; do not reply]
These files have changed since the last read:

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,575 @@
use crate::{
thread::{MessageId, PromptId, ThreadId},
thread_store::SerializedMessage,
};
use agent_settings::CompletionMode;
use anyhow::Result;
use assistant_tool::{
AnyToolCard, Tool, ToolResultContent, ToolResultOutput, ToolUseStatus, ToolWorkingSet,
};
use collections::HashMap;
use futures::{FutureExt as _, future::Shared};
use gpui::{App, Entity, SharedString, Task, Window};
use icons::IconName;
use language_model::{
ConfiguredModel, LanguageModel, LanguageModelExt, LanguageModelRequest,
LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolUse,
LanguageModelToolUseId, Role,
};
use project::Project;
use std::sync::Arc;
use util::truncate_lines_to_byte_limit;
/// Snapshot of a single tool invocation, shaped for display.
#[derive(Debug)]
pub struct ToolUse {
    pub id: LanguageModelToolUseId,
    pub name: SharedString,
    /// Human-readable label describing what the tool is doing.
    pub ui_text: SharedString,
    pub status: ToolUseStatus,
    /// The tool's input arguments as raw JSON.
    pub input: serde_json::Value,
    pub icon: icons::IconName,
    /// Whether the tool requires user confirmation before running.
    pub needs_confirmation: bool,
}
/// Bookkeeping for tool calls across a thread: which assistant message
/// requested each tool, their results, UI cards, pending executions, and
/// telemetry metadata.
pub struct ToolUseState {
    tools: Entity<ToolWorkingSet>,
    /// Tool uses requested by each assistant message.
    tool_uses_by_assistant_message: HashMap<MessageId, Vec<LanguageModelToolUse>>,
    /// Finished (or canceled/errored) results, keyed by tool-use id.
    tool_results: HashMap<LanguageModelToolUseId, LanguageModelToolResult>,
    /// Tool uses that were requested but have not resolved yet.
    pending_tool_uses_by_id: HashMap<LanguageModelToolUseId, PendingToolUse>,
    /// UI cards associated with tool results.
    tool_result_cards: HashMap<LanguageModelToolUseId, AnyToolCard>,
    /// Telemetry metadata captured when each tool use was requested.
    tool_use_metadata_by_id: HashMap<LanguageModelToolUseId, ToolUseMetadata>,
}
impl ToolUseState {
    /// Creates an empty state: no recorded tool uses, results, cards,
    /// pending executions, or metadata.
    pub fn new(tools: Entity<ToolWorkingSet>) -> Self {
        Self {
            tools,
            tool_uses_by_assistant_message: HashMap::default(),
            tool_results: HashMap::default(),
            pending_tool_uses_by_id: HashMap::default(),
            tool_result_cards: HashMap::default(),
            tool_use_metadata_by_id: HashMap::default(),
        }
    }

    /// Constructs a [`ToolUseState`] from the given list of [`SerializedMessage`]s.
    ///
    /// Accepts a function to filter the tools that should be used to populate the state.
    ///
    /// If `window` is `None` (e.g., when in headless mode or when running evals),
    /// tool cards won't be deserialized
    pub fn from_serialized_messages(
        tools: Entity<ToolWorkingSet>,
        messages: &[SerializedMessage],
        project: Entity<Project>,
        window: Option<&mut Window>, // None in headless mode
        cx: &mut App,
    ) -> Self {
        let mut this = Self::new(tools);
        let mut tool_names_by_id = HashMap::default();
        let mut window = window;

        for message in messages {
            match message.role {
                Role::Assistant => {
                    if !message.tool_uses.is_empty() {
                        let tool_uses = message
                            .tool_uses
                            .iter()
                            .map(|tool_use| LanguageModelToolUse {
                                id: tool_use.id.clone(),
                                name: tool_use.name.clone().into(),
                                raw_input: tool_use.input.to_string(),
                                input: tool_use.input.clone(),
                                is_input_complete: true,
                            })
                            .collect::<Vec<_>>();

                        tool_names_by_id.extend(
                            tool_uses
                                .iter()
                                .map(|tool_use| (tool_use.id.clone(), tool_use.name.clone())),
                        );

                        this.tool_uses_by_assistant_message
                            .insert(message.id, tool_uses);

                        for tool_result in &message.tool_results {
                            let tool_use_id = tool_result.tool_use_id.clone();
                            // A result without a matching tool use is skipped
                            // (best effort: the rest of the thread still loads).
                            let Some(tool_use) = tool_names_by_id.get(&tool_use_id) else {
                                log::warn!("no tool name found for tool use: {tool_use_id:?}");
                                continue;
                            };

                            this.tool_results.insert(
                                tool_use_id.clone(),
                                LanguageModelToolResult {
                                    tool_use_id: tool_use_id.clone(),
                                    tool_name: tool_use.clone(),
                                    is_error: tool_result.is_error,
                                    content: tool_result.content.clone(),
                                    output: tool_result.output.clone(),
                                },
                            );

                            // Cards require a window; headless callers skip this.
                            if let Some(window) = &mut window
                                && let Some(tool) = this.tools.read(cx).tool(tool_use, cx)
                                && let Some(output) = tool_result.output.clone()
                                && let Some(card) =
                                    tool.deserialize_card(output, project.clone(), window, cx)
                            {
                                this.tool_result_cards.insert(tool_use_id, card);
                            }
                        }
                    }
                }
                Role::System | Role::User => {}
            }
        }

        this
    }

    /// Cancels every pending tool use that has not already errored, recording
    /// a synthetic "canceled by user" error result for each, and returns the
    /// canceled uses.
    pub fn cancel_pending(&mut self) -> Vec<PendingToolUse> {
        let mut canceled_tool_uses = Vec::new();
        self.pending_tool_uses_by_id
            .retain(|tool_use_id, tool_use| {
                // Already-errored uses keep their original error status.
                if matches!(tool_use.status, PendingToolUseStatus::Error { .. }) {
                    return true;
                }

                let content = "Tool canceled by user".into();
                self.tool_results.insert(
                    tool_use_id.clone(),
                    LanguageModelToolResult {
                        tool_use_id: tool_use_id.clone(),
                        tool_name: tool_use.name.clone(),
                        content,
                        output: None,
                        is_error: true,
                    },
                );
                canceled_tool_uses.push(tool_use.clone());
                false
            });
        canceled_tool_uses
    }

    /// Returns all tool uses that have not yet resolved.
    pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
        self.pending_tool_uses_by_id.values().collect()
    }

    /// Returns display-ready [`ToolUse`]s for the given assistant message,
    /// deriving each status from the recorded result (if finished) or the
    /// pending state (if still in flight).
    pub fn tool_uses_for_message(
        &self,
        id: MessageId,
        project: &Entity<Project>,
        cx: &App,
    ) -> Vec<ToolUse> {
        let Some(tool_uses_for_message) = &self.tool_uses_by_assistant_message.get(&id) else {
            return Vec::new();
        };

        let mut tool_uses = Vec::new();

        for tool_use in tool_uses_for_message.iter() {
            let tool_result = self.tool_results.get(&tool_use.id);

            let status = (|| {
                // A recorded result takes precedence over any pending state.
                if let Some(tool_result) = tool_result {
                    let content = tool_result
                        .content
                        .to_str()
                        .map(|str| str.to_owned().into())
                        .unwrap_or_default();

                    return if tool_result.is_error {
                        ToolUseStatus::Error(content)
                    } else {
                        ToolUseStatus::Finished(content)
                    };
                }

                if let Some(pending_tool_use) = self.pending_tool_uses_by_id.get(&tool_use.id) {
                    match pending_tool_use.status {
                        PendingToolUseStatus::Idle => ToolUseStatus::Pending,
                        PendingToolUseStatus::NeedsConfirmation { .. } => {
                            ToolUseStatus::NeedsConfirmation
                        }
                        PendingToolUseStatus::Running { .. } => ToolUseStatus::Running,
                        PendingToolUseStatus::Error(ref err) => {
                            ToolUseStatus::Error(err.clone().into())
                        }
                        PendingToolUseStatus::InputStillStreaming => {
                            ToolUseStatus::InputStillStreaming
                        }
                    }
                } else {
                    ToolUseStatus::Pending
                }
            })();

            // Unknown tools (e.g. from a removed extension) get a generic icon.
            let (icon, needs_confirmation) =
                if let Some(tool) = self.tools.read(cx).tool(&tool_use.name, cx) {
                    (
                        tool.icon(),
                        tool.needs_confirmation(&tool_use.input, project, cx),
                    )
                } else {
                    (IconName::Cog, false)
                };

            tool_uses.push(ToolUse {
                id: tool_use.id.clone(),
                name: tool_use.name.clone().into(),
                ui_text: self.tool_ui_label(
                    &tool_use.name,
                    &tool_use.input,
                    tool_use.is_input_complete,
                    cx,
                ),
                input: tool_use.input.clone(),
                status,
                icon,
                needs_confirmation,
            })
        }

        tool_uses
    }

    /// Returns the human-readable label for a tool call, falling back to a
    /// streaming-specific label while the input is still arriving and to an
    /// "Unknown tool" label when the tool is not registered.
    pub fn tool_ui_label(
        &self,
        tool_name: &str,
        input: &serde_json::Value,
        is_input_complete: bool,
        cx: &App,
    ) -> SharedString {
        if let Some(tool) = self.tools.read(cx).tool(tool_name, cx) {
            if is_input_complete {
                tool.ui_text(input).into()
            } else {
                tool.still_streaming_ui_text(input).into()
            }
        } else {
            format!("Unknown tool {tool_name:?}").into()
        }
    }

    /// Returns the results recorded so far for tools requested by the given
    /// assistant message; tool uses without a result are omitted.
    pub fn tool_results_for_message(
        &self,
        assistant_message_id: MessageId,
    ) -> Vec<&LanguageModelToolResult> {
        let Some(tool_uses) = self
            .tool_uses_by_assistant_message
            .get(&assistant_message_id)
        else {
            return Vec::new();
        };

        tool_uses
            .iter()
            .filter_map(|tool_use| self.tool_results.get(&tool_use.id))
            .collect()
    }

    /// Whether the given assistant message requested at least one tool.
    // NOTE(review): despite the name, this checks requested tool *uses*, not
    // recorded results — confirm callers expect that.
    pub fn message_has_tool_results(&self, assistant_message_id: MessageId) -> bool {
        self.tool_uses_by_assistant_message
            .get(&assistant_message_id)
            .is_some_and(|results| !results.is_empty())
    }

    /// Returns the recorded result for a tool use, if it has finished.
    pub fn tool_result(
        &self,
        tool_use_id: &LanguageModelToolUseId,
    ) -> Option<&LanguageModelToolResult> {
        self.tool_results.get(tool_use_id)
    }

    /// Returns the UI card associated with a tool result, if any.
    pub fn tool_result_card(&self, tool_use_id: &LanguageModelToolUseId) -> Option<&AnyToolCard> {
        self.tool_result_cards.get(tool_use_id)
    }

    /// Associates a UI card with a tool use, replacing any existing card.
    pub fn insert_tool_result_card(
        &mut self,
        tool_use_id: LanguageModelToolUseId,
        card: AnyToolCard,
    ) {
        self.tool_result_cards.insert(tool_use_id, card);
    }

    /// Records a tool use requested by an assistant message and marks it
    /// pending, returning the label to display for it.
    ///
    /// If a tool use with the same id was already recorded for this message
    /// (streaming delivers the same use repeatedly), it is updated in place.
    /// Telemetry metadata is only stored once the input is complete.
    pub fn request_tool_use(
        &mut self,
        assistant_message_id: MessageId,
        tool_use: LanguageModelToolUse,
        metadata: ToolUseMetadata,
        cx: &App,
    ) -> Arc<str> {
        let tool_uses = self
            .tool_uses_by_assistant_message
            .entry(assistant_message_id)
            .or_default();

        let mut existing_tool_use_found = false;

        for existing_tool_use in tool_uses.iter_mut() {
            if existing_tool_use.id == tool_use.id {
                *existing_tool_use = tool_use.clone();
                existing_tool_use_found = true;
            }
        }

        if !existing_tool_use_found {
            tool_uses.push(tool_use.clone());
        }

        let status = if tool_use.is_input_complete {
            self.tool_use_metadata_by_id
                .insert(tool_use.id.clone(), metadata);

            PendingToolUseStatus::Idle
        } else {
            PendingToolUseStatus::InputStillStreaming
        };

        let ui_text: Arc<str> = self
            .tool_ui_label(
                &tool_use.name,
                &tool_use.input,
                tool_use.is_input_complete,
                cx,
            )
            .into();

        let may_perform_edits = self
            .tools
            .read(cx)
            .tool(&tool_use.name, cx)
            .is_some_and(|tool| tool.may_perform_edits());

        self.pending_tool_uses_by_id.insert(
            tool_use.id.clone(),
            PendingToolUse {
                assistant_message_id,
                id: tool_use.id,
                name: tool_use.name.clone(),
                ui_text: ui_text.clone(),
                input: tool_use.input,
                may_perform_edits,
                status,
            },
        );

        ui_text
    }

    /// Marks a pending tool use as running, keeping the task alive for the
    /// duration of its execution.
    pub fn run_pending_tool(
        &mut self,
        tool_use_id: LanguageModelToolUseId,
        ui_text: SharedString,
        task: Task<()>,
    ) {
        if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
            tool_use.ui_text = ui_text.into();
            tool_use.status = PendingToolUseStatus::Running {
                _task: task.shared(),
            };
        }
    }

    /// Marks a pending tool use as blocked on user confirmation, capturing
    /// everything needed to run it once approved.
    pub fn confirm_tool_use(
        &mut self,
        tool_use_id: LanguageModelToolUseId,
        ui_text: impl Into<Arc<str>>,
        input: serde_json::Value,
        request: Arc<LanguageModelRequest>,
        tool: Arc<dyn Tool>,
    ) {
        if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
            let ui_text = ui_text.into();
            tool_use.ui_text = ui_text.clone();
            let confirmation = Confirmation {
                tool_use_id,
                input,
                request,
                tool,
                ui_text,
            };
            tool_use.status = PendingToolUseStatus::NeedsConfirmation(Arc::new(confirmation));
        }
    }

    /// Records the outcome of a tool run, emitting telemetry and truncating
    /// oversized output so it fits the model's context budget.
    ///
    /// On success the pending entry is removed and returned; on failure the
    /// pending entry is kept with an `Error` status (and returned) so the UI
    /// can still show it.
    pub fn insert_tool_output(
        &mut self,
        tool_use_id: LanguageModelToolUseId,
        tool_name: Arc<str>,
        output: Result<ToolResultOutput>,
        configured_model: Option<&ConfiguredModel>,
        completion_mode: CompletionMode,
    ) -> Option<PendingToolUse> {
        let metadata = self.tool_use_metadata_by_id.remove(&tool_use_id);

        telemetry::event!(
            "Agent Tool Finished",
            model = metadata
                .as_ref()
                .map(|metadata| metadata.model.telemetry_id()),
            model_provider = metadata
                .as_ref()
                .map(|metadata| metadata.model.provider_id().to_string()),
            thread_id = metadata.as_ref().map(|metadata| metadata.thread_id.clone()),
            prompt_id = metadata.as_ref().map(|metadata| metadata.prompt_id.clone()),
            tool_name,
            success = output.is_ok()
        );

        match output {
            Ok(output) => {
                let tool_result = output.content;
                const BYTES_PER_TOKEN_ESTIMATE: usize = 3;

                let old_use = self.pending_tool_uses_by_id.remove(&tool_use_id);

                // Protect from overly large output
                let tool_output_limit = configured_model
                    .map(|model| {
                        model.model.max_token_count_for_mode(completion_mode.into()) as usize
                            * BYTES_PER_TOKEN_ESTIMATE
                    })
                    .unwrap_or(usize::MAX);

                let content = match tool_result {
                    ToolResultContent::Text(text) => {
                        let text = if text.len() < tool_output_limit {
                            text
                        } else {
                            let truncated = truncate_lines_to_byte_limit(&text, tool_output_limit);

                            format!(
                                "Tool result too long. The first {} bytes:\n\n{}",
                                truncated.len(),
                                truncated
                            )
                        };

                        LanguageModelToolResultContent::Text(text.into())
                    }
                    ToolResultContent::Image(language_model_image) => {
                        // NOTE(review): this compares an estimated *token*
                        // count against a *byte* limit — confirm that mixing
                        // the units here is intentional.
                        if language_model_image.estimate_tokens() < tool_output_limit {
                            LanguageModelToolResultContent::Image(language_model_image)
                        } else {
                            self.tool_results.insert(
                                tool_use_id.clone(),
                                LanguageModelToolResult {
                                    tool_use_id: tool_use_id.clone(),
                                    tool_name,
                                    content: "Tool responded with an image that would exceed the remaining tokens".into(),
                                    is_error: true,
                                    output: None,
                                },
                            );

                            return old_use;
                        }
                    }
                };

                self.tool_results.insert(
                    tool_use_id.clone(),
                    LanguageModelToolResult {
                        tool_use_id: tool_use_id.clone(),
                        tool_name,
                        content,
                        is_error: false,
                        output: output.output,
                    },
                );

                old_use
            }
            Err(err) => {
                self.tool_results.insert(
                    tool_use_id.clone(),
                    LanguageModelToolResult {
                        tool_use_id: tool_use_id.clone(),
                        tool_name,
                        content: LanguageModelToolResultContent::Text(err.to_string().into()),
                        is_error: true,
                        output: None,
                    },
                );

                if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
                    tool_use.status = PendingToolUseStatus::Error(err.to_string().into());
                }

                self.pending_tool_uses_by_id.get(&tool_use_id).cloned()
            }
        }
    }

    /// Whether any tool uses were recorded for the given assistant message
    /// (checks key presence only; results may still be pending).
    pub fn has_tool_results(&self, assistant_message_id: MessageId) -> bool {
        self.tool_uses_by_assistant_message
            .contains_key(&assistant_message_id)
    }

    /// Iterates the tool uses of an assistant message paired with their
    /// results (`None` for uses that have not finished).
    pub fn tool_results(
        &self,
        assistant_message_id: MessageId,
    ) -> impl Iterator<Item = (&LanguageModelToolUse, Option<&LanguageModelToolResult>)> {
        self.tool_uses_by_assistant_message
            .get(&assistant_message_id)
            .into_iter()
            .flatten()
            .map(|tool_use| (tool_use, self.tool_results.get(&tool_use.id)))
    }
}
/// A tool use that has been requested but has not yet produced a result.
#[derive(Debug, Clone)]
pub struct PendingToolUse {
    pub id: LanguageModelToolUseId,
    /// The ID of the Assistant message in which the tool use was requested.
    #[allow(unused)]
    pub assistant_message_id: MessageId,
    pub name: Arc<str>,
    /// Human-readable label shown while the tool runs.
    pub ui_text: Arc<str>,
    pub input: serde_json::Value,
    pub status: PendingToolUseStatus,
    /// Whether the tool reports that it may edit files (from `Tool::may_perform_edits`).
    pub may_perform_edits: bool,
}
/// Everything needed to run a tool once the user confirms it.
#[derive(Debug, Clone)]
pub struct Confirmation {
    pub tool_use_id: LanguageModelToolUseId,
    pub input: serde_json::Value,
    pub ui_text: Arc<str>,
    /// The language-model request that produced this tool use.
    pub request: Arc<LanguageModelRequest>,
    pub tool: Arc<dyn Tool>,
}
/// Lifecycle of a pending tool use.
#[derive(Debug, Clone)]
pub enum PendingToolUseStatus {
    /// The model is still streaming the tool's input.
    InputStillStreaming,
    /// Input is complete; the tool is waiting to be scheduled.
    Idle,
    /// Blocked on explicit user confirmation.
    NeedsConfirmation(Arc<Confirmation>),
    /// Currently executing; the task is kept alive here.
    Running { _task: Shared<Task<()>> },
    /// Finished with the contained error message.
    Error(#[allow(unused)] Arc<str>),
}
impl PendingToolUseStatus {
pub fn is_idle(&self) -> bool {
matches!(self, PendingToolUseStatus::Idle)
}
pub fn is_error(&self) -> bool {
matches!(self, PendingToolUseStatus::Error(_))
}
pub fn needs_confirmation(&self) -> bool {
matches!(self, PendingToolUseStatus::NeedsConfirmation { .. })
}
}
/// Context attached to a tool use for telemetry/attribution purposes.
#[derive(Clone)]
pub struct ToolUseMetadata {
    /// The model that requested the tool use.
    pub model: Arc<dyn LanguageModel>,
    /// The thread in which the tool use occurred.
    pub thread_id: ThreadId,
    /// The prompt that triggered the tool use.
    pub prompt_id: PromptId,
}

View File

@@ -1,94 +0,0 @@
mod context_server_registry;
mod copy_path_tool;
mod create_directory_tool;
mod delete_path_tool;
mod diagnostics_tool;
mod edit_file_tool;
mod fetch_tool;
mod find_path_tool;
mod grep_tool;
mod list_directory_tool;
mod move_path_tool;
mod now_tool;
mod open_tool;
mod read_file_tool;
mod terminal_tool;
mod thinking_tool;
mod web_search_tool;
use crate::AgentTool;
use language_model::{LanguageModelRequestTool, LanguageModelToolSchemaFormat};
pub use context_server_registry::*;
pub use copy_path_tool::*;
pub use create_directory_tool::*;
pub use delete_path_tool::*;
pub use diagnostics_tool::*;
pub use edit_file_tool::*;
pub use fetch_tool::*;
pub use find_path_tool::*;
pub use grep_tool::*;
pub use list_directory_tool::*;
pub use move_path_tool::*;
pub use now_tool::*;
pub use open_tool::*;
pub use read_file_tool::*;
pub use terminal_tool::*;
pub use thinking_tool::*;
pub use web_search_tool::*;
// Generates the registry functions over the full list of built-in tool types.
// Invoked once below with every built-in tool; keeps the two functions in sync
// with a single source-of-truth list.
macro_rules! tools {
    ($($tool:ty),* $(,)?) => {
        /// A list of all built-in tool names
        pub fn supported_built_in_tool_names(provider: Option<language_model::LanguageModelProviderId>) -> impl Iterator<Item = String> {
            [
                $(
                    // With a provider, include the tool only if it supports that
                    // provider; with no provider, include every tool.
                    (if let Some(provider) = provider.as_ref() {
                        <$tool>::supports_provider(provider)
                    } else {
                        true
                    })
                    .then(|| <$tool>::name().to_string()),
                )*
            ]
            .into_iter()
            // Drop the `None`s left by unsupported tools.
            .flatten()
        }
        /// A list of all built-in tools
        pub fn built_in_tools() -> impl Iterator<Item = LanguageModelRequestTool> {
            // Builds the request-side descriptor for a single tool type.
            fn language_model_tool<T: AgentTool>() -> LanguageModelRequestTool {
                LanguageModelRequestTool {
                    name: T::name().to_string(),
                    description: T::description().to_string(),
                    input_schema: T::input_schema(LanguageModelToolSchemaFormat::JsonSchema).to_value(),
                }
            }
            [
                $(
                    language_model_tool::<$tool>(),
                )*
            ]
            .into_iter()
        }
    };
}
// The single authoritative list of built-in tools; expands into
// `supported_built_in_tool_names` and `built_in_tools` above.
tools! {
    CopyPathTool,
    CreateDirectoryTool,
    DeletePathTool,
    DiagnosticsTool,
    EditFileTool,
    FetchTool,
    FindPathTool,
    GrepTool,
    ListDirectoryTool,
    MovePathTool,
    NowTool,
    OpenTool,
    ReadFileTool,
    TerminalTool,
    ThinkingTool,
    WebSearchTool,
}

102
crates/agent2/Cargo.toml Normal file
View File

@@ -0,0 +1,102 @@
[package]
name = "agent2"
version = "0.1.0"
edition.workspace = true
publish.workspace = true
license = "GPL-3.0-or-later"
[lib]
path = "src/agent2.rs"
[features]
test-support = ["db/test-support"]
e2e = []
[lints]
workspace = true
[dependencies]
acp_thread.workspace = true
action_log.workspace = true
agent.workspace = true
agent-client-protocol.workspace = true
agent_servers.workspace = true
agent_settings.workspace = true
anyhow.workspace = true
assistant_context.workspace = true
assistant_tool.workspace = true
assistant_tools.workspace = true
chrono.workspace = true
client.workspace = true
cloud_llm_client.workspace = true
collections.workspace = true
context_server.workspace = true
db.workspace = true
fs.workspace = true
futures.workspace = true
git.workspace = true
gpui.workspace = true
handlebars = { workspace = true, features = ["rust-embed"] }
html_to_markdown.workspace = true
http_client.workspace = true
indoc.workspace = true
itertools.workspace = true
language.workspace = true
language_model.workspace = true
language_models.workspace = true
log.workspace = true
open.workspace = true
parking_lot.workspace = true
paths.workspace = true
project.workspace = true
prompt_store.workspace = true
rust-embed.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smol.workspace = true
sqlez.workspace = true
task.workspace = true
telemetry.workspace = true
terminal.workspace = true
thiserror.workspace = true
text.workspace = true
ui.workspace = true
util.workspace = true
uuid.workspace = true
watch.workspace = true
web_search.workspace = true
workspace-hack.workspace = true
zed_env_vars.workspace = true
zstd.workspace = true
[dev-dependencies]
agent = { workspace = true, "features" = ["test-support"] }
agent_servers = { workspace = true, "features" = ["test-support"] }
assistant_context = { workspace = true, "features" = ["test-support"] }
ctor.workspace = true
client = { workspace = true, "features" = ["test-support"] }
clock = { workspace = true, "features" = ["test-support"] }
context_server = { workspace = true, "features" = ["test-support"] }
db = { workspace = true, "features" = ["test-support"] }
editor = { workspace = true, "features" = ["test-support"] }
env_logger.workspace = true
fs = { workspace = true, "features" = ["test-support"] }
git = { workspace = true, "features" = ["test-support"] }
gpui = { workspace = true, "features" = ["test-support"] }
gpui_tokio.workspace = true
language = { workspace = true, "features" = ["test-support"] }
language_model = { workspace = true, "features" = ["test-support"] }
lsp = { workspace = true, "features" = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, "features" = ["test-support"] }
reqwest_client.workspace = true
settings = { workspace = true, "features" = ["test-support"] }
tempfile.workspace = true
terminal = { workspace = true, "features" = ["test-support"] }
theme = { workspace = true, "features" = ["test-support"] }
tree-sitter-rust.workspace = true
unindent = { workspace = true }
worktree = { workspace = true, "features" = ["test-support"] }
zlog.workspace = true

1589
crates/agent2/src/agent.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,19 @@
mod agent;
mod db;
mod history_store;
mod native_agent_server;
mod templates;
mod thread;
mod tool_schema;
mod tools;
#[cfg(test)]
mod tests;
pub use agent::*;
pub use db::*;
pub use history_store::*;
pub use native_agent_server::NativeAgentServer;
pub use templates::*;
pub use thread::*;
pub use tools::*;

View File

@@ -1,5 +1,6 @@
use crate::{AgentMessage, AgentMessageContent, UserMessage, UserMessageContent}; use crate::{AgentMessage, AgentMessageContent, UserMessage, UserMessageContent};
use acp_thread::UserMessageId; use acp_thread::UserMessageId;
use agent::{thread::DetailedSummaryState, thread_store};
use agent_client_protocol as acp; use agent_client_protocol as acp;
use agent_settings::{AgentProfileId, CompletionMode}; use agent_settings::{AgentProfileId, CompletionMode};
use anyhow::{Result, anyhow}; use anyhow::{Result, anyhow};
@@ -20,8 +21,8 @@ use ui::{App, SharedString};
use zed_env_vars::ZED_STATELESS; use zed_env_vars::ZED_STATELESS;
pub type DbMessage = crate::Message; pub type DbMessage = crate::Message;
pub type DbSummary = crate::legacy_thread::DetailedSummaryState; pub type DbSummary = DetailedSummaryState;
pub type DbLanguageModel = crate::legacy_thread::SerializedLanguageModel; pub type DbLanguageModel = thread_store::SerializedLanguageModel;
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DbThreadMetadata { pub struct DbThreadMetadata {
@@ -39,7 +40,7 @@ pub struct DbThread {
#[serde(default)] #[serde(default)]
pub detailed_summary: Option<SharedString>, pub detailed_summary: Option<SharedString>,
#[serde(default)] #[serde(default)]
pub initial_project_snapshot: Option<Arc<crate::ProjectSnapshot>>, pub initial_project_snapshot: Option<Arc<agent::thread::ProjectSnapshot>>,
#[serde(default)] #[serde(default)]
pub cumulative_token_usage: language_model::TokenUsage, pub cumulative_token_usage: language_model::TokenUsage,
#[serde(default)] #[serde(default)]
@@ -60,17 +61,13 @@ impl DbThread {
match saved_thread_json.get("version") { match saved_thread_json.get("version") {
Some(serde_json::Value::String(version)) => match version.as_str() { Some(serde_json::Value::String(version)) => match version.as_str() {
Self::VERSION => Ok(serde_json::from_value(saved_thread_json)?), Self::VERSION => Ok(serde_json::from_value(saved_thread_json)?),
_ => Self::upgrade_from_agent_1(crate::legacy_thread::SerializedThread::from_json( _ => Self::upgrade_from_agent_1(agent::SerializedThread::from_json(json)?),
json,
)?),
}, },
_ => { _ => Self::upgrade_from_agent_1(agent::SerializedThread::from_json(json)?),
Self::upgrade_from_agent_1(crate::legacy_thread::SerializedThread::from_json(json)?)
}
} }
} }
fn upgrade_from_agent_1(thread: crate::legacy_thread::SerializedThread) -> Result<Self> { fn upgrade_from_agent_1(thread: agent::SerializedThread) -> Result<Self> {
let mut messages = Vec::new(); let mut messages = Vec::new();
let mut request_token_usage = HashMap::default(); let mut request_token_usage = HashMap::default();
@@ -83,19 +80,14 @@ impl DbThread {
// Convert segments to content // Convert segments to content
for segment in msg.segments { for segment in msg.segments {
match segment { match segment {
crate::legacy_thread::SerializedMessageSegment::Text { text } => { thread_store::SerializedMessageSegment::Text { text } => {
content.push(UserMessageContent::Text(text)); content.push(UserMessageContent::Text(text));
} }
crate::legacy_thread::SerializedMessageSegment::Thinking { thread_store::SerializedMessageSegment::Thinking { text, .. } => {
text,
..
} => {
// User messages don't have thinking segments, but handle gracefully // User messages don't have thinking segments, but handle gracefully
content.push(UserMessageContent::Text(text)); content.push(UserMessageContent::Text(text));
} }
crate::legacy_thread::SerializedMessageSegment::RedactedThinking { thread_store::SerializedMessageSegment::RedactedThinking { .. } => {
..
} => {
// User messages don't have redacted thinking, skip. // User messages don't have redacted thinking, skip.
} }
} }
@@ -121,18 +113,16 @@ impl DbThread {
// Convert segments to content // Convert segments to content
for segment in msg.segments { for segment in msg.segments {
match segment { match segment {
crate::legacy_thread::SerializedMessageSegment::Text { text } => { thread_store::SerializedMessageSegment::Text { text } => {
content.push(AgentMessageContent::Text(text)); content.push(AgentMessageContent::Text(text));
} }
crate::legacy_thread::SerializedMessageSegment::Thinking { thread_store::SerializedMessageSegment::Thinking {
text, text,
signature, signature,
} => { } => {
content.push(AgentMessageContent::Thinking { text, signature }); content.push(AgentMessageContent::Thinking { text, signature });
} }
crate::legacy_thread::SerializedMessageSegment::RedactedThinking { thread_store::SerializedMessageSegment::RedactedThinking { data } => {
data,
} => {
content.push(AgentMessageContent::RedactedThinking(data)); content.push(AgentMessageContent::RedactedThinking(data));
} }
} }
@@ -197,9 +187,10 @@ impl DbThread {
messages, messages,
updated_at: thread.updated_at, updated_at: thread.updated_at,
detailed_summary: match thread.detailed_summary_state { detailed_summary: match thread.detailed_summary_state {
crate::legacy_thread::DetailedSummaryState::NotGenerated DetailedSummaryState::NotGenerated | DetailedSummaryState::Generating { .. } => {
| crate::legacy_thread::DetailedSummaryState::Generating => None, None
crate::legacy_thread::DetailedSummaryState::Generated { text, .. } => Some(text), }
DetailedSummaryState::Generated { text, .. } => Some(text),
}, },
initial_project_snapshot: thread.initial_project_snapshot, initial_project_snapshot: thread.initial_project_snapshot,
cumulative_token_usage: thread.cumulative_token_usage, cumulative_token_usage: thread.cumulative_token_usage,
@@ -423,3 +414,84 @@ impl ThreadsDatabase {
}) })
} }
} }
#[cfg(test)]
mod tests {
    use super::*;
    use agent::MessageSegment;
    use agent::context::LoadedContext;
    use client::Client;
    use fs::{FakeFs, Fs};
    use gpui::AppContext;
    use gpui::TestAppContext;
    use http_client::FakeHttpClient;
    use language_model::Role;
    use project::Project;
    use settings::SettingsStore;
    // Installs the minimal global state (settings, language, client, agent)
    // needed for thread persistence in a test app context.
    fn init_test(fs: Arc<dyn Fs>, cx: &mut TestAppContext) {
        env_logger::try_init().ok();
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            Project::init_settings(cx);
            language::init(cx);
            let http_client = FakeHttpClient::with_404_response();
            let clock = Arc::new(clock::FakeSystemClock::new());
            let client = Client::new(clock, http_client, cx);
            agent::init(fs, cx);
            agent_settings::init(cx);
            language_model::init(client, cx);
        });
    }
    // Round-trip test: a thread saved with the legacy (agent 1) serializer must
    // load through the new database's upgrade path with its messages intact.
    #[gpui::test]
    async fn test_retrieving_old_thread(cx: &mut TestAppContext) {
        let fs = FakeFs::new(cx.executor());
        init_test(fs.clone(), cx);
        let project = Project::test(fs, [], cx).await;
        // Save a thread using the old agent.
        let thread_store = cx.new(|cx| agent::ThreadStore::fake(project, cx));
        let thread = thread_store.update(cx, |thread_store, cx| thread_store.create_thread(cx));
        thread.update(cx, |thread, cx| {
            thread.insert_message(
                Role::User,
                vec![MessageSegment::Text("Hey!".into())],
                LoadedContext::default(),
                vec![],
                false,
                cx,
            );
            thread.insert_message(
                Role::Assistant,
                vec![MessageSegment::Text("How're you doing?".into())],
                LoadedContext::default(),
                vec![],
                false,
                cx,
            )
        });
        thread_store
            .update(cx, |thread_store, cx| thread_store.save_thread(&thread, cx))
            .await
            .unwrap();
        // Open that same thread using the new agent.
        let db = cx.update(ThreadsDatabase::connect).await.unwrap();
        let threads = db.list_threads().await.unwrap();
        assert_eq!(threads.len(), 1);
        let thread = db
            .load_thread(threads[0].id.clone())
            .await
            .unwrap()
            .unwrap();
        // Both messages survive the upgrade with role and text preserved.
        assert_eq!(thread.messages[0].to_markdown(), "## User\n\nHey!\n");
        assert_eq!(
            thread.messages[1].to_markdown(),
            "## Assistant\n\nHow're you doing?\n"
        );
    }
}

View File

@@ -0,0 +1,357 @@
use crate::{DbThreadMetadata, ThreadsDatabase};
use acp_thread::MentionUri;
use agent_client_protocol as acp;
use anyhow::{Context as _, Result, anyhow};
use assistant_context::{AssistantContext, SavedContextMetadata};
use chrono::{DateTime, Utc};
use db::kvp::KEY_VALUE_STORE;
use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::*};
use itertools::Itertools;
use paths::contexts_dir;
use serde::{Deserialize, Serialize};
use std::{collections::VecDeque, path::Path, sync::Arc, time::Duration};
use ui::ElementId;
use util::ResultExt as _;
const MAX_RECENTLY_OPENED_ENTRIES: usize = 6;
const RECENTLY_OPENED_THREADS_KEY: &str = "recent-agent-threads";
const SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE: Duration = Duration::from_millis(50);
const DEFAULT_TITLE: &SharedString = &SharedString::new_static("New Thread");
/// A single item in the agent panel's history: either a new-style ACP thread
/// (stored in the threads database) or a legacy text thread (stored on disk).
#[derive(Clone, Debug)]
pub enum HistoryEntry {
    /// A thread persisted in [`ThreadsDatabase`].
    AcpThread(DbThreadMetadata),
    /// A legacy text-thread context file.
    TextThread(SavedContextMetadata),
}
impl HistoryEntry {
    /// Last-updated time, normalized to UTC so both variants sort together.
    pub fn updated_at(&self) -> DateTime<Utc> {
        match self {
            HistoryEntry::AcpThread(thread) => thread.updated_at,
            HistoryEntry::TextThread(context) => context.mtime.to_utc(),
        }
    }
    /// Stable identifier for this entry (session id or file path).
    pub fn id(&self) -> HistoryEntryId {
        match self {
            HistoryEntry::AcpThread(thread) => HistoryEntryId::AcpThread(thread.id.clone()),
            HistoryEntry::TextThread(context) => HistoryEntryId::TextThread(context.path.clone()),
        }
    }
    /// URI used when @-mentioning this entry from a prompt.
    pub fn mention_uri(&self) -> MentionUri {
        match self {
            HistoryEntry::AcpThread(thread) => MentionUri::Thread {
                id: thread.id.clone(),
                name: thread.title.to_string(),
            },
            HistoryEntry::TextThread(context) => MentionUri::TextThread {
                path: context.path.as_ref().to_owned(),
                name: context.title.to_string(),
            },
        }
    }
    /// Display title; untitled ACP threads fall back to the static default.
    pub fn title(&self) -> &SharedString {
        match self {
            HistoryEntry::AcpThread(thread) if thread.title.is_empty() => DEFAULT_TITLE,
            HistoryEntry::AcpThread(thread) => &thread.title,
            HistoryEntry::TextThread(context) => &context.title,
        }
    }
}
/// Generic identifier for a history entry.
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub enum HistoryEntryId {
    /// Keyed by the ACP session id.
    AcpThread(acp::SessionId),
    /// Keyed by the text thread's file path.
    TextThread(Arc<Path>),
}
/// Conversion into a UI element id so history entries can key list items.
///
/// Implemented as `From` rather than `Into` per Rust convention (clippy
/// `from_over_into`); the standard blanket `impl<T, U: From<T>> Into<U> for T`
/// keeps every existing `entry_id.into()` call site working unchanged.
impl From<HistoryEntryId> for ElementId {
    fn from(id: HistoryEntryId) -> Self {
        match id {
            HistoryEntryId::AcpThread(session_id) => ElementId::Name(session_id.0.into()),
            HistoryEntryId::TextThread(path) => ElementId::Path(path),
        }
    }
}
/// On-disk form of a recently-opened entry, persisted in the key-value store.
/// AcpThread carries a session id; TextThread carries only the file name
/// (re-rooted under `contexts_dir()` on load).
#[derive(Serialize, Deserialize, Debug)]
enum SerializedRecentOpen {
    AcpThread(String),
    TextThread(String),
}
/// Aggregates agent-panel history from two sources — the ACP threads database
/// and the legacy text-thread context store — and tracks recently-opened entries.
pub struct HistoryStore {
    // Thread metadata loaded from ThreadsDatabase on `reload`.
    threads: Vec<DbThreadMetadata>,
    // Merged, recency-sorted view over threads and text threads.
    entries: Vec<HistoryEntry>,
    context_store: Entity<assistant_context::ContextStore>,
    // Most-recent-first, capped at MAX_RECENTLY_OPENED_ENTRIES.
    recently_opened_entries: VecDeque<HistoryEntryId>,
    _subscriptions: Vec<gpui::Subscription>,
    // Debounced persistence task; replaced on every save request.
    _save_recently_opened_entries_task: Task<()>,
}
impl HistoryStore {
    /// Creates the store, subscribes to context-store changes, and kicks off an
    /// async load of the persisted recently-opened list followed by a reload.
    pub fn new(
        context_store: Entity<assistant_context::ContextStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let subscriptions = vec![cx.observe(&context_store, |this, _, cx| this.update_entries(cx))];
        cx.spawn(async move |this, cx| {
            let entries = Self::load_recently_opened_entries(cx).await;
            this.update(cx, |this, cx| {
                if let Some(entries) = entries.log_err() {
                    this.recently_opened_entries = entries;
                }
                this.reload(cx);
            })
            .ok();
        })
        .detach();
        Self {
            context_store,
            recently_opened_entries: VecDeque::default(),
            threads: Vec::default(),
            entries: Vec::default(),
            _subscriptions: subscriptions,
            _save_recently_opened_entries_task: Task::ready(()),
        }
    }
    /// Looks up loaded thread metadata by ACP session id.
    pub fn thread_from_session_id(&self, session_id: &acp::SessionId) -> Option<&DbThreadMetadata> {
        self.threads.iter().find(|thread| &thread.id == session_id)
    }
    /// Deletes a thread from the database, then reloads the history.
    pub fn delete_thread(
        &mut self,
        id: acp::SessionId,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let database_future = ThreadsDatabase::connect(cx);
        cx.spawn(async move |this, cx| {
            let database = database_future.await.map_err(|err| anyhow!(err))?;
            database.delete_thread(id.clone()).await?;
            this.update(cx, |this, cx| this.reload(cx))
        })
    }
    /// Deletes a legacy text thread via the context store; entries refresh
    /// through the context-store observation subscription.
    pub fn delete_text_thread(
        &mut self,
        path: Arc<Path>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        self.context_store.update(cx, |context_store, cx| {
            context_store.delete_local_context(path, cx)
        })
    }
    /// Opens a legacy text thread from disk via the context store.
    pub fn load_text_thread(
        &self,
        path: Arc<Path>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<AssistantContext>>> {
        self.context_store.update(cx, |context_store, cx| {
            context_store.open_local_context(path, cx)
        })
    }
    /// Re-lists threads from the database and rebuilds the merged entry list.
    /// Errors are logged, not surfaced.
    pub fn reload(&self, cx: &mut Context<Self>) {
        let database_future = ThreadsDatabase::connect(cx);
        cx.spawn(async move |this, cx| {
            let threads = database_future
                .await
                .map_err(|err| anyhow!(err))?
                .list_threads()
                .await?;
            this.update(cx, |this, cx| {
                // Backfill the recently-opened list with the newest threads
                // until it reaches capacity (e.g. on first run with no
                // persisted list). `.rev()` pushes oldest first so the newest
                // ends up at the front of the deque.
                if this.recently_opened_entries.len() < MAX_RECENTLY_OPENED_ENTRIES {
                    for thread in threads
                        .iter()
                        .take(MAX_RECENTLY_OPENED_ENTRIES - this.recently_opened_entries.len())
                        .rev()
                    {
                        this.push_recently_opened_entry(
                            HistoryEntryId::AcpThread(thread.id.clone()),
                            cx,
                        )
                    }
                }
                this.threads = threads;
                this.update_entries(cx);
            })
        })
        .detach_and_log_err(cx);
    }
    /// Rebuilds `entries` by merging database threads and context-store text
    /// threads, newest first, and notifies observers.
    fn update_entries(&mut self, cx: &mut Context<Self>) {
        // Debug-only escape hatch to simulate an empty history in the UI.
        #[cfg(debug_assertions)]
        if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
            return;
        }
        let mut history_entries = Vec::new();
        history_entries.extend(self.threads.iter().cloned().map(HistoryEntry::AcpThread));
        history_entries.extend(
            self.context_store
                .read(cx)
                .unordered_contexts()
                .cloned()
                .map(HistoryEntry::TextThread),
        );
        history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at()));
        self.entries = history_entries;
        cx.notify()
    }
    /// Whether the merged history is empty.
    pub fn is_empty(&self, _cx: &App) -> bool {
        self.entries.is_empty()
    }
    /// Resolves the recently-opened id list to full entries, preserving the
    /// recency order and silently dropping ids that no longer exist.
    pub fn recently_opened_entries(&self, cx: &App) -> Vec<HistoryEntry> {
        #[cfg(debug_assertions)]
        if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
            return Vec::new();
        }
        // Pair each surviving entry with its position in the recency list so
        // the combined results can be re-sorted into recency order below.
        let thread_entries = self.threads.iter().flat_map(|thread| {
            self.recently_opened_entries
                .iter()
                .enumerate()
                .flat_map(|(index, entry)| match entry {
                    HistoryEntryId::AcpThread(id) if &thread.id == id => {
                        Some((index, HistoryEntry::AcpThread(thread.clone())))
                    }
                    _ => None,
                })
        });
        let context_entries =
            self.context_store
                .read(cx)
                .unordered_contexts()
                .flat_map(|context| {
                    self.recently_opened_entries
                        .iter()
                        .enumerate()
                        .flat_map(|(index, entry)| match entry {
                            HistoryEntryId::TextThread(path) if &context.path == path => {
                                Some((index, HistoryEntry::TextThread(context.clone())))
                            }
                            _ => None,
                        })
                });
        thread_entries
            .chain(context_entries)
            // optimization to halt iteration early
            .take(self.recently_opened_entries.len())
            .sorted_unstable_by_key(|(index, _)| *index)
            .map(|(_, entry)| entry)
            .collect()
    }
    /// Persists the recently-opened list to the key-value store, debounced so
    /// rapid successive updates coalesce into one write. Skipped in tests.
    fn save_recently_opened_entries(&mut self, cx: &mut Context<Self>) {
        let serialized_entries = self
            .recently_opened_entries
            .iter()
            .filter_map(|entry| match entry {
                // Only the file name is stored; the path is re-rooted under
                // `contexts_dir()` on load.
                HistoryEntryId::TextThread(path) => path.file_name().map(|file| {
                    SerializedRecentOpen::TextThread(file.to_string_lossy().into_owned())
                }),
                HistoryEntryId::AcpThread(id) => {
                    Some(SerializedRecentOpen::AcpThread(id.to_string()))
                }
            })
            .collect::<Vec<_>>();
        // Replacing the task cancels any still-debouncing previous save.
        self._save_recently_opened_entries_task = cx.spawn(async move |_, cx| {
            let content = serde_json::to_string(&serialized_entries).unwrap();
            cx.background_executor()
                .timer(SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE)
                .await;
            if cfg!(any(feature = "test-support", test)) {
                return;
            }
            KEY_VALUE_STORE
                .write_kvp(RECENTLY_OPENED_THREADS_KEY.to_owned(), content)
                .await
                .log_err();
        });
    }
    /// Loads the persisted recently-opened list; fails deliberately in tests
    /// so test runs never read developer state.
    fn load_recently_opened_entries(cx: &AsyncApp) -> Task<Result<VecDeque<HistoryEntryId>>> {
        cx.background_spawn(async move {
            if cfg!(any(feature = "test-support", test)) {
                anyhow::bail!("history store does not persist in tests");
            }
            let json = KEY_VALUE_STORE
                .read_kvp(RECENTLY_OPENED_THREADS_KEY)?
                .unwrap_or("[]".to_string());
            let entries = serde_json::from_str::<Vec<SerializedRecentOpen>>(&json)
                .context("deserializing persisted agent panel navigation history")?
                .into_iter()
                .take(MAX_RECENTLY_OPENED_ENTRIES)
                .flat_map(|entry| match entry {
                    SerializedRecentOpen::AcpThread(id) => Some(HistoryEntryId::AcpThread(
                        acp::SessionId(id.as_str().into()),
                    )),
                    SerializedRecentOpen::TextThread(file_name) => Some(
                        HistoryEntryId::TextThread(contexts_dir().join(file_name).into()),
                    ),
                })
                .collect();
            Ok(entries)
        })
    }
    /// Moves (or inserts) `entry` to the front of the recently-opened list,
    /// truncates to capacity, and schedules a save.
    pub fn push_recently_opened_entry(&mut self, entry: HistoryEntryId, cx: &mut Context<Self>) {
        self.recently_opened_entries
            .retain(|old_entry| old_entry != &entry);
        self.recently_opened_entries.push_front(entry);
        self.recently_opened_entries
            .truncate(MAX_RECENTLY_OPENED_ENTRIES);
        self.save_recently_opened_entries(cx);
    }
    /// Drops the thread with `id` from the recently-opened list.
    pub fn remove_recently_opened_thread(&mut self, id: acp::SessionId, cx: &mut Context<Self>) {
        self.recently_opened_entries.retain(
            |entry| !matches!(entry, HistoryEntryId::AcpThread(thread_id) if thread_id == &id),
        );
        self.save_recently_opened_entries(cx);
    }
    /// Rewrites a text-thread path in the recently-opened list after a rename,
    /// updating at most the first match.
    pub fn replace_recently_opened_text_thread(
        &mut self,
        old_path: &Path,
        new_path: &Arc<Path>,
        cx: &mut Context<Self>,
    ) {
        for entry in &mut self.recently_opened_entries {
            match entry {
                HistoryEntryId::TextThread(path) if path.as_ref() == old_path => {
                    *entry = HistoryEntryId::TextThread(new_path.clone());
                    break;
                }
                _ => {}
            }
        }
        self.save_recently_opened_entries(cx);
    }
    /// Removes an arbitrary entry from the recently-opened list.
    pub fn remove_recently_opened_entry(&mut self, entry: &HistoryEntryId, cx: &mut Context<Self>) {
        self.recently_opened_entries
            .retain(|old_entry| old_entry != entry);
        self.save_recently_opened_entries(cx);
    }
    /// Iterates the merged history, newest first.
    pub fn entries(&self) -> impl Iterator<Item = HistoryEntry> {
        self.entries.iter().cloned()
    }
}

View File

@@ -81,7 +81,7 @@ impl AgentServer for NativeAgentServer {
mod tests { mod tests {
use super::*; use super::*;
use assistant_text_thread::TextThreadStore; use assistant_context::ContextStore;
use gpui::AppContext; use gpui::AppContext;
agent_servers::e2e_tests::common_e2e_tests!( agent_servers::e2e_tests::common_e2e_tests!(
@@ -116,9 +116,8 @@ mod tests {
}); });
let history = cx.update(|cx| { let history = cx.update(|cx| {
let text_thread_store = let context_store = cx.new(move |cx| ContextStore::fake(project.clone(), cx));
cx.new(move |cx| TextThreadStore::fake(project.clone(), cx)); cx.new(move |cx| HistoryStore::new(context_store, cx))
cx.new(move |cx| HistoryStore::new(text_thread_store, cx))
}); });
NativeAgentServer::new(fs.clone(), history) NativeAgentServer::new(fs.clone(), history)

View File

@@ -38,7 +38,6 @@ pub struct SystemPromptTemplate<'a> {
#[serde(flatten)] #[serde(flatten)]
pub project: &'a prompt_store::ProjectContext, pub project: &'a prompt_store::ProjectContext,
pub available_tools: Vec<SharedString>, pub available_tools: Vec<SharedString>,
pub model_name: Option<String>,
} }
impl Template for SystemPromptTemplate<'_> { impl Template for SystemPromptTemplate<'_> {
@@ -80,11 +79,9 @@ mod tests {
let template = SystemPromptTemplate { let template = SystemPromptTemplate {
project: &project, project: &project,
available_tools: vec!["echo".into()], available_tools: vec!["echo".into()],
model_name: Some("test-model".to_string()),
}; };
let templates = Templates::new(); let templates = Templates::new();
let rendered = template.render(&templates).unwrap(); let rendered = template.render(&templates).unwrap();
assert!(rendered.contains("## Fixing Diagnostics")); assert!(rendered.contains("## Fixing Diagnostics"));
assert!(rendered.contains("test-model"));
} }
} }

View File

@@ -150,12 +150,6 @@ Otherwise, follow debugging best practices:
Operating System: {{os}} Operating System: {{os}}
Default Shell: {{shell}} Default Shell: {{shell}}
{{#if model_name}}
## Model Information
You are powered by the model named {{model_name}}.
{{/if}}
{{#if (or has_rules has_user_rules)}} {{#if (or has_rules has_user_rules)}}
## User's Custom Instructions ## User's Custom Instructions

View File

@@ -160,42 +160,6 @@ async fn test_system_prompt(cx: &mut TestAppContext) {
); );
} }
#[gpui::test]
async fn test_system_prompt_without_tools(cx: &mut TestAppContext) {
let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
let fake_model = model.as_fake();
thread
.update(cx, |thread, cx| {
thread.send(UserMessageId::new(), ["abc"], cx)
})
.unwrap();
cx.run_until_parked();
let mut pending_completions = fake_model.pending_completions();
assert_eq!(
pending_completions.len(),
1,
"unexpected pending completions: {:?}",
pending_completions
);
let pending_completion = pending_completions.pop().unwrap();
assert_eq!(pending_completion.messages[0].role, Role::System);
let system_message = &pending_completion.messages[0];
let system_prompt = system_message.content[0].to_str().unwrap();
assert!(
!system_prompt.contains("## Tool Use"),
"unexpected system message: {:?}",
system_message
);
assert!(
!system_prompt.contains("## Fixing Diagnostics"),
"unexpected system message: {:?}",
system_message
);
}
#[gpui::test] #[gpui::test]
async fn test_prompt_caching(cx: &mut TestAppContext) { async fn test_prompt_caching(cx: &mut TestAppContext) {
let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
@@ -1011,9 +975,9 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
vec![context_server::types::Tool { vec![context_server::types::Tool {
name: "echo".into(), name: "echo".into(),
description: None, description: None,
input_schema: serde_json::to_value(EchoTool::input_schema( input_schema: serde_json::to_value(
LanguageModelToolSchemaFormat::JsonSchema, EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema),
)) )
.unwrap(), .unwrap(),
output_schema: None, output_schema: None,
annotations: None, annotations: None,
@@ -1185,9 +1149,9 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) {
context_server::types::Tool { context_server::types::Tool {
name: "echo".into(), // Conflicts with native EchoTool name: "echo".into(), // Conflicts with native EchoTool
description: None, description: None,
input_schema: serde_json::to_value(EchoTool::input_schema( input_schema: serde_json::to_value(
LanguageModelToolSchemaFormat::JsonSchema, EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema),
)) )
.unwrap(), .unwrap(),
output_schema: None, output_schema: None,
annotations: None, annotations: None,
@@ -1210,9 +1174,9 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) {
context_server::types::Tool { context_server::types::Tool {
name: "echo".into(), // Also conflicts with native EchoTool name: "echo".into(), // Also conflicts with native EchoTool
description: None, description: None,
input_schema: serde_json::to_value(EchoTool::input_schema( input_schema: serde_json::to_value(
LanguageModelToolSchemaFormat::JsonSchema, EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema),
)) )
.unwrap(), .unwrap(),
output_schema: None, output_schema: None,
annotations: None, annotations: None,
@@ -1870,9 +1834,8 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
fake_fs.insert_tree(path!("/test"), json!({})).await; fake_fs.insert_tree(path!("/test"), json!({})).await;
let project = Project::test(fake_fs.clone(), [Path::new("/test")], cx).await; let project = Project::test(fake_fs.clone(), [Path::new("/test")], cx).await;
let cwd = Path::new("/test"); let cwd = Path::new("/test");
let text_thread_store = let context_store = cx.new(|cx| assistant_context::ContextStore::fake(project.clone(), cx));
cx.new(|cx| assistant_text_thread::TextThreadStore::fake(project.clone(), cx)); let history_store = cx.new(|cx| HistoryStore::new(context_store, cx));
let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
// Create agent and connection // Create agent and connection
let agent = NativeAgent::new( let agent = NativeAgent::new(
@@ -1901,7 +1864,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
let selector_opt = connection.model_selector(&session_id); let selector_opt = connection.model_selector(&session_id);
assert!( assert!(
selector_opt.is_some(), selector_opt.is_some(),
"agent should always support ModelSelector" "agent2 should always support ModelSelector"
); );
let selector = selector_opt.unwrap(); let selector = selector_opt.unwrap();
@@ -2032,7 +1995,7 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
locations: vec![], locations: vec![],
raw_input: Some(json!({})), raw_input: Some(json!({})),
raw_output: None, raw_output: None,
meta: Some(json!({ "tool_name": "thinking" })), meta: None,
} }
); );
let update = expect_tool_call_update_fields(&mut events).await; let update = expect_tool_call_update_fields(&mut events).await;

2642
crates/agent2/src/thread.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,43 @@
use language_model::LanguageModelToolSchemaFormat;
use schemars::{
JsonSchema, Schema,
generate::SchemaSettings,
transform::{Transform, transform_subschemas},
};
/// Generates a JSON schema for `T` in the requested dialect.
///
/// `JsonSchema` uses plain draft-07 output; `JsonSchemaSubset` targets models
/// that accept only a restricted schema vocabulary, so it uses OpenAPI 3
/// settings (no meta-schema, subschemas inlined) plus a transform that strips
/// unsupported constructs (see [`ToJsonSchemaSubsetTransform`]).
pub(crate) fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
    let mut generator = match format {
        LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(),
        LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3()
            .with(|settings| {
                settings.meta_schema = None;
                settings.inline_subschemas = true;
            })
            .with_transform(ToJsonSchemaSubsetTransform)
            .into_generator(),
    };
    generator.root_schema_for::<T>()
}
/// Schema transform that rewrites draft-07/OpenAPI constructs unsupported by
/// the "JSON schema subset" dialect some model providers require.
#[derive(Debug, Clone)]
struct ToJsonSchemaSubsetTransform;
impl Transform for ToJsonSchemaSubsetTransform {
    /// Recursively normalizes `schema` in place for the subset dialect.
    fn transform(&mut self, schema: &mut Schema) {
        // Ensure that the type field is not an array; this happens when we use
        // Option<T>: the type becomes [T, "null"]. The subset dialect only
        // accepts a single type, so keep the first (non-null) entry.
        if let Some(type_field) = schema.get_mut("type")
            && let Some(types) = type_field.as_array()
            && let Some(first_type) = types.first()
        {
            *type_field = first_type.clone();
        }
        // oneOf is not supported; anyOf is a semantically looser but accepted
        // replacement, so move the subschemas across.
        if let Some(one_of) = schema.remove("oneOf") {
            schema.insert("anyOf".to_string(), one_of);
        }
        // Apply the same rewrites to all nested subschemas.
        transform_subschemas(self, schema);
    }
}

View File

@@ -0,0 +1,60 @@
mod context_server_registry;
mod copy_path_tool;
mod create_directory_tool;
mod delete_path_tool;
mod diagnostics_tool;
mod edit_file_tool;
mod fetch_tool;
mod find_path_tool;
mod grep_tool;
mod list_directory_tool;
mod move_path_tool;
mod now_tool;
mod open_tool;
mod read_file_tool;
mod terminal_tool;
mod thinking_tool;
mod web_search_tool;
/// Returns the names of every built-in agent tool.
///
/// Used to deduplicate MCP-provided tool names against the built-in set:
/// an MCP tool whose name collides with one of these must be renamed
/// before registration.
pub fn default_tool_names() -> impl Iterator<Item = &'static str> {
    // NOTE(review): this list is maintained by hand — presumably it must
    // stay in sync with the tool modules declared above; confirm when
    // adding a new tool.
    let names = [
        CopyPathTool::name(),
        CreateDirectoryTool::name(),
        DeletePathTool::name(),
        DiagnosticsTool::name(),
        EditFileTool::name(),
        FetchTool::name(),
        FindPathTool::name(),
        GrepTool::name(),
        ListDirectoryTool::name(),
        MovePathTool::name(),
        NowTool::name(),
        OpenTool::name(),
        ReadFileTool::name(),
        TerminalTool::name(),
        ThinkingTool::name(),
        WebSearchTool::name(),
    ];
    names.into_iter()
}
pub use context_server_registry::*;
pub use copy_path_tool::*;
pub use create_directory_tool::*;
pub use delete_path_tool::*;
pub use diagnostics_tool::*;
pub use edit_file_tool::*;
pub use fetch_tool::*;
pub use find_path_tool::*;
pub use grep_tool::*;
pub use list_directory_tool::*;
pub use move_path_tool::*;
pub use now_tool::*;
pub use open_tool::*;
pub use read_file_tool::*;
pub use terminal_tool::*;
pub use thinking_tool::*;
pub use web_search_tool::*;
use crate::AgentTool;

View File

@@ -32,17 +32,6 @@ impl ContextServerRegistry {
this this
} }
pub fn tools_for_server(
&self,
server_id: &ContextServerId,
) -> impl Iterator<Item = &Arc<dyn AnyAgentTool>> {
self.registered_servers
.get(server_id)
.map(|server| server.tools.values())
.into_iter()
.flatten()
}
pub fn servers( pub fn servers(
&self, &self,
) -> impl Iterator< ) -> impl Iterator<
@@ -165,7 +154,7 @@ impl AnyAgentTool for ContextServerTool {
format: language_model::LanguageModelToolSchemaFormat, format: language_model::LanguageModelToolSchemaFormat,
) -> Result<serde_json::Value> { ) -> Result<serde_json::Value> {
let mut schema = self.tool.input_schema.clone(); let mut schema = self.tool.input_schema.clone();
crate::tool_schema::adapt_schema_to_format(&mut schema, format)?; assistant_tool::adapt_schema_to_format(&mut schema, format)?;
Ok(match schema { Ok(match schema {
serde_json::Value::Null => { serde_json::Value::Null => {
serde_json::json!({ "type": "object", "properties": [] }) serde_json::json!({ "type": "object", "properties": [] })

View File

@@ -1,10 +1,8 @@
use crate::{ use crate::{AgentTool, Thread, ToolCallEventStream};
AgentTool, Templates, Thread, ToolCallEventStream,
edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat},
};
use acp_thread::Diff; use acp_thread::Diff;
use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields};
use anyhow::{Context as _, Result, anyhow}; use anyhow::{Context as _, Result, anyhow};
use assistant_tools::edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat};
use cloud_llm_client::CompletionIntent; use cloud_llm_client::CompletionIntent;
use collections::HashSet; use collections::HashSet;
use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity}; use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity};
@@ -36,7 +34,7 @@ const DEFAULT_UI_TEXT: &str = "Editing file";
/// ///
/// 2. Verify the directory path is correct (only applicable when creating new files): /// 2. Verify the directory path is correct (only applicable when creating new files):
/// - Use the `list_directory` tool to verify the parent directory exists and is the correct location /// - Use the `list_directory` tool to verify the parent directory exists and is the correct location
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct EditFileToolInput { pub struct EditFileToolInput {
/// A one-line, user-friendly markdown description of the edit. This will be shown in the UI and also passed to another model to perform the edit. /// A one-line, user-friendly markdown description of the edit. This will be shown in the UI and also passed to another model to perform the edit.
/// ///
@@ -77,7 +75,7 @@ pub struct EditFileToolInput {
pub mode: EditFileMode, pub mode: EditFileMode,
} }
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
struct EditFileToolPartialInput { struct EditFileToolPartialInput {
#[serde(default)] #[serde(default)]
path: String, path: String,
@@ -125,7 +123,6 @@ pub struct EditFileTool {
thread: WeakEntity<Thread>, thread: WeakEntity<Thread>,
language_registry: Arc<LanguageRegistry>, language_registry: Arc<LanguageRegistry>,
project: Entity<Project>, project: Entity<Project>,
templates: Arc<Templates>,
} }
impl EditFileTool { impl EditFileTool {
@@ -133,13 +130,11 @@ impl EditFileTool {
project: Entity<Project>, project: Entity<Project>,
thread: WeakEntity<Thread>, thread: WeakEntity<Thread>,
language_registry: Arc<LanguageRegistry>, language_registry: Arc<LanguageRegistry>,
templates: Arc<Templates>,
) -> Self { ) -> Self {
Self { Self {
project, project,
thread, thread,
language_registry, language_registry,
templates,
} }
} }
@@ -299,7 +294,8 @@ impl AgentTool for EditFileTool {
model, model,
project.clone(), project.clone(),
action_log.clone(), action_log.clone(),
self.templates.clone(), // TODO: move edit agent to this crate so we can use our templates
assistant_tools::templates::Templates::new(),
edit_format, edit_format,
); );
@@ -569,7 +565,6 @@ mod tests {
use prompt_store::ProjectContext; use prompt_store::ProjectContext;
use serde_json::json; use serde_json::json;
use settings::SettingsStore; use settings::SettingsStore;
use text::Rope;
use util::{path, rel_path::rel_path}; use util::{path, rel_path::rel_path};
#[gpui::test] #[gpui::test]
@@ -604,7 +599,6 @@ mod tests {
project, project,
thread.downgrade(), thread.downgrade(),
language_registry, language_registry,
Templates::new(),
)) ))
.run(input, ToolCallEventStream::test().0, cx) .run(input, ToolCallEventStream::test().0, cx)
}) })
@@ -742,7 +736,7 @@ mod tests {
// Create the file // Create the file
fs.save( fs.save(
path!("/root/src/main.rs").as_ref(), path!("/root/src/main.rs").as_ref(),
&Rope::from_str_small("initial content"), &"initial content".into(),
language::LineEnding::Unix, language::LineEnding::Unix,
) )
.await .await
@@ -796,7 +790,7 @@ mod tests {
store.update_user_settings(cx, |settings| { store.update_user_settings(cx, |settings| {
settings.project.all_languages.defaults.format_on_save = Some(FormatOnSave::On); settings.project.all_languages.defaults.format_on_save = Some(FormatOnSave::On);
settings.project.all_languages.defaults.formatter = settings.project.all_languages.defaults.formatter =
Some(language::language_settings::FormatterList::default()); Some(language::language_settings::SelectedFormatter::Auto);
}); });
}); });
}); });
@@ -813,7 +807,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
language_registry.clone(), language_registry.clone(),
Templates::new(),
)) ))
.run(input, ToolCallEventStream::test().0, cx) .run(input, ToolCallEventStream::test().0, cx)
}); });
@@ -872,7 +865,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
language_registry, language_registry,
Templates::new(),
)) ))
.run(input, ToolCallEventStream::test().0, cx) .run(input, ToolCallEventStream::test().0, cx)
}); });
@@ -909,7 +901,7 @@ mod tests {
// Create a simple file with trailing whitespace // Create a simple file with trailing whitespace
fs.save( fs.save(
path!("/root/src/main.rs").as_ref(), path!("/root/src/main.rs").as_ref(),
&Rope::from_str_small("initial content"), &"initial content".into(),
language::LineEnding::Unix, language::LineEnding::Unix,
) )
.await .await
@@ -959,7 +951,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
language_registry.clone(), language_registry.clone(),
Templates::new(),
)) ))
.run(input, ToolCallEventStream::test().0, cx) .run(input, ToolCallEventStream::test().0, cx)
}); });
@@ -1014,7 +1005,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
language_registry, language_registry,
Templates::new(),
)) ))
.run(input, ToolCallEventStream::test().0, cx) .run(input, ToolCallEventStream::test().0, cx)
}); });
@@ -1067,7 +1057,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
language_registry, language_registry,
Templates::new(),
)); ));
fs.insert_tree("/root", json!({})).await; fs.insert_tree("/root", json!({})).await;
@@ -1208,7 +1197,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
language_registry, language_registry,
Templates::new(),
)); ));
// Test global config paths - these should require confirmation if they exist and are outside the project // Test global config paths - these should require confirmation if they exist and are outside the project
@@ -1321,7 +1309,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
language_registry, language_registry,
Templates::new(),
)); ));
// Test files in different worktrees // Test files in different worktrees
@@ -1406,7 +1393,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
language_registry, language_registry,
Templates::new(),
)); ));
// Test edge cases // Test edge cases
@@ -1496,7 +1482,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
language_registry, language_registry,
Templates::new(),
)); ));
// Test different EditFileMode values // Test different EditFileMode values
@@ -1581,7 +1566,6 @@ mod tests {
project, project,
thread.downgrade(), thread.downgrade(),
language_registry, language_registry,
Templates::new(),
)); ));
cx.update(|cx| { cx.update(|cx| {
@@ -1669,7 +1653,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
languages.clone(), languages.clone(),
Templates::new(),
)); ));
let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
let edit = cx.update(|cx| { let edit = cx.update(|cx| {
@@ -1699,7 +1682,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
languages.clone(), languages.clone(),
Templates::new(),
)); ));
let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
let edit = cx.update(|cx| { let edit = cx.update(|cx| {
@@ -1727,7 +1709,6 @@ mod tests {
project.clone(), project.clone(),
thread.downgrade(), thread.downgrade(),
languages.clone(), languages.clone(),
Templates::new(),
)); ));
let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
let edit = cx.update(|cx| { let edit = cx.update(|cx| {

Some files were not shown because too many files have changed in this diff Show More