Compare commits: lua-run-cl...v0.178.3-p (148 commits)
| SHA1 |
|---|
| e9261f5cc3 |
| 7d3ea98d50 |
| 1e979eb53a |
| ef71b65b31 |
| a664c5d4d2 |
| 6793a275d1 |
| 2497ede1a0 |
| 1c600880f6 |
| d4062a8537 |
| fe46a11dab |
| 8d2843c6ff |
| ee65485197 |
| d0354400f0 |
| d7c5808af1 |
| 7dd9bab46f |
| 73640a4797 |
| 52a854ba6e |
| c26e354fce |
| 5f087750fe |
| ab04489eb3 |
| 20456e0b0f |
| 01f39e99cc |
| 391eb380b5 |
| cdfa3dd922 |
| 7364f81172 |
| 44447e288c |
| fc76d08057 |
| be0595a5ca |
| 1b6421b3c9 |
| cb675c773f |
| 209f1da94a |
| 96e1eeda41 |
| 4994b020e2 |
| 5bc7479fb8 |
| 7b821e9e97 |
| 6259ad559b |
| 8d259a9dbe |
| 010c5a2c4e |
| 45b126a977 |
| 5f74297576 |
| 349f57381f |
| 41eb586ec8 |
| 6bf6fcaa51 |
| 6e89537830 |
| 669c6a3d5e |
| 910531bc33 |
| 690f26cf8b |
| 6b56fee6b0 |
| d94001f445 |
| 6bcfc4014b |
| 47a89ad243 |
| f3f97895a9 |
| 30afba50a9 |
| 036c123488 |
| 050f5f6723 |
| 2cd970f137 |
| d6255fb3d2 |
| f9a66ecaed |
| cfb9a4beb0 |
| 9902cd54ce |
| 96510b72b8 |
| a364a13458 |
| 09a4cfd307 |
| 5d66c3db85 |
| 28f33d0103 |
| 55a90f576a |
| 8d6abf6537 |
| 04961a0186 |
| fd7ab20ea4 |
| 7019aca59d |
| d43bcc04db |
| 2b94a35aaa |
| e8208643bb |
| a90f80725f |
| 4e6c37d23b |
| 0cf6259fec |
| 5cb5e92185 |
| da61a28839 |
| efdb769f9b |
| 9cce5a650e |
| 2021ca5bff |
| 1771250b04 |
| 18259c0fd4 |
| 41ddd1cc97 |
| e175878008 |
| 1cfbfc199c |
| f59f2caf7e |
| 401342c6ec |
| 0df1e4a489 |
| 9bd3e156f5 |
| 42c655751b |
| ff1d78df3b |
| c2e4fdf63d |
| bf11b888c3 |
| d562f58e76 |
| 94e4aa626d |
| 8ceba89d81 |
| c37d6d5fed |
| 1a3597d726 |
| c747cccde3 |
| d81e7683ea |
| 8b29ee6033 |
| 96a75e08af |
| 06cbff6714 |
| ce05813e7c |
| 4d1d8d6d78 |
| 1f8b14f4f1 |
| 082cc6184c |
| 6cfc4dc857 |
| b9c48685e8 |
| 570c396e84 |
| 5fd034e604 |
| 63dab5f891 |
| a2d6df3ed6 |
| 30e86ac939 |
| 976fc3ee97 |
| 63091459d8 |
| 659fae70f8 |
| 02e970192f |
| 5ecc67f2ef |
| 73dfb10c16 |
| e513e81046 |
| 2fc4dec58f |
| 3891381d3e |
| b91e929086 |
| 013a646799 |
| ed52e759d7 |
| 6da099a9d7 |
| 5f159bc95e |
| a4462577bf |
| c147b58558 |
| 84fe1bfe9b |
| 657d7a911d |
| ee05cc3ad9 |
| 5ed144f9d2 |
| 2a862b3c54 |
| 4a7c84f490 |
| 230e2e4107 |
| d732b8ba0f |
| 7c3eecc9c7 |
| fff37ab823 |
| 8a7a78fafb |
| 6de3ac3e17 |
| 5aae3bdc69 |
| e298301b40 |
| ed6bf7f161 |
| f14d6670ba |
| 22d9b5d8ca |
.github/workflows/ci.yml (vendored): 187 changed lines
@@ -23,9 +23,47 @@ env:
  RUST_BACKTRACE: 1

jobs:
  job_spec:
    name: Decide which jobs to run
    if: github.repository_owner == 'zed-industries'
    outputs:
      run_tests: ${{ steps.filter.outputs.run_tests }}
    runs-on:
      - ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          # 350 is arbitrary; ~10days of history on main (5secs); full history is ~25secs
          fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
      - name: Fetch git history and generate output filters
        id: filter
        run: |
          if [ -z "$GITHUB_BASE_REF" ]; then
            echo "Not in a PR context (i.e., push to main/stable/preview)"
            COMPARE_REV=$(git rev-parse HEAD~1)
          else
            echo "In a PR context comparing to pull_request.base.ref"
            git fetch origin "$GITHUB_BASE_REF" --depth=350
            COMPARE_REV=$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)
          fi
          if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep -v "^docs/") ]]; then
            echo "run_tests=true" >> $GITHUB_OUTPUT
          else
            echo "run_tests=false" >> $GITHUB_OUTPUT
          fi
          if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep '^Cargo.lock') ]]; then
            echo "run_license=true" >> $GITHUB_OUTPUT
          else
            echo "run_license=false" >> $GITHUB_OUTPUT
          fi

  migration_checks:
    name: Check Postgres and Protobuf migrations, mergability
    if: github.repository_owner == 'zed-industries'
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    timeout-minutes: 60
    runs-on:
      - self-hosted
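As an aside (not part of the diff): the change-detection logic above can be approximated locally with plain git. This is a minimal sketch under the assumption that the base branch has already been fetched; `BASE` stands in for `$GITHUB_BASE_REF`, and the echoed names mirror the workflow's `run_tests`/`run_license` outputs.

```bash
#!/usr/bin/env bash
# Local approximation of the job_spec filter (assumes a clone with origin/$BASE available).
BASE="${1:-main}"                                  # stand-in for $GITHUB_BASE_REF
COMPARE_REV=$(git merge-base "origin/$BASE" HEAD)

# run_tests: any changed file outside docs/ means the test jobs should run
if git diff --name-only "$COMPARE_REV" HEAD | grep -qv '^docs/'; then
  echo "run_tests=true"
else
  echo "run_tests=false"
fi

# run_license: only a Cargo.lock change triggers license regeneration
if git diff --name-only "$COMPARE_REV" HEAD | grep -q '^Cargo.lock'; then
  echo "run_license=true"
else
  echo "run_license=false"
fi
```

Run it from the repository root; on a push to main the workflow itself instead compares against `HEAD~1`, as shown in the step above.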
@@ -69,6 +107,7 @@ jobs:
  style:
    timeout-minutes: 60
    name: Check formatting and spelling
    needs: [job_spec]
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - buildjet-8vcpu-ubuntu-2204
@@ -76,6 +115,21 @@ jobs:
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

      - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
        with:
          version: 9

      - name: Prettier Check on /docs
        working-directory: ./docs
        run: |
          pnpm dlx prettier@${PRETTIER_VERSION} . --check || {
            echo "To fix, run from the root of the zed repo:"
            echo "  cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
            false
          }
        env:
          PRETTIER_VERSION: 3.5.0

      # To support writing comments that they will certainly be revisited.
      - name: Check for todo! and FIXME comments
        run: script/check-todos
@@ -91,7 +145,10 @@ jobs:
  macos_tests:
    timeout-minutes: 60
    name: (macOS) Run Clippy and tests
    if: github.repository_owner == 'zed-industries'
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - self-hosted
      - test
@@ -123,7 +180,9 @@ jobs:
      - name: Check licenses
        run: |
          script/check-licenses
          script/generate-licenses /tmp/zed_licenses_output
          if [[ "${{ needs.job_spec.outputs.run_license }}" == "true" ]]; then
            script/generate-licenses /tmp/zed_licenses_output
          fi

      - name: Check for new vulnerable dependencies
        if: github.event_name == 'pull_request'
@@ -154,7 +213,10 @@ jobs:
  linux_tests:
    timeout-minutes: 60
    name: (Linux) Run Clippy and tests
    if: github.repository_owner == 'zed-industries'
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - buildjet-16vcpu-ubuntu-2204
    steps:
@@ -203,9 +265,12 @@ jobs:
  build_remote_server:
    timeout-minutes: 60
    name: (Linux) Build Remote Server
    if: github.repository_owner == 'zed-industries'
    needs: [job_spec]
    if: |
      github.repository_owner == 'zed-industries' &&
      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - buildjet-16vcpu-ubuntu-2204
      - buildjet-8vcpu-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
@@ -239,21 +304,12 @@ jobs:
|
||||
windows_clippy:
|
||||
timeout-minutes: 60
|
||||
name: (Windows) Run Clippy
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: hosted-windows-2
|
||||
needs: [job_spec]
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
needs.job_spec.outputs.run_tests == 'true'
|
||||
runs-on: windows-2025-16
|
||||
steps:
|
||||
# Temporarily Collect some metadata about the hardware behind our runners.
|
||||
- name: GHA Runner Info
|
||||
run: |
|
||||
Invoke-RestMethod -Headers @{"Metadata"="true"} -Method GET -Uri "http://169.254.169.254/metadata/instance/compute?api-version=2023-07-01" |
|
||||
ConvertTo-Json -Depth 10 |
|
||||
jq "{ vm_size: .vmSize, location: .location, os_disk_gb: (.storageProfile.osDisk.diskSizeGB | tonumber), rs_disk_gb: (.storageProfile.resourceDisk.size | tonumber / 1024) }"
|
||||
@{
|
||||
Cores = (Get-CimInstance Win32_Processor).NumberOfCores
|
||||
vCPUs = (Get-CimInstance Win32_Processor).NumberOfLogicalProcessors
|
||||
RamGb = [math]::Round((Get-CimInstance Win32_ComputerSystem).TotalPhysicalMemory / 1GB, 2)
|
||||
cpuid = (Get-CimInstance Win32_Processor).Name.Trim()
|
||||
} | ConvertTo-Json
|
||||
# more info here:- https://github.com/rust-lang/cargo/issues/13020
|
||||
- name: Enable longer pathnames for git
|
||||
run: git config --system core.longpaths true
|
||||
@@ -306,21 +362,13 @@ jobs:
|
||||
windows_tests:
|
||||
timeout-minutes: 60
|
||||
name: (Windows) Run Tests
|
||||
if: ${{ github.repository_owner == 'zed-industries' && (github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'windows')) }}
|
||||
runs-on: hosted-windows-2
|
||||
needs: [job_spec]
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
needs.job_spec.outputs.run_tests == 'true'
|
||||
# Use bigger runners for PRs (speed); smaller for async (cost)
|
||||
runs-on: ${{ github.event_name == 'pull_request' && 'windows-2025-32' || 'windows-2025-16' }}
|
||||
steps:
|
||||
# Temporarily Collect some metadata about the hardware behind our runners.
|
||||
- name: GHA Runner Info
|
||||
run: |
|
||||
Invoke-RestMethod -Headers @{"Metadata"="true"} -Method GET -Uri "http://169.254.169.254/metadata/instance/compute?api-version=2023-07-01" |
|
||||
ConvertTo-Json -Depth 10 |
|
||||
jq "{ vm_size: .vmSize, location: .location, os_disk_gb: (.storageProfile.osDisk.diskSizeGB | tonumber), rs_disk_gb: (.storageProfile.resourceDisk.size | tonumber / 1024) }"
|
||||
@{
|
||||
Cores = (Get-CimInstance Win32_Processor).NumberOfCores
|
||||
vCPUs = (Get-CimInstance Win32_Processor).NumberOfLogicalProcessors
|
||||
RamGb = [math]::Round((Get-CimInstance Win32_ComputerSystem).TotalPhysicalMemory / 1GB, 2)
|
||||
cpuid = (Get-CimInstance Win32_Processor).Name.Trim()
|
||||
} | ConvertTo-Json
|
||||
# more info here:- https://github.com/rust-lang/cargo/issues/13020
|
||||
- name: Enable longer pathnames for git
|
||||
run: git config --system core.longpaths true
|
||||
@@ -372,13 +420,50 @@ jobs:
            Remove-Item -Path "${{ env.CARGO_HOME }}/config.toml" -Force
          }

  tests_pass:
    name: Tests Pass
    runs-on: ubuntu-latest
    needs:
      - job_spec
      - style
      - migration_checks
      - linux_tests
      - build_remote_server
      - macos_tests
      - windows_clippy
      - windows_tests
    if: always()
    steps:
      - name: Check all tests passed
        run: |
          # Check dependent jobs...
          RET_CODE=0
          # Always check style
          [[ "${{ needs.style.result }}" != 'success' ]] && { RET_CODE=1; echo "style tests failed"; }

          # Only check test jobs if they were supposed to run
          if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then
            [[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
            [[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
            [[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
            [[ "${{ needs.windows_clippy.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows clippy failed"; }
            [[ "${{ needs.migration_checks.result }}" != 'success' ]] && { RET_CODE=1; echo "Migration checks failed"; }
            [[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
          fi
          if [[ "$RET_CODE" -eq 0 ]]; then
            echo "All tests passed successfully!"
          fi
          exit $RET_CODE

  bundle-mac:
    timeout-minutes: 120
    name: Create a macOS bundle
    runs-on:
      - self-hosted
      - bundle
    if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
    if: |
      startsWith(github.ref, 'refs/tags/v')
      || contains(github.event.pull_request.labels.*.name, 'run-bundling')
    needs: [macos_tests]
    env:
      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
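For reference (not part of the diff), the result-aggregation pattern used in `tests_pass` above can be sketched as standalone shell, with hypothetical result values standing in for `${{ needs.<job>.result }}`. The point it illustrates: when `run_tests` is true, a skipped or cancelled job fails the check exactly like a failed one.

```bash
#!/usr/bin/env bash
# Hypothetical job results, standing in for ${{ needs.<job>.result }} expressions.
style_result="success"
linux_result="skipped"
run_tests="true"

RET_CODE=0
# Style is always required.
[[ "$style_result" != "success" ]] && { RET_CODE=1; echo "style tests failed"; }

if [[ "$run_tests" == "true" ]]; then
  # "skipped" and "cancelled" fail this check just like "failure".
  [[ "$linux_result" != "success" ]] && { RET_CODE=1; echo "Linux tests failed"; }
fi

[[ "$RET_CODE" -eq 0 ]] && echo "All tests passed successfully!"
exit $RET_CODE
```

Gating the per-job checks on `run_tests` is what lets docs-only changes skip the heavy test jobs while still letting `tests_pass` (declared with `if: always()`) report success.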
@@ -468,7 +553,9 @@ jobs:
|
||||
name: Linux x86_x64 release bundle
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2004
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
startsWith(github.ref, 'refs/tags/v')
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
needs: [linux_tests]
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
@@ -485,7 +572,7 @@ jobs:
|
||||
run: ./script/linux && ./script/install-mold 2.34.0
|
||||
|
||||
- name: Determine version and release channel
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
run: |
|
||||
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
|
||||
script/determine-release-channel
|
||||
@@ -495,14 +582,18 @@ jobs:
|
||||
|
||||
- name: Upload Linux bundle to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
|
||||
path: target/release/zed-*.tar.gz
|
||||
|
||||
- name: Upload Linux remote server to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.gz
|
||||
path: target/zed-remote-server-linux-x86_64.gz
|
||||
@@ -523,7 +614,9 @@ jobs:
|
||||
name: Linux arm64 release bundle
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2204-arm
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
startsWith(github.ref, 'refs/tags/v')
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
needs: [linux_tests]
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
@@ -540,7 +633,7 @@ jobs:
|
||||
run: ./script/linux
|
||||
|
||||
- name: Determine version and release channel
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
run: |
|
||||
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
|
||||
script/determine-release-channel
|
||||
@@ -550,14 +643,18 @@ jobs:
|
||||
|
||||
- name: Upload Linux bundle to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
|
||||
path: target/release/zed-*.tar.gz
|
||||
|
||||
- name: Upload Linux remote server to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.gz
|
||||
path: target/zed-remote-server-linux-aarch64.gz
|
||||
@@ -575,7 +672,9 @@ jobs:
|
||||
|
||||
auto-release-preview:
|
||||
name: Auto release preview
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') }}
|
||||
if: |
|
||||
startsWith(github.ref, 'refs/tags/v')
|
||||
&& endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
|
||||
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64]
|
||||
runs-on:
|
||||
- self-hosted
|
||||
|
||||
.github/workflows/docs.yml (vendored): 39 changed lines
@@ -1,39 +0,0 @@
|
||||
name: Docs
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "docs/**"
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
check_formatting:
|
||||
name: "Check formatting"
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
|
||||
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
|
||||
with:
|
||||
version: 9
|
||||
|
||||
- name: Prettier Check on /docs
|
||||
working-directory: ./docs
|
||||
run: |
|
||||
pnpm dlx prettier@${PRETTIER_VERSION} . --check || {
|
||||
echo "To fix, run from the root of the zed repo:"
|
||||
echo " cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
|
||||
false
|
||||
}
|
||||
env:
|
||||
PRETTIER_VERSION: 3.5.0
|
||||
|
||||
- name: Check for Typos with Typos-CLI
|
||||
uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
|
||||
with:
|
||||
config: ./typos.toml
|
||||
files: ./docs/
|
||||
Cargo.lock (generated): 196 changed lines
@@ -84,7 +84,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "alacritty_terminal"
|
||||
version = "0.25.1-dev"
|
||||
source = "git+https://github.com/zed-industries/alacritty.git?rev=03c2907b44b4189aac5fdeaea331f5aab5c7072e#03c2907b44b4189aac5fdeaea331f5aab5c7072e"
|
||||
source = "git+https://github.com/zed-industries/alacritty.git?branch=add-hush-login-flag#828457c9ff1f7ea0a0469337cc8a37ee3a1b0590"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bitflags 2.8.0",
|
||||
@@ -490,6 +490,7 @@ dependencies = [
|
||||
"proto",
|
||||
"rand 0.8.5",
|
||||
"rope",
|
||||
"scripting_tool",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
@@ -657,6 +658,7 @@ dependencies = [
|
||||
"collections",
|
||||
"derive_more",
|
||||
"gpui",
|
||||
"language_model",
|
||||
"parking_lot",
|
||||
"project",
|
||||
"serde",
|
||||
@@ -670,11 +672,17 @@ dependencies = [
|
||||
"anyhow",
|
||||
"assistant_tool",
|
||||
"chrono",
|
||||
"collections",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"language",
|
||||
"language_model",
|
||||
"project",
|
||||
"rand 0.8.5",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1233,27 +1241,25 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "aws-lc-rs"
|
||||
version = "1.12.2"
|
||||
version = "1.12.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4c2b7ddaa2c56a367ad27a094ad8ef4faacf8a617c2575acb2ba88949df999ca"
|
||||
checksum = "dabb68eb3a7aa08b46fddfd59a3d55c978243557a90ab804769f7e20e67d2b01"
|
||||
dependencies = [
|
||||
"aws-lc-sys",
|
||||
"paste",
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aws-lc-sys"
|
||||
version = "0.25.0"
|
||||
version = "0.27.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "71b2ddd3ada61a305e1d8bb6c005d1eaa7d14d903681edfc400406d523a9b491"
|
||||
checksum = "6bbe221bbf523b625a4dd8585c7f38166e31167ec2ca98051dbcb4c3b6e825d2"
|
||||
dependencies = [
|
||||
"bindgen 0.69.5",
|
||||
"cc",
|
||||
"cmake",
|
||||
"dunce",
|
||||
"fs_extra",
|
||||
"paste",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1823,7 +1829,7 @@ dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"cexpr",
|
||||
"clang-sys",
|
||||
"itertools 0.12.1",
|
||||
"itertools 0.10.5",
|
||||
"lazy_static",
|
||||
"lazycell",
|
||||
"log",
|
||||
@@ -1846,7 +1852,7 @@ dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"cexpr",
|
||||
"clang-sys",
|
||||
"itertools 0.12.1",
|
||||
"itertools 0.10.5",
|
||||
"log",
|
||||
"prettyplease",
|
||||
"proc-macro2",
|
||||
@@ -2300,7 +2306,7 @@ dependencies = [
|
||||
"cap-primitives",
|
||||
"cap-std",
|
||||
"io-lifetimes",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2328,7 +2334,7 @@ dependencies = [
|
||||
"ipnet",
|
||||
"maybe-owned",
|
||||
"rustix",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
"winx",
|
||||
]
|
||||
|
||||
@@ -2669,7 +2675,7 @@ dependencies = [
|
||||
"serde",
|
||||
"tempfile",
|
||||
"util",
|
||||
"windows 0.58.0",
|
||||
"windows 0.60.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2711,7 +2717,7 @@ dependencies = [
|
||||
"tokio-socks",
|
||||
"url",
|
||||
"util",
|
||||
"windows 0.58.0",
|
||||
"windows 0.60.0",
|
||||
"worktree",
|
||||
]
|
||||
|
||||
@@ -3129,6 +3135,7 @@ dependencies = [
|
||||
"extension",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"language_model",
|
||||
"log",
|
||||
"parking_lot",
|
||||
"postage",
|
||||
@@ -4402,7 +4409,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4457,7 +4464,6 @@ dependencies = [
|
||||
"env_logger 0.11.6",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"git",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"language",
|
||||
@@ -5053,7 +5059,7 @@ dependencies = [
|
||||
"text",
|
||||
"time",
|
||||
"util",
|
||||
"windows 0.58.0",
|
||||
"windows 0.60.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5064,7 +5070,7 @@ checksum = "5e2e6123af26f0f2c51cc66869137080199406754903cc926a7690401ce09cb4"
|
||||
dependencies = [
|
||||
"io-lifetimes",
|
||||
"rustix",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5443,14 +5449,16 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"askpass",
|
||||
"assistant_settings",
|
||||
"buffer_diff",
|
||||
"chrono",
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
"component",
|
||||
"ctor",
|
||||
"db",
|
||||
"editor",
|
||||
"env_logger 0.11.6",
|
||||
"feature_flags",
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
"git",
|
||||
@@ -5463,6 +5471,7 @@ dependencies = [
|
||||
"log",
|
||||
"menu",
|
||||
"multi_buffer",
|
||||
"notifications",
|
||||
"panel",
|
||||
"picker",
|
||||
"postage",
|
||||
@@ -5478,10 +5487,11 @@ dependencies = [
|
||||
"telemetry",
|
||||
"theme",
|
||||
"time",
|
||||
"time_format",
|
||||
"ui",
|
||||
"unindent",
|
||||
"util",
|
||||
"windows 0.58.0",
|
||||
"windows 0.60.0",
|
||||
"workspace",
|
||||
"zed_actions",
|
||||
]
|
||||
@@ -5681,8 +5691,8 @@ dependencies = [
|
||||
"wayland-cursor",
|
||||
"wayland-protocols",
|
||||
"wayland-protocols-plasma",
|
||||
"windows 0.58.0",
|
||||
"windows-core 0.58.0",
|
||||
"windows 0.60.0",
|
||||
"windows-core 0.60.1",
|
||||
"x11-clipboard",
|
||||
"x11rb",
|
||||
"xim",
|
||||
@@ -6709,7 +6719,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2285ddfe3054097ef4b2fe909ef8c3bcd1ea52a8f0d274416caebeef39f04a65"
|
||||
dependencies = [
|
||||
"io-lifetimes",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -7329,7 +7339,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"windows-targets 0.52.6",
|
||||
"windows-targets 0.48.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -10507,7 +10517,7 @@ checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4"
|
||||
dependencies = [
|
||||
"bytes 1.10.1",
|
||||
"heck 0.5.0",
|
||||
"itertools 0.12.1",
|
||||
"itertools 0.10.5",
|
||||
"log",
|
||||
"multimap 0.10.0",
|
||||
"once_cell",
|
||||
@@ -10540,7 +10550,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"itertools 0.12.1",
|
||||
"itertools 0.10.5",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
@@ -10734,7 +10744,7 @@ dependencies = [
|
||||
"once_cell",
|
||||
"socket2",
|
||||
"tracing",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -11656,7 +11666,7 @@ dependencies = [
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"once_cell",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -11915,13 +11925,17 @@ name = "scripting_tool"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"assistant_tool",
|
||||
"buffer_diff",
|
||||
"clock",
|
||||
"collections",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"language",
|
||||
"log",
|
||||
"mlua",
|
||||
"parking_lot",
|
||||
"project",
|
||||
"rand 0.8.5",
|
||||
"regex",
|
||||
"schemars",
|
||||
"serde",
|
||||
@@ -13426,7 +13440,7 @@ dependencies = [
|
||||
"fd-lock",
|
||||
"io-lifetimes",
|
||||
"rustix",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
"winx",
|
||||
]
|
||||
|
||||
@@ -13566,7 +13580,7 @@ dependencies = [
|
||||
"getrandom 0.3.1",
|
||||
"once_cell",
|
||||
"rustix",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -13613,7 +13627,7 @@ dependencies = [
|
||||
"theme",
|
||||
"thiserror 1.0.69",
|
||||
"util",
|
||||
"windows 0.58.0",
|
||||
"windows 0.60.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -13973,6 +13987,7 @@ dependencies = [
|
||||
"client",
|
||||
"collections",
|
||||
"feature_flags",
|
||||
"git_ui",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"notifications",
|
||||
@@ -13990,7 +14005,7 @@ dependencies = [
|
||||
"tree-sitter-md",
|
||||
"ui",
|
||||
"util",
|
||||
"windows 0.58.0",
|
||||
"windows 0.60.0",
|
||||
"workspace",
|
||||
"zed_actions",
|
||||
"zeta",
|
||||
@@ -14697,7 +14712,7 @@ dependencies = [
|
||||
"theme",
|
||||
"ui_macros",
|
||||
"util",
|
||||
"windows 0.58.0",
|
||||
"windows 0.60.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -15912,7 +15927,7 @@ version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
|
||||
dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -15969,6 +15984,28 @@ dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows"
|
||||
version = "0.60.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ddf874e74c7a99773e62b1c671427abf01a425e77c3d3fb9fb1e4883ea934529"
|
||||
dependencies = [
|
||||
"windows-collections",
|
||||
"windows-core 0.60.1",
|
||||
"windows-future",
|
||||
"windows-link",
|
||||
"windows-numerics",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-collections"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5467f79cc1ba3f52ebb2ed41dbb459b8e7db636cc3429458d9a852e15bc24dec"
|
||||
dependencies = [
|
||||
"windows-core 0.60.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.52.0"
|
||||
@@ -16009,10 +16046,33 @@ dependencies = [
|
||||
"windows-implement 0.58.0",
|
||||
"windows-interface 0.58.0",
|
||||
"windows-result 0.2.0",
|
||||
"windows-strings",
|
||||
"windows-strings 0.1.0",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-core"
|
||||
version = "0.60.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ca21a92a9cae9bf4ccae5cf8368dce0837100ddf6e6d57936749e85f152f6247"
|
||||
dependencies = [
|
||||
"windows-implement 0.59.0",
|
||||
"windows-interface 0.59.0",
|
||||
"windows-link",
|
||||
"windows-result 0.3.1",
|
||||
"windows-strings 0.3.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-future"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a787db4595e7eb80239b74ce8babfb1363d8e343ab072f2ffe901400c03349f0"
|
||||
dependencies = [
|
||||
"windows-core 0.60.1",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-implement"
|
||||
version = "0.57.0"
|
||||
@@ -16035,6 +16095,17 @@ dependencies = [
|
||||
"syn 2.0.90",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-implement"
|
||||
version = "0.59.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "83577b051e2f49a058c308f17f273b570a6a758386fc291b5f6a934dd84e48c1"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-interface"
|
||||
version = "0.57.0"
|
||||
@@ -16057,12 +16128,33 @@ dependencies = [
|
||||
"syn 2.0.90",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-interface"
|
||||
version = "0.59.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cb26fd936d991781ea39e87c3a27285081e3c0da5ca0fcbc02d368cc6f52ff01"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-link"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3"
|
||||
|
||||
[[package]]
|
||||
name = "windows-numerics"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "005dea54e2f6499f2cee279b8f703b3cf3b5734a2d8d21867c8f44003182eeed"
|
||||
dependencies = [
|
||||
"windows-core 0.60.1",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-registry"
|
||||
version = "0.2.0"
|
||||
@@ -16070,7 +16162,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0"
|
||||
dependencies = [
|
||||
"windows-result 0.2.0",
|
||||
"windows-strings",
|
||||
"windows-strings 0.1.0",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
@@ -16092,6 +16184,15 @@ dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-result"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06374efe858fab7e4f881500e6e86ec8bc28f9462c47e5a9941a0142ad86b189"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-strings"
|
||||
version = "0.1.0"
|
||||
@@ -16102,6 +16203,15 @@ dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-strings"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.45.0"
|
||||
@@ -16377,7 +16487,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -16898,7 +17008,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.178.0"
|
||||
version = "0.178.3"
|
||||
dependencies = [
|
||||
"activity_indicator",
|
||||
"anyhow",
|
||||
@@ -16984,7 +17094,6 @@ dependencies = [
|
||||
"repl",
|
||||
"reqwest_client",
|
||||
"rope",
|
||||
"scripting_tool",
|
||||
"search",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -17019,7 +17128,7 @@ dependencies = [
|
||||
"vim",
|
||||
"vim_mode_setting",
|
||||
"welcome",
|
||||
"windows 0.58.0",
|
||||
"windows 0.60.0",
|
||||
"winresource",
|
||||
"workspace",
|
||||
"zed_actions",
|
||||
@@ -17116,13 +17225,6 @@ dependencies = [
|
||||
"zed_extension_api 0.1.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zed_terraform"
|
||||
version = "0.1.2"
|
||||
dependencies = [
|
||||
"zed_extension_api 0.1.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zed_test_extension"
|
||||
version = "0.1.0"
|
||||
|
||||
Cargo.toml: 15 changed lines
@@ -178,7 +178,6 @@ members = [
    "extensions/ruff",
    "extensions/slash-commands-example",
    "extensions/snippets",
    "extensions/terraform",
    "extensions/test-extension",
    "extensions/toml",
    "extensions/uiua",
@@ -370,7 +369,7 @@ zeta = { path = "crates/zeta" }
#

aho-corasick = "1.1"
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", rev = "03c2907b44b4189aac5fdeaea331f5aab5c7072e" }
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", branch = "add-hush-login-flag" }
any_vec = "0.14"
anyhow = "1.0.86"
arrayvec = { version = "0.7.4", features = ["serde"] }
@@ -597,12 +596,12 @@ features = [
]

[workspace.dependencies.windows]
version = "0.58"
version = "0.60"
features = [
    "implement",
    "Foundation_Collections",
    "Foundation_Numerics",
    "Storage",
    "Storage_Search",
    "Storage_Streams",
    "System_Threading",
    "UI_StartScreen",
    "UI_ViewManagement",
@@ -623,9 +622,11 @@ features = [
    "Win32_System_Com_StructuredStorage",
    "Win32_System_Console",
    "Win32_System_DataExchange",
    "Win32_System_IO",
    "Win32_System_LibraryLoader",
    "Win32_System_Memory",
    "Win32_System_Ole",
    "Win32_System_Pipes",
    "Win32_System_SystemInformation",
    "Win32_System_SystemServices",
    "Win32_System_Threading",
@@ -751,5 +752,9 @@ new_ret_no_self = { level = "allow" }
should_implement_trait = { level = "allow" }
let_underscore_future = "allow"

# in Rust it can be very tedious to reduce argument count without
# running afoul of the borrow checker.
too_many_arguments = "allow"

[workspace.metadata.cargo-machete]
ignored = ["bindgen", "cbindgen", "prost_build", "serde", "component", "linkme"]
assets/icons/file_icons/wgsl.svg (new file, 7 lines, 432 B)
@@ -0,0 +1,7 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6.5 13L1.5 5H11.5L6.5 13Z" fill="black"/>
<path d="M14 9H9L11.5 5L14 9Z" fill="black" fill-opacity="0.75"/>
<path d="M9 9L14 9L11.5 13L9 9Z" fill="black" fill-opacity="0.65"/>
<path d="M14 5L15.25 7L12.75 7L14 5Z" fill="black" fill-opacity="0.5"/>
<path d="M14 9L12.75 7H15.25L14 9Z" fill="black" fill-opacity="0.55"/>
</svg>
assets/icons/git_onboarding_bg.svg (new file, 40 lines, 5.4 KiB)
@@ -0,0 +1,40 @@
|
||||
<svg width="400" height="120" xmlns="http://www.w3.org/2000/svg">
|
||||
<defs>
|
||||
<pattern id="tilePattern" width="124" height="24" patternUnits="userSpaceOnUse">
|
||||
<svg width="124" height="24" viewBox="0 0 124 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g opacity="0.2">
|
||||
<path d="M16.666 12.0013L11.9993 16.668L7.33268 12.0013" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M12 7.33464L12 16.668" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M29 8.33464C29.3682 8.33464 29.6667 8.03616 29.6667 7.66797C29.6667 7.29978 29.3682 7.0013 29 7.0013C28.6318 7.0013 28.3333 7.29978 28.3333 7.66797C28.3333 8.03616 28.6318 8.33464 29 8.33464ZM29 9.66797C30.1046 9.66797 31 8.77254 31 7.66797C31 6.5634 30.1046 5.66797 29 5.66797C27.8954 5.66797 27 6.5634 27 7.66797C27 8.77254 27.8954 9.66797 29 9.66797Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M35 8.33464C35.3682 8.33464 35.6667 8.03616 35.6667 7.66797C35.6667 7.29978 35.3682 7.0013 35 7.0013C34.6318 7.0013 34.3333 7.29978 34.3333 7.66797C34.3333 8.03616 34.6318 8.33464 35 8.33464ZM35 9.66797C36.1046 9.66797 37 8.77254 37 7.66797C37 6.5634 36.1046 5.66797 35 5.66797C33.8954 5.66797 33 6.5634 33 7.66797C33 8.77254 33.8954 9.66797 35 9.66797Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M29 16.9987C29.3682 16.9987 29.6667 16.7002 29.6667 16.332C29.6667 15.9638 29.3682 15.6654 29 15.6654C28.6318 15.6654 28.3333 15.9638 28.3333 16.332C28.3333 16.7002 28.6318 16.9987 29 16.9987ZM29 18.332C30.1046 18.332 31 17.4366 31 16.332C31 15.2275 30.1046 14.332 29 14.332C27.8954 14.332 27 15.2275 27 16.332C27 17.4366 27.8954 18.332 29 18.332Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M28.334 9H29.6673V11.4615C30.2383 11.1443 31.0005 11 32.0007 11H33.6675C34.0356 11 34.334 10.7017 34.334 10.3333V9H35.6673V10.3333C35.6673 11.4378 34.7723 12.3333 33.6675 12.3333H32.0007C30.8614 12.3333 30.3692 12.5484 30.1298 12.7549C29.9016 12.9516 29.7857 13.2347 29.6673 13.742V15H28.334V9Z" fill="white"/>
|
||||
<path d="M48.668 8.66406H55.3346V15.3307" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M48.668 15.3307L55.3346 8.66406" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M76.5871 9.40624C76.8514 9.14195 77 8.78346 77 8.40965C77 8.03583 76.8516 7.67731 76.5873 7.41295C76.323 7.14859 75.9645 7.00005 75.5907 7C75.2169 6.99995 74.8584 7.14841 74.594 7.4127L67.921 14.0874C67.8049 14.2031 67.719 14.3456 67.671 14.5024L67.0105 16.6784C66.9975 16.7217 66.9966 16.7676 67.0076 16.8113C67.0187 16.8551 67.0414 16.895 67.0734 16.9269C67.1053 16.9588 67.1453 16.9815 67.1891 16.9925C67.2328 17.0035 67.2788 17.0024 67.322 16.9894L69.4985 16.3294C69.6551 16.2818 69.7976 16.1964 69.9135 16.0809L76.5871 9.40624Z" stroke="white" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M74 8L76 10" stroke="white" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M70.3877 7.53516V6.53516" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M73.5693 16.6992V17.6992" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M66.3877 10.5352H67.3877" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M77.5693 13.6992H76.5693" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M68.3877 8.53516L67.3877 7.53516" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M75.5693 15.6992L76.5693 16.6992" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M87.334 11.9987L92.0007 7.33203L96.6673 11.9987" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M92 16.6654V7.33203" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M117 12C117 10.6739 116.473 9.40215 115.536 8.46447C114.598 7.52678 113.326 7 112 7C110.602 7.00526 109.261 7.55068 108.256 8.52222L107 9.77778" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M107 7V9.77778H109.778" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M107 12C107 13.3261 107.527 14.5979 108.464 15.5355C109.402 16.4732 110.674 17 112 17C113.398 16.9947 114.739 16.4493 115.744 15.4778L117 14.2222" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M114.223 14.2188H117V16.9965" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</g>
|
||||
</svg>
|
||||
</pattern>
|
||||
<linearGradient id="fade" y2="1" x2="0">
|
||||
<stop offset="0" stop-color="white" stop-opacity=".52"/>
|
||||
<stop offset="1" stop-color="white" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<mask id="fadeMask" maskContentUnits="objectBoundingBox">
|
||||
<rect width="1" height="1" fill="url(#fade)"/>
|
||||
</mask>
|
||||
</defs>
|
||||
<rect width="100%" height="100%" fill="url(#tilePattern)" mask="url(#fadeMask)"/>
|
||||
</svg>
|
||||
|
@@ -393,6 +393,7 @@
|
||||
"alt-shift-open": "projects::OpenRemote",
|
||||
"alt-ctrl-shift-o": "projects::OpenRemote",
|
||||
"alt-ctrl-shift-b": "branches::OpenRecent",
|
||||
"alt-shift-enter": "toast::RunAction",
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"save": "workspace::Save",
|
||||
"ctrl-s": "workspace::Save",
|
||||
@@ -731,28 +732,48 @@
|
||||
"up": "menu::SelectPrevious",
|
||||
"down": "menu::SelectNext",
|
||||
"enter": "menu::Confirm",
|
||||
"alt-y": "git::StageFile",
|
||||
"alt-shift-y": "git::UnstageFile",
|
||||
"ctrl-alt-y": "git::ToggleStaged",
|
||||
"space": "git::ToggleStaged",
|
||||
"ctrl-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll",
|
||||
"tab": "git_panel::FocusEditor",
|
||||
"shift-tab": "git_panel::FocusEditor",
|
||||
"escape": "git_panel::ToggleFocus",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"alt-enter": "menu::SecondaryConfirm"
|
||||
"alt-enter": "menu::SecondaryConfirm",
|
||||
"backspace": "git::RestoreFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitCommit > Editor",
|
||||
"bindings": {
|
||||
"escape": "menu::Cancel",
|
||||
"enter": "editor::Newline",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"alt-l": "git::GenerateCommitMessage"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
"ctrl-g shift-backspace": "git::TrashUntrackedFiles",
|
||||
"ctrl-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitDiff > Editor",
|
||||
"bindings": {
|
||||
"ctrl-enter": "git::Commit"
|
||||
"ctrl-enter": "git::Commit",
|
||||
"ctrl-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -767,6 +788,7 @@
|
||||
"escape": "git_panel::FocusChanges",
|
||||
"tab": "git_panel::FocusChanges",
|
||||
"shift-tab": "git_panel::FocusChanges",
|
||||
"enter": "editor::Newline",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"alt-up": "git_panel::FocusChanges",
|
||||
"alt-l": "git::GenerateCommitMessage"
|
||||
@@ -840,21 +862,22 @@
|
||||
"alt-b": ["terminal::SendText", "\u001bb"],
|
||||
"alt-f": ["terminal::SendText", "\u001bf"],
|
||||
// Overrides for conflicting keybindings
|
||||
"ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
|
||||
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
|
||||
"ctrl-o": ["terminal::SendKeystroke", "ctrl-o"],
|
||||
"ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
|
||||
"ctrl-shift-a": "editor::SelectAll",
|
||||
"find": "buffer_search::Deploy",
|
||||
"ctrl-shift-f": "buffer_search::Deploy",
|
||||
"ctrl-shift-l": "terminal::Clear",
|
||||
"ctrl-shift-w": "pane::CloseActiveItem",
|
||||
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
|
||||
"up": ["terminal::SendKeystroke", "up"],
|
||||
"pageup": ["terminal::SendKeystroke", "pageup"],
|
||||
"down": ["terminal::SendKeystroke", "down"],
|
||||
"pagedown": ["terminal::SendKeystroke", "pagedown"],
|
||||
"escape": ["terminal::SendKeystroke", "escape"],
|
||||
"enter": ["terminal::SendKeystroke", "enter"],
|
||||
"ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
|
||||
"shift-pageup": "terminal::ScrollPageUp",
|
||||
"shift-pagedown": "terminal::ScrollPageDown",
|
||||
"shift-up": "terminal::ScrollLineUp",
|
||||
|
||||
@@ -31,13 +31,13 @@
|
||||
"enter": "menu::Confirm",
|
||||
"ctrl-enter": "menu::SecondaryConfirm",
|
||||
"cmd-enter": "menu::SecondaryConfirm",
|
||||
"cmd-escape": "menu::Cancel",
|
||||
"ctrl-escape": "menu::Cancel",
|
||||
"ctrl-c": "menu::Cancel",
|
||||
"escape": "menu::Cancel",
|
||||
"alt-shift-enter": "menu::Restart",
|
||||
"cmd-shift-w": "workspace::CloseWindow",
|
||||
"shift-escape": "workspace::ToggleZoom",
|
||||
"cmd-escape": "menu::Cancel",
|
||||
"cmd-o": "workspace::Open",
|
||||
"cmd-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
"cmd-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
@@ -514,6 +514,7 @@
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd-k s": "workspace::SaveWithoutFormat",
|
||||
"alt-shift-enter": "toast::RunAction",
|
||||
"cmd-shift-s": "workspace::SaveAs",
|
||||
"cmd-shift-n": "workspace::NewWindow",
|
||||
"ctrl-`": "terminal_panel::ToggleFocus",
|
||||
@@ -763,28 +764,25 @@
|
||||
"cmd-up": "menu::SelectFirst",
|
||||
"cmd-down": "menu::SelectLast",
|
||||
"enter": "menu::Confirm",
|
||||
"cmd-alt-y": "git::ToggleStaged",
|
||||
"space": "git::ToggleStaged",
|
||||
"cmd-shift-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll",
|
||||
"cmd-y": "git::StageFile",
|
||||
"cmd-shift-y": "git::UnstageFile",
|
||||
"alt-down": "git_panel::FocusEditor",
|
||||
"tab": "git_panel::FocusEditor",
|
||||
"shift-tab": "git_panel::FocusEditor",
|
||||
"escape": "git_panel::ToggleFocus",
|
||||
"cmd-enter": "git::Commit"
|
||||
"cmd-enter": "git::Commit",
|
||||
"backspace": "git::RestoreFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitDiff > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-enter": "git::Commit"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "AskPass > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "menu::Confirm"
|
||||
"cmd-enter": "git::Commit",
|
||||
"cmd-ctrl-y": "git::StageAll",
|
||||
"cmd-ctrl-shift-y": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -800,11 +798,27 @@
|
||||
"alt-tab": "git::GenerateCommitMessage"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
"ctrl-g shift-backspace": "git::TrashUntrackedFiles",
|
||||
"cmd-ctrl-y": "git::StageAll",
|
||||
"cmd-ctrl-shift-y": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitCommit > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "editor::Newline",
|
||||
"escape": "menu::Cancel",
|
||||
"cmd-enter": "git::Commit",
|
||||
"alt-tab": "git::GenerateCommitMessage"
|
||||
}
|
||||
|
||||
assets/prompts/assistant_system_prompt.hbs (new file, 18 lines)
@@ -0,0 +1,18 @@
You are an AI assistant integrated into a text editor. Your goal is to do one of the following two things:

1. Help users answer questions and perform tasks related to their codebase.
2. Answer general-purpose questions unrelated to their particular codebase.

It will be up to you to decide which of these you are doing based on what the user has told you. When unclear, ask clarifying questions to understand the user's intent before proceeding.

You should only perform actions that modify the user’s system if explicitly requested by the user:
- If the user asks a question about how to accomplish a task, provide guidance or information, and use read-only tools (e.g., search) to assist. You may suggest potential actions, but do not directly modify the user’s system without explicit instruction.
- If the user clearly requests that you perform an action, carry out the action directly without explaining why you are doing so.

Be concise and direct in your responses.

The user has opened a project that contains the following top-level directories/files:

{{#each worktree_root_names}}
- {{this}}
{{/each}}
@@ -555,6 +555,12 @@
|
||||
//
|
||||
// Default: icon
|
||||
"status_style": "icon",
|
||||
// What branch name to use if init.defaultBranch
|
||||
// is not set
|
||||
//
|
||||
// Default: main
|
||||
"fallback_branch_name": "main",
|
||||
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the git panel.
|
||||
//
|
||||
@@ -594,6 +600,13 @@
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-3-5-sonnet-latest"
|
||||
},
|
||||
// The model to use when applying edits from the assistant.
|
||||
"editor_model": {
|
||||
// The provider to use.
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-3-5-sonnet-latest"
|
||||
}
|
||||
},
|
||||
// The settings for slash commands.
|
||||
@@ -841,11 +854,11 @@
|
||||
// How git hunks are displayed visually in the editor.
|
||||
// This setting can take two values:
|
||||
//
|
||||
// 1. Show unstaged hunks with a transparent background (default):
|
||||
// "hunk_style": "transparent"
|
||||
// 2. Show unstaged hunks with a pattern background:
|
||||
// "hunk_style": "pattern"
|
||||
"hunk_style": "staged_border"
|
||||
// 1. Show unstaged hunks filled and staged hunks hollow:
|
||||
// "hunk_style": "staged_hollow"
|
||||
// 2. Show unstaged hunks hollow and staged hunks filled:
|
||||
// "hunk_style": "unstaged_hollow"
|
||||
"hunk_style": "staged_hollow"
|
||||
},
|
||||
// Configuration for how direnv configuration should be loaded. May take 2 values:
|
||||
// 1. Load direnv configuration using `direnv export json` directly.
|
||||
@@ -1300,8 +1313,7 @@
|
||||
},
|
||||
// Settings for auto-closing of JSX tags.
|
||||
"jsx_tag_auto_close": {
|
||||
// // Whether to auto-close JSX tags.
|
||||
// "enabled": true
|
||||
"enabled": true
|
||||
},
|
||||
// LSP Specific settings.
|
||||
"lsp": {
|
||||
|
||||
@@ -6,15 +6,7 @@
|
||||
{
|
||||
"name": "Gruvbox Dark",
|
||||
"appearance": "dark",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
@@ -105,9 +97,9 @@
|
||||
"terminal.ansi.bright_white": "#fbf1c7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#83a598ff",
|
||||
"version_control_added": "#b7bb26ff",
|
||||
"version_control_modified": "#f9bd2fff",
|
||||
"version_control_deleted": "#fb4a35ff",
|
||||
"version_control.added": "#b7bb26ff",
|
||||
"version_control.modified": "#f9bd2fff",
|
||||
"version_control.deleted": "#fb4a35ff",
|
||||
"conflict": "#f9bd2fff",
|
||||
"conflict.background": "#572e10ff",
|
||||
"conflict.border": "#754916ff",
|
||||
@@ -383,6 +375,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
@@ -394,15 +391,7 @@
|
||||
{
|
||||
"name": "Gruvbox Dark Hard",
|
||||
"appearance": "dark",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
@@ -493,9 +482,9 @@
|
||||
"terminal.ansi.bright_white": "#fbf1c7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#83a598ff",
|
||||
"version_control_added": "#b7bb26ff",
|
||||
"version_control_modified": "#f9bd2fff",
|
||||
"version_control_deleted": "#fb4a35ff",
|
||||
"version_control.added": "#b7bb26ff",
|
||||
"version_control.modified": "#f9bd2fff",
|
||||
"version_control.deleted": "#fb4a35ff",
|
||||
"conflict": "#f9bd2fff",
|
||||
"conflict.background": "#572e10ff",
|
||||
"conflict.border": "#754916ff",
|
||||
@@ -771,6 +760,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
@@ -782,15 +776,7 @@
|
||||
{
|
||||
"name": "Gruvbox Dark Soft",
|
||||
"appearance": "dark",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
@@ -881,9 +867,9 @@
|
||||
"terminal.ansi.bright_white": "#fbf1c7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#83a598ff",
|
||||
"version_control_added": "#b7bb26ff",
|
||||
"version_control_modified": "#f9bd2fff",
|
||||
"version_control_deleted": "#fb4a35ff",
|
||||
"version_control.added": "#b7bb26ff",
|
||||
"version_control.modified": "#f9bd2fff",
|
||||
"version_control.deleted": "#fb4a35ff",
|
||||
"conflict": "#f9bd2fff",
|
||||
"conflict.background": "#572e10ff",
|
||||
"conflict.border": "#754916ff",
|
||||
@@ -1159,6 +1145,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
@@ -1170,15 +1161,7 @@
|
||||
{
|
||||
"name": "Gruvbox Light",
|
||||
"appearance": "light",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#c8b899ff",
|
||||
"border.variant": "#ddcca7ff",
|
||||
@@ -1269,9 +1252,9 @@
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control_added": "#797410ff",
|
||||
"version_control_modified": "#b57615ff",
|
||||
"version_control_deleted": "#9d0308ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
"version_control.deleted": "#9d0308ff",
|
||||
"conflict": "#b57615ff",
|
||||
"conflict.background": "#f5e2d0ff",
|
||||
"conflict.border": "#ebccabff",
|
||||
@@ -1547,6 +1530,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
@@ -1558,15 +1546,7 @@
|
||||
{
|
||||
"name": "Gruvbox Light Hard",
|
||||
"appearance": "light",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#c8b899ff",
|
||||
"border.variant": "#ddcca7ff",
|
||||
@@ -1657,9 +1637,9 @@
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control_added": "#797410ff",
|
||||
"version_control_modified": "#b57615ff",
|
||||
"version_control_deleted": "#9d0308ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
"version_control.deleted": "#9d0308ff",
|
||||
"conflict": "#b57615ff",
|
||||
"conflict.background": "#f5e2d0ff",
|
||||
"conflict.border": "#ebccabff",
|
||||
@@ -1935,6 +1915,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
@@ -1946,15 +1931,7 @@
|
||||
{
|
||||
"name": "Gruvbox Light Soft",
|
||||
"appearance": "light",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#c8b899ff",
|
||||
"border.variant": "#ddcca7ff",
|
||||
@@ -2045,9 +2022,9 @@
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control_added": "#797410ff",
|
||||
"version_control_modified": "#b57615ff",
|
||||
"version_control_deleted": "#9d0308ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
"version_control.deleted": "#9d0308ff",
|
||||
"conflict": "#b57615ff",
|
||||
"conflict.background": "#f5e2d0ff",
|
||||
"conflict.border": "#ebccabff",
|
||||
@@ -2323,6 +2300,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
|
||||
@@ -96,9 +96,9 @@
|
||||
"terminal.ansi.bright_white": "#dce0e5ff",
|
||||
"terminal.ansi.dim_white": "#575d65ff",
|
||||
"link_text.hover": "#74ade8ff",
|
||||
"version_control_added": "#a7c088ff",
|
||||
"version_control_modified": "#dec184ff",
|
||||
"version_control_deleted": "#d07277ff",
|
||||
"version_control.added": "#27a657ff",
|
||||
"version_control.modified": "#d3b020ff",
|
||||
"version_control.deleted": "#e06c76ff",
|
||||
"conflict": "#dec184ff",
|
||||
"conflict.background": "#dec1841a",
|
||||
"conflict.border": "#5d4c2fff",
|
||||
@@ -475,9 +475,9 @@
|
||||
"terminal.ansi.bright_white": "#242529ff",
|
||||
"terminal.ansi.dim_white": "#97979aff",
|
||||
"link_text.hover": "#5c78e2ff",
|
||||
"version_control_added": "#669f59ff",
|
||||
"version_control_modified": "#a48819ff",
|
||||
"version_control_deleted": "#d36151ff",
|
||||
"version_control.added": "#27a657ff",
|
||||
"version_control.modified": "#d3b020ff",
|
||||
"version_control.deleted": "#e06c76ff",
|
||||
"conflict": "#a48819ff",
|
||||
"conflict.background": "#faf2e6ff",
|
||||
"conflict.border": "#f4e7d1ff",
|
||||
|
||||
@@ -186,8 +186,12 @@ fn init_language_model_settings(cx: &mut App) {
|
||||
|
||||
fn update_active_language_model_from_settings(cx: &mut App) {
|
||||
let settings = AssistantSettings::get_global(cx);
|
||||
let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone());
|
||||
let model_id = LanguageModelId::from(settings.default_model.model.clone());
|
||||
let active_model_provider_name =
|
||||
LanguageModelProviderId::from(settings.default_model.provider.clone());
|
||||
let active_model_id = LanguageModelId::from(settings.default_model.model.clone());
|
||||
let editor_provider_name =
|
||||
LanguageModelProviderId::from(settings.editor_model.provider.clone());
|
||||
let editor_model_id = LanguageModelId::from(settings.editor_model.model.clone());
|
||||
let inline_alternatives = settings
|
||||
.inline_alternatives
|
||||
.iter()
|
||||
@@ -199,7 +203,8 @@ fn update_active_language_model_from_settings(cx: &mut App) {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.select_active_model(&provider_name, &model_id, cx);
|
||||
registry.select_active_model(&active_model_provider_name, &active_model_id, cx);
|
||||
registry.select_editor_model(&editor_provider_name, &editor_model_id, cx);
|
||||
registry.select_inline_alternative_models(inline_alternatives, cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -297,7 +297,8 @@ impl AssistantPanel {
|
||||
&LanguageModelRegistry::global(cx),
|
||||
window,
|
||||
|this, _, event: &language_model::Event, window, cx| match event {
|
||||
language_model::Event::ActiveModelChanged => {
|
||||
language_model::Event::ActiveModelChanged
|
||||
| language_model::Event::EditorModelChanged => {
|
||||
this.completion_provider_changed(window, cx);
|
||||
}
|
||||
language_model::Event::ProviderStateChanged => {
|
||||
|
||||
@@ -386,7 +386,6 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn suggest_assist(
|
||||
&mut self,
|
||||
editor: &Entity<Editor>,
|
||||
@@ -1674,7 +1673,6 @@ impl Focusable for PromptEditor {
|
||||
impl PromptEditor {
|
||||
const MAX_LINES: u8 = 8;
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
id: InlineAssistId,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
@@ -2333,7 +2331,6 @@ struct InlineAssist {
|
||||
}
|
||||
|
||||
impl InlineAssist {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
assist_id: InlineAssistId,
|
||||
group_id: InlineAssistGroupId,
|
||||
|
||||
@@ -702,7 +702,6 @@ impl Focusable for PromptEditor {
|
||||
impl PromptEditor {
|
||||
const MAX_LINES: u8 = 8;
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
id: TerminalInlineAssistId,
|
||||
prompt_history: VecDeque<String>,
|
||||
|
||||
@@ -59,6 +59,7 @@ prompt_library.workspace = true
|
||||
prompt_store.workspace = true
|
||||
proto.workspace = true
|
||||
rope.workspace = true
|
||||
scripting_tool.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
@@ -81,8 +82,8 @@ zed_actions.workspace = true
|
||||
[dev-dependencies]
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
gpui = { workspace = true, "features" = ["test-support"] }
|
||||
indoc.workspace = true
|
||||
language = { workspace = true, "features" = ["test-support"] }
|
||||
language_model = { workspace = true, "features" = ["test-support"] }
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
rand.workspace = true
|
||||
indoc.workspace = true
|
||||
|
||||
@@ -10,6 +10,7 @@ use gpui::{
|
||||
use language::{Buffer, LanguageRegistry};
|
||||
use language_model::{LanguageModelRegistry, LanguageModelToolUseId, Role};
|
||||
use markdown::{Markdown, MarkdownStyle};
|
||||
use scripting_tool::{ScriptingTool, ScriptingToolInput};
|
||||
use settings::Settings as _;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{prelude::*, Disclosure, KeyBinding};
|
||||
@@ -28,6 +29,7 @@ pub struct ActiveThread {
|
||||
messages: Vec<MessageId>,
|
||||
list_state: ListState,
|
||||
rendered_messages_by_id: HashMap<MessageId, Entity<Markdown>>,
|
||||
rendered_scripting_tool_uses: HashMap<LanguageModelToolUseId, Entity<Markdown>>,
|
||||
editing_message: Option<(MessageId, EditMessageState)>,
|
||||
expanded_tool_uses: HashMap<LanguageModelToolUseId, bool>,
|
||||
last_error: Option<ThreadError>,
|
||||
@@ -58,6 +60,7 @@ impl ActiveThread {
|
||||
save_thread_task: None,
|
||||
messages: Vec::new(),
|
||||
rendered_messages_by_id: HashMap::default(),
|
||||
rendered_scripting_tool_uses: HashMap::default(),
|
||||
expanded_tool_uses: HashMap::default(),
|
||||
list_state: ListState::new(0, ListAlignment::Bottom, px(1024.), {
|
||||
let this = cx.entity().downgrade();
|
||||
@@ -73,6 +76,16 @@ impl ActiveThread {
|
||||
|
||||
for message in thread.read(cx).messages().cloned().collect::<Vec<_>>() {
|
||||
this.push_message(&message.id, message.text.clone(), window, cx);
|
||||
|
||||
for tool_use in thread.read(cx).scripting_tool_uses_for_message(message.id) {
|
||||
this.render_scripting_tool_use_markdown(
|
||||
tool_use.id.clone(),
|
||||
tool_use.name.as_ref(),
|
||||
tool_use.input.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
this
|
||||
@@ -239,9 +252,35 @@ impl ActiveThread {
|
||||
})
|
||||
}
|
||||
|
||||
/// Renders the input of a scripting tool use to Markdown.
|
||||
///
|
||||
/// Does nothing if the tool use does not correspond to the scripting tool.
|
||||
fn render_scripting_tool_use_markdown(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
tool_name: &str,
|
||||
tool_input: serde_json::Value,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if tool_name != ScriptingTool::NAME {
|
||||
return;
|
||||
}
|
||||
|
||||
let lua_script = serde_json::from_value::<ScriptingToolInput>(tool_input)
|
||||
.map(|input| input.lua_script)
|
||||
.unwrap_or_default();
|
||||
|
||||
let lua_script =
|
||||
self.render_markdown(format!("```lua\n{lua_script}\n```").into(), window, cx);
|
||||
|
||||
self.rendered_scripting_tool_uses
|
||||
.insert(tool_use_id, lua_script);
|
||||
}
|
||||
|
||||
fn handle_thread_event(
|
||||
&mut self,
|
||||
_: &Entity<Thread>,
|
||||
_thread: &Entity<Thread>,
|
||||
event: &ThreadEvent,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
@@ -296,7 +335,19 @@ impl ActiveThread {
|
||||
thread.use_pending_tools(cx);
|
||||
});
|
||||
}
|
||||
ThreadEvent::ToolFinished { .. } => {
|
||||
ThreadEvent::ToolFinished {
|
||||
pending_tool_use, ..
|
||||
} => {
|
||||
if let Some(tool_use) = pending_tool_use {
|
||||
self.render_scripting_tool_use_markdown(
|
||||
tool_use.id.clone(),
|
||||
tool_use.name.as_ref(),
|
||||
tool_use.input.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
if self.thread.read(cx).all_tools_finished() {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
if let Some(model) = model_registry.active_model() {
|
||||
@@ -396,7 +447,7 @@ impl ActiveThread {
|
||||
};
|
||||
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.send_to_model(model, RequestKind::Chat, false, cx)
|
||||
thread.send_to_model(model, RequestKind::Chat, cx)
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
@@ -445,12 +496,17 @@ impl ActiveThread {
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let context = self.thread.read(cx).context_for_message(message_id);
|
||||
let tool_uses = self.thread.read(cx).tool_uses_for_message(message_id);
|
||||
let colors = cx.theme().colors();
|
||||
let thread = self.thread.read(cx);
|
||||
|
||||
let context = thread.context_for_message(message_id);
|
||||
let tool_uses = thread.tool_uses_for_message(message_id);
|
||||
let scripting_tool_uses = thread.scripting_tool_uses_for_message(message_id);
|
||||
|
||||
// Don't render user messages that are just there for returning tool results.
|
||||
if message.role == Role::User && self.thread.read(cx).message_has_tool_results(message_id) {
|
||||
if message.role == Role::User
|
||||
&& (thread.message_has_tool_results(message_id)
|
||||
|| thread.message_has_scripting_tool_results(message_id))
|
||||
{
|
||||
return Empty.into_any();
|
||||
}
|
||||
|
||||
@@ -463,6 +519,8 @@ impl ActiveThread {
|
||||
.filter(|(id, _)| *id == message_id)
|
||||
.map(|(_, state)| state.editor.clone());
|
||||
|
||||
let colors = cx.theme().colors();
|
||||
|
||||
let message_content = v_flex()
|
||||
.child(
|
||||
if let Some(edit_message_editor) = edit_message_editor.clone() {
|
||||
@@ -594,20 +652,26 @@ impl ActiveThread {
|
||||
)
|
||||
.child(message_content),
|
||||
),
|
||||
Role::Assistant => div()
|
||||
Role::Assistant => v_flex()
|
||||
.id(("message-container", ix))
|
||||
.child(message_content)
|
||||
.map(|parent| {
|
||||
if tool_uses.is_empty() {
|
||||
if tool_uses.is_empty() && scripting_tool_uses.is_empty() {
|
||||
return parent;
|
||||
}
|
||||
|
||||
parent.child(
|
||||
v_flex().children(
|
||||
tool_uses
|
||||
.into_iter()
|
||||
.map(|tool_use| self.render_tool_use(tool_use, cx)),
|
||||
),
|
||||
v_flex()
|
||||
.children(
|
||||
tool_uses
|
||||
.into_iter()
|
||||
.map(|tool_use| self.render_tool_use(tool_use, cx)),
|
||||
)
|
||||
.children(
|
||||
scripting_tool_uses
|
||||
.into_iter()
|
||||
.map(|tool_use| self.render_scripting_tool_use(tool_use, cx)),
|
||||
),
|
||||
)
|
||||
}),
|
||||
Role::System => div().id(("message-container", ix)).py_1().px_2().child(
|
||||
@@ -641,8 +705,13 @@ impl ActiveThread {
|
||||
.pl_1()
|
||||
.pr_2()
|
||||
.bg(cx.theme().colors().editor_foreground.opacity(0.02))
|
||||
.when(is_open, |element| element.border_b_1().rounded_t(px(6.)))
|
||||
.when(!is_open, |element| element.rounded_md())
|
||||
.map(|element| {
|
||||
if is_open {
|
||||
element.border_b_1().rounded_t(px(6.))
|
||||
} else {
|
||||
element.rounded_md()
|
||||
}
|
||||
})
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
h_flex()
|
||||
@@ -716,6 +785,119 @@ impl ActiveThread {
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_scripting_tool_use(
|
||||
&self,
|
||||
tool_use: ToolUse,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement {
|
||||
let is_open = self
|
||||
.expanded_tool_uses
|
||||
.get(&tool_use.id)
|
||||
.copied()
|
||||
.unwrap_or_default();
|
||||
|
||||
div().px_2p5().child(
|
||||
v_flex()
|
||||
.gap_1()
|
||||
.rounded_lg()
|
||||
.border_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.py_0p5()
|
||||
.pl_1()
|
||||
.pr_2()
|
||||
.bg(cx.theme().colors().editor_foreground.opacity(0.02))
|
||||
.map(|element| {
|
||||
if is_open {
|
||||
element.border_b_1().rounded_t(px(6.))
|
||||
} else {
|
||||
element.rounded_md()
|
||||
}
|
||||
})
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(Disclosure::new("tool-use-disclosure", is_open).on_click(
|
||||
cx.listener({
|
||||
let tool_use_id = tool_use.id.clone();
|
||||
move |this, _event, _window, _cx| {
|
||||
let is_open = this
|
||||
.expanded_tool_uses
|
||||
.entry(tool_use_id.clone())
|
||||
.or_insert(false);
|
||||
|
||||
*is_open = !*is_open;
|
||||
}
|
||||
}),
|
||||
))
|
||||
.child(Label::new(tool_use.name)),
|
||||
)
|
||||
.child(
|
||||
Label::new(match tool_use.status {
|
||||
ToolUseStatus::Pending => "Pending",
|
||||
ToolUseStatus::Running => "Running",
|
||||
ToolUseStatus::Finished(_) => "Finished",
|
||||
ToolUseStatus::Error(_) => "Error",
|
||||
})
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx),
|
||||
),
|
||||
)
|
||||
.map(|parent| {
|
||||
if !is_open {
|
||||
return parent;
|
||||
}
|
||||
|
||||
let lua_script_markdown =
|
||||
self.rendered_scripting_tool_uses.get(&tool_use.id).cloned();
|
||||
|
||||
parent.child(
|
||||
v_flex()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.py_1()
|
||||
.px_2p5()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(Label::new("Input:"))
|
||||
.map(|parent| {
|
||||
if let Some(markdown) = lua_script_markdown {
|
||||
parent.child(markdown)
|
||||
} else {
|
||||
parent.child(Label::new(
|
||||
"Failed to render script input to Markdown",
|
||||
))
|
||||
}
|
||||
}),
|
||||
)
|
||||
.map(|parent| match tool_use.status {
|
||||
ToolUseStatus::Finished(output) => parent.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.py_1()
|
||||
.px_2p5()
|
||||
.child(Label::new("Result:"))
|
||||
.child(Label::new(output)),
|
||||
),
|
||||
ToolUseStatus::Error(err) => parent.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.py_1()
|
||||
.px_2p5()
|
||||
.child(Label::new("Error:"))
|
||||
.child(Label::new(err)),
|
||||
),
|
||||
ToolUseStatus::Pending | ToolUseStatus::Running => parent,
|
||||
}),
|
||||
)
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ActiveThread {
|
||||
|
||||
@@ -16,6 +16,7 @@ mod terminal_inline_assistant;
|
||||
mod thread;
|
||||
mod thread_history;
|
||||
mod thread_store;
|
||||
mod tool_selector;
|
||||
mod tool_use;
|
||||
mod ui;
|
||||
|
||||
|
||||
@@ -112,7 +112,7 @@ impl AssistantPanel {
|
||||
log::info!("[assistant2-debug] initializing ThreadStore");
|
||||
let thread_store = workspace.update(&mut cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
ThreadStore::new(project, tools.clone(), cx)
|
||||
ThreadStore::new(project, tools.clone(), prompt_builder.clone(), cx)
|
||||
})??;
|
||||
log::info!("[assistant2-debug] finished initializing ThreadStore");
|
||||
|
||||
@@ -166,22 +166,24 @@ impl AssistantPanel {
|
||||
let history_store =
|
||||
cx.new(|cx| HistoryStore::new(thread_store.clone(), context_store.clone(), cx));
|
||||
|
||||
let thread = cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
thread.clone(),
|
||||
thread_store.clone(),
|
||||
language_registry.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
Self {
|
||||
active_view: ActiveView::Thread,
|
||||
workspace,
|
||||
project: project.clone(),
|
||||
fs: fs.clone(),
|
||||
language_registry: language_registry.clone(),
|
||||
language_registry,
|
||||
thread_store: thread_store.clone(),
|
||||
thread: cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
thread.clone(),
|
||||
thread_store.clone(),
|
||||
language_registry,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
thread,
|
||||
message_editor,
|
||||
context_store,
|
||||
context_editor: None,
|
||||
|
||||
@@ -167,8 +167,8 @@ impl PickerDelegate for FetchContextPickerDelegate {
|
||||
}
|
||||
}
|
||||
|
||||
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> SharedString {
|
||||
"Enter the URL that you would like to fetch".into()
|
||||
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
|
||||
Some("Enter the URL that you would like to fetch".into())
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
|
||||
@@ -25,7 +25,7 @@ use crate::{
|
||||
|
||||
pub struct ContextStrip {
|
||||
context_store: Entity<ContextStore>,
|
||||
pub context_picker: Entity<ContextPicker>,
|
||||
context_picker: Entity<ContextPicker>,
|
||||
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
focus_handle: FocusHandle,
|
||||
suggest_context_kind: SuggestContextKind,
|
||||
@@ -36,7 +36,6 @@ pub struct ContextStrip {
|
||||
}
|
||||
|
||||
impl ContextStrip {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
context_store: Entity<ContextStore>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
|
||||
@@ -480,7 +480,6 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn suggest_assist(
|
||||
&mut self,
|
||||
editor: &Entity<Editor>,
|
||||
@@ -1451,7 +1450,6 @@ struct InlineAssistScrollLock {
|
||||
}
|
||||
|
||||
impl EditorInlineAssists {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(editor: &Entity<Editor>, window: &mut Window, cx: &mut App) -> Self {
|
||||
let (highlight_updates_tx, mut highlight_updates_rx) = async_watch::channel(());
|
||||
Self {
|
||||
@@ -1563,7 +1561,6 @@ pub struct InlineAssist {
|
||||
}
|
||||
|
||||
impl InlineAssist {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
assist_id: InlineAssistId,
|
||||
group_id: InlineAssistGroupId,
|
||||
|
||||
@@ -816,7 +816,6 @@ impl InlineAssistId {
|
||||
}
|
||||
|
||||
impl PromptEditor<BufferCodegen> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new_buffer(
|
||||
id: InlineAssistId,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
@@ -976,7 +975,6 @@ impl TerminalInlineAssistId {
|
||||
}
|
||||
|
||||
impl PromptEditor<TerminalCodegen> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new_terminal(
|
||||
id: TerminalInlineAssistId,
|
||||
prompt_history: VecDeque<String>,
|
||||
|
||||
@@ -2,6 +2,7 @@ use std::sync::Arc;
|
||||
|
||||
use editor::actions::MoveUp;
|
||||
use editor::{Editor, EditorElement, EditorEvent, EditorStyle};
|
||||
use file_icons::FileIcons;
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle,
|
||||
@@ -15,7 +16,7 @@ use std::time::Duration;
|
||||
use text::Bias;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{
|
||||
prelude::*, ButtonLike, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, Switch,
|
||||
prelude::*, ButtonLike, Disclosure, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle,
|
||||
Tooltip,
|
||||
};
|
||||
use vim_mode_setting::VimModeSetting;
|
||||
@@ -27,6 +28,7 @@ use crate::context_store::{refresh_context_store_text, ContextStore};
|
||||
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
|
||||
use crate::thread::{RequestKind, Thread};
|
||||
use crate::thread_store::ThreadStore;
|
||||
use crate::tool_selector::ToolSelector;
|
||||
use crate::{Chat, ChatMode, RemoveAllContext, ToggleContextPicker};
|
||||
|
||||
pub struct MessageEditor {
|
||||
@@ -38,7 +40,8 @@ pub struct MessageEditor {
|
||||
inline_context_picker: Entity<ContextPicker>,
|
||||
inline_context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
model_selector: Entity<AssistantModelSelector>,
|
||||
use_tools: bool,
|
||||
tool_selector: Entity<ToolSelector>,
|
||||
edits_expanded: bool,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
@@ -51,6 +54,7 @@ impl MessageEditor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let tools = thread.read(cx).tools().clone();
|
||||
let context_store = cx.new(|_cx| ContextStore::new(workspace.clone()));
|
||||
let context_picker_menu_handle = PopoverMenuHandle::default();
|
||||
let inline_context_picker_menu_handle = PopoverMenuHandle::default();
|
||||
@@ -116,13 +120,13 @@ impl MessageEditor {
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
use_tools: false,
|
||||
tool_selector: cx.new(|cx| ToolSelector::new(tools, cx)),
|
||||
edits_expanded: false,
|
||||
_subscriptions: subscriptions,
|
||||
}
|
||||
}
|
||||
|
||||
fn toggle_chat_mode(&mut self, _: &ChatMode, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.use_tools = !self.use_tools;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -189,14 +193,13 @@ impl MessageEditor {
|
||||
|
||||
let thread = self.thread.clone();
|
||||
let context_store = self.context_store.clone();
|
||||
let use_tools = self.use_tools;
|
||||
cx.spawn(move |_, mut cx| async move {
|
||||
refresh_task.await;
|
||||
thread
|
||||
.update(&mut cx, |thread, cx| {
|
||||
let context = context_store.read(cx).snapshot(cx).collect::<Vec<_>>();
|
||||
thread.insert_user_message(user_message, context, cx);
|
||||
thread.send_to_model(model, request_kind, use_tools, cx);
|
||||
thread.send_to_model(model, request_kind, cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
@@ -303,6 +306,9 @@ impl Render for MessageEditor {
|
||||
px(64.)
|
||||
};
|
||||
|
||||
let changed_buffers = self.thread.read(cx).scripting_changed_buffers(cx);
|
||||
let changed_buffers_count = changed_buffers.len();
|
||||
|
||||
v_flex()
|
||||
.size_full()
|
||||
.when(is_streaming_completion, |parent| {
|
||||
@@ -363,6 +369,109 @@ impl Render for MessageEditor {
|
||||
),
|
||||
)
|
||||
})
|
||||
.when(changed_buffers_count > 0, |parent| {
|
||||
parent.child(
|
||||
v_flex()
|
||||
.mx_2()
|
||||
.bg(cx.theme().colors().element_background)
|
||||
.border_1()
|
||||
.border_b_0()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.rounded_t_md()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.p_2()
|
||||
.child(
|
||||
Disclosure::new("edits-disclosure", self.edits_expanded)
|
||||
.on_click(cx.listener(|this, _ev, _window, cx| {
|
||||
this.edits_expanded = !this.edits_expanded;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Label::new("Edits")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new("•").size(LabelSize::XSmall).color(Color::Muted))
|
||||
.child(
|
||||
Label::new(format!(
|
||||
"{} {}",
|
||||
changed_buffers_count,
|
||||
if changed_buffers_count == 1 {
|
||||
"file"
|
||||
} else {
|
||||
"files"
|
||||
}
|
||||
))
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.when(self.edits_expanded, |parent| {
|
||||
parent.child(
|
||||
v_flex().bg(cx.theme().colors().editor_background).children(
|
||||
changed_buffers.enumerate().flat_map(|(index, buffer)| {
|
||||
let file = buffer.read(cx).file()?;
|
||||
let path = file.path();
|
||||
|
||||
let parent_label = path.parent().and_then(|parent| {
|
||||
let parent_str = parent.to_string_lossy();
|
||||
|
||||
if parent_str.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
Label::new(format!(
|
||||
"{}{}",
|
||||
parent_str,
|
||||
std::path::MAIN_SEPARATOR_STR
|
||||
))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
}
|
||||
});
|
||||
|
||||
let name_label = path.file_name().map(|name| {
|
||||
Label::new(name.to_string_lossy().to_string())
|
||||
.size(LabelSize::Small)
|
||||
});
|
||||
|
||||
let file_icon = FileIcons::get_icon(&path, cx)
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or_else(|| Icon::new(IconName::File));
|
||||
|
||||
let element = div()
|
||||
.p_2()
|
||||
.when(index + 1 < changed_buffers_count, |parent| {
|
||||
parent
|
||||
.border_color(cx.theme().colors().border)
|
||||
.border_b_1()
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(file_icon)
|
||||
.child(
|
||||
// TODO: handle overflow
|
||||
h_flex()
|
||||
.children(parent_label)
|
||||
.children(name_label),
|
||||
)
|
||||
// TODO: show lines changed
|
||||
.child(Label::new("+").color(Color::Created))
|
||||
.child(Label::new("-").color(Color::Deleted)),
|
||||
);
|
||||
|
||||
Some(element)
|
||||
}),
|
||||
),
|
||||
)
|
||||
}),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
v_flex()
|
||||
.key_context("MessageEditor")
|
||||
@@ -428,25 +537,7 @@ impl Render for MessageEditor {
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.child(
|
||||
Switch::new("use-tools", self.use_tools.into())
|
||||
.label("Tools")
|
||||
.on_click(cx.listener(
|
||||
|this, selection, _window, _cx| {
|
||||
this.use_tools = match selection {
|
||||
ToggleState::Selected => true,
|
||||
ToggleState::Unselected
|
||||
| ToggleState::Indeterminate => false,
|
||||
};
|
||||
},
|
||||
))
|
||||
.key_binding(KeyBinding::for_action_in(
|
||||
&ChatMode,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)),
|
||||
)
|
||||
.child(h_flex().gap_2().child(self.tool_selector.clone()))
|
||||
.child(
|
||||
h_flex().gap_1().child(self.model_selector.clone()).child(
|
||||
ButtonLike::new("submit-message")
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::{Context as _, Result};
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::{BTreeMap, HashMap, HashSet};
|
||||
use futures::StreamExt as _;
|
||||
use gpui::{App, Context, Entity, EventEmitter, SharedString, Task};
|
||||
use gpui::{App, AppContext, Context, Entity, EventEmitter, SharedString, Task};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
@@ -13,8 +13,10 @@ use language_model::{
|
||||
Role, StopReason,
|
||||
};
|
||||
use project::Project;
|
||||
use prompt_store::PromptBuilder;
|
||||
use scripting_tool::{ScriptingSession, ScriptingTool};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::{post_inc, TryFutureExt as _};
|
||||
use util::{post_inc, ResultExt, TryFutureExt as _};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::context::{attach_context_to_message, ContextId, ContextSnapshot};
|
||||
@@ -73,16 +75,22 @@ pub struct Thread {
|
||||
completion_count: usize,
|
||||
pending_completions: Vec<PendingCompletion>,
|
||||
project: Entity<Project>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
tool_use: ToolUseState,
|
||||
scripting_session: Entity<ScriptingSession>,
|
||||
scripting_tool_use: ToolUseState,
|
||||
}
|
||||
|
||||
impl Thread {
|
||||
pub fn new(
|
||||
project: Entity<Project>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
_cx: &mut Context<Self>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let scripting_session = cx.new(|cx| ScriptingSession::new(project.clone(), cx));
|
||||
|
||||
Self {
|
||||
id: ThreadId::new(),
|
||||
updated_at: Utc::now(),
|
||||
@@ -95,8 +103,11 @@ impl Thread {
|
||||
completion_count: 0,
|
||||
pending_completions: Vec::new(),
|
||||
project,
|
||||
prompt_builder,
|
||||
tools,
|
||||
tool_use: ToolUseState::new(),
|
||||
scripting_session,
|
||||
scripting_tool_use: ToolUseState::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -105,7 +116,8 @@ impl Thread {
|
||||
saved: SavedThread,
|
||||
project: Entity<Project>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
_cx: &mut Context<Self>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let next_message_id = MessageId(
|
||||
saved
|
||||
@@ -114,7 +126,11 @@ impl Thread {
|
||||
.map(|message| message.id.0 + 1)
|
||||
.unwrap_or(0),
|
||||
);
|
||||
let tool_use = ToolUseState::from_saved_messages(&saved.messages);
|
||||
let tool_use =
|
||||
ToolUseState::from_saved_messages(&saved.messages, |name| name != ScriptingTool::NAME);
|
||||
let scripting_tool_use =
|
||||
ToolUseState::from_saved_messages(&saved.messages, |name| name == ScriptingTool::NAME);
|
||||
let scripting_session = cx.new(|cx| ScriptingSession::new(project.clone(), cx));
|
||||
|
||||
Self {
|
||||
id,
|
||||
@@ -136,8 +152,11 @@ impl Thread {
|
||||
completion_count: 0,
|
||||
pending_completions: Vec::new(),
|
||||
project,
|
||||
prompt_builder,
|
||||
tools,
|
||||
tool_use,
|
||||
scripting_session,
|
||||
scripting_tool_use,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -198,42 +217,65 @@ impl Thread {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
|
||||
self.tool_use.pending_tool_uses()
|
||||
}
|
||||
|
||||
/// Returns whether all of the tool uses have finished running.
|
||||
pub fn all_tools_finished(&self) -> bool {
|
||||
let mut all_pending_tool_uses = self
|
||||
.tool_use
|
||||
.pending_tool_uses()
|
||||
.into_iter()
|
||||
.chain(self.scripting_tool_use.pending_tool_uses());
|
||||
|
||||
// If the only pending tool uses left are the ones with errors, then that means that we've finished running all
|
||||
// of the pending tools.
|
||||
self.pending_tool_uses()
|
||||
.into_iter()
|
||||
.all(|tool_use| tool_use.status.is_error())
|
||||
all_pending_tool_uses.all(|tool_use| tool_use.status.is_error())
|
||||
}
|
||||
|
||||
pub fn tool_uses_for_message(&self, id: MessageId) -> Vec<ToolUse> {
|
||||
self.tool_use.tool_uses_for_message(id)
|
||||
}
|
||||
|
||||
pub fn scripting_tool_uses_for_message(&self, id: MessageId) -> Vec<ToolUse> {
|
||||
self.scripting_tool_use.tool_uses_for_message(id)
|
||||
}
|
||||
|
||||
pub fn tool_results_for_message(&self, id: MessageId) -> Vec<&LanguageModelToolResult> {
|
||||
self.tool_use.tool_results_for_message(id)
|
||||
}
|
||||
|
||||
pub fn scripting_tool_results_for_message(
|
||||
&self,
|
||||
id: MessageId,
|
||||
) -> Vec<&LanguageModelToolResult> {
|
||||
self.scripting_tool_use.tool_results_for_message(id)
|
||||
}
|
||||
|
||||
pub fn scripting_changed_buffers<'a>(
|
||||
&self,
|
||||
cx: &'a App,
|
||||
) -> impl ExactSizeIterator<Item = &'a Entity<language::Buffer>> {
|
||||
self.scripting_session.read(cx).changed_buffers()
|
||||
}
|
||||
|
||||
pub fn message_has_tool_results(&self, message_id: MessageId) -> bool {
|
||||
self.tool_use.message_has_tool_results(message_id)
|
||||
}
|
||||
|
||||
pub fn message_has_scripting_tool_results(&self, message_id: MessageId) -> bool {
|
||||
self.scripting_tool_use.message_has_tool_results(message_id)
|
||||
}
|
||||
|
||||
pub fn insert_user_message(
|
||||
&mut self,
|
||||
text: impl Into<String>,
|
||||
context: Vec<ContextSnapshot>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
) -> MessageId {
|
||||
let message_id = self.insert_message(Role::User, text, cx);
|
||||
let context_ids = context.iter().map(|context| context.id).collect::<Vec<_>>();
|
||||
self.context
|
||||
.extend(context.into_iter().map(|context| (context.id, context)));
|
||||
self.context_by_message.insert(message_id, context_ids);
|
||||
message_id
|
||||
}
|
||||
|
||||
pub fn insert_message(
|
||||
@@ -306,23 +348,30 @@ impl Thread {
|
||||
&mut self,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
request_kind: RequestKind,
|
||||
use_tools: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let mut request = self.to_completion_request(request_kind, cx);
|
||||
request.tools = {
|
||||
let mut tools = Vec::new();
|
||||
|
||||
if use_tools {
|
||||
request.tools = self
|
||||
.tools()
|
||||
.tools(cx)
|
||||
.into_iter()
|
||||
.map(|tool| LanguageModelRequestTool {
|
||||
if self.tools.is_scripting_tool_enabled() {
|
||||
tools.push(LanguageModelRequestTool {
|
||||
name: ScriptingTool::NAME.into(),
|
||||
description: ScriptingTool::DESCRIPTION.into(),
|
||||
input_schema: ScriptingTool::input_schema(),
|
||||
});
|
||||
}
|
||||
|
||||
tools.extend(self.tools().enabled_tools(cx).into_iter().map(|tool| {
|
||||
LanguageModelRequestTool {
|
||||
name: tool.name(),
|
||||
description: tool.description(),
|
||||
input_schema: tool.input_schema(),
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
tools
|
||||
};
|
||||
|
||||
self.stream_completion(request, model, cx);
|
||||
}
|
||||
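For reference, the call sites later in this diff drop the old `use_tools` boolean entirely; whether tools are attached to the request is now derived from the `ToolWorkingSet` inside `send_to_model`. A fragment of the call-site shape (the same form appears in `ActiveThread` and `MessageEditor` elsewhere in this diff):

```rust
// Call-site shape after this change (fragment from elsewhere in this diff);
// there is no per-call flag, tool availability comes from the ToolWorkingSet.
self.thread.update(cx, |thread, cx| {
    thread.send_to_model(model, RequestKind::Chat, cx)
});
```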
@@ -330,10 +379,27 @@ impl Thread {
|
||||
pub fn to_completion_request(
|
||||
&self,
|
||||
request_kind: RequestKind,
|
||||
_cx: &App,
|
||||
cx: &App,
|
||||
) -> LanguageModelRequest {
|
||||
let worktree_root_names = self
|
||||
.project
|
||||
.read(cx)
|
||||
.worktree_root_names(cx)
|
||||
.map(ToString::to_string)
|
||||
.collect::<Vec<_>>();
|
||||
let system_prompt = self
|
||||
.prompt_builder
|
||||
.generate_assistant_system_prompt(worktree_root_names)
|
||||
.context("failed to generate assistant system prompt")
|
||||
.log_err()
|
||||
.unwrap_or_default();
|
||||
|
||||
let mut request = LanguageModelRequest {
|
||||
messages: vec![],
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::System,
|
||||
content: vec![MessageContent::Text(system_prompt)],
|
||||
cache: true,
|
||||
}],
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
temperature: None,
|
||||
@@ -351,10 +417,13 @@ impl Thread {
|
||||
content: Vec::new(),
|
||||
cache: false,
|
||||
};
|
||||
|
||||
match request_kind {
|
||||
RequestKind::Chat => {
|
||||
self.tool_use
|
||||
.attach_tool_results(message.id, &mut request_message);
|
||||
self.scripting_tool_use
|
||||
.attach_tool_results(message.id, &mut request_message);
|
||||
}
|
||||
RequestKind::Summarize => {
|
||||
// We don't care about tool use during summarization.
|
||||
@@ -371,11 +440,13 @@ impl Thread {
|
||||
RequestKind::Chat => {
|
||||
self.tool_use
|
||||
.attach_tool_uses(message.id, &mut request_message);
|
||||
self.scripting_tool_use
|
||||
.attach_tool_uses(message.id, &mut request_message);
|
||||
}
|
||||
RequestKind::Summarize => {
|
||||
// We don't care about tool use during summarization.
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
request.messages.push(request_message);
|
||||
}
|
||||
@@ -439,7 +510,7 @@ impl Thread {
|
||||
// Importantly: We do *not* want to emit a `StreamedAssistantText` event here, as it
|
||||
// will result in duplicating the text of the chunk in the rendered Markdown.
|
||||
thread.insert_message(Role::Assistant, chunk, cx);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
LanguageModelCompletionEvent::ToolUse(tool_use) => {
|
||||
@@ -448,9 +519,15 @@ impl Thread {
|
||||
.iter()
|
||||
.rfind(|message| message.role == Role::Assistant)
|
||||
{
|
||||
thread
|
||||
.tool_use
|
||||
.request_tool_use(last_assistant_message.id, tool_use);
|
||||
if tool_use.name.as_ref() == ScriptingTool::NAME {
|
||||
thread
|
||||
.scripting_tool_use
|
||||
.request_tool_use(last_assistant_message.id, tool_use);
|
||||
} else {
|
||||
thread
|
||||
.tool_use
|
||||
.request_tool_use(last_assistant_message.id, tool_use);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -569,7 +646,9 @@ impl Thread {
|
||||
}
|
||||
|
||||
pub fn use_pending_tools(&mut self, cx: &mut Context<Self>) {
|
||||
let request = self.to_completion_request(RequestKind::Chat, cx);
|
||||
let pending_tool_uses = self
|
||||
.tool_use
|
||||
.pending_tool_uses()
|
||||
.into_iter()
|
||||
.filter(|tool_use| tool_use.status.is_idle())
|
||||
@@ -578,11 +657,50 @@ impl Thread {
|
||||
|
||||
for tool_use in pending_tool_uses {
|
||||
if let Some(tool) = self.tools.tool(&tool_use.name, cx) {
|
||||
let task = tool.run(tool_use.input, self.project.clone(), cx);
|
||||
let task = tool.run(tool_use.input, &request.messages, self.project.clone(), cx);
|
||||
|
||||
self.insert_tool_output(tool_use.id.clone(), task, cx);
|
||||
}
|
||||
}
|
||||
|
||||
let pending_scripting_tool_uses = self
|
||||
.scripting_tool_use
|
||||
.pending_tool_uses()
|
||||
.into_iter()
|
||||
.filter(|tool_use| tool_use.status.is_idle())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for scripting_tool_use in pending_scripting_tool_uses {
|
||||
let task = match ScriptingTool::deserialize_input(scripting_tool_use.input) {
|
||||
Err(err) => Task::ready(Err(err.into())),
|
||||
Ok(input) => {
|
||||
let (script_id, script_task) =
|
||||
self.scripting_session.update(cx, move |session, cx| {
|
||||
session.run_script(input.lua_script, cx)
|
||||
});
|
||||
|
||||
let session = self.scripting_session.clone();
|
||||
cx.spawn(|_, cx| async move {
|
||||
script_task.await;
|
||||
|
||||
let message = session.read_with(&cx, |session, _cx| {
|
||||
// Using an id to get the script output seems impractical.
// Why not just include it in the Task result?
// This is because we'll later report the script state as it runs,
// not just once it finishes, so the session keeps ownership of the output.
|
||||
session
|
||||
.get(script_id)
|
||||
.output_message_for_llm()
|
||||
.expect("Script shouldn't still be running")
|
||||
})?;
|
||||
|
||||
Ok(message)
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
self.insert_scripting_tool_output(scripting_tool_use.id.clone(), task, cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_tool_output(
|
||||
@@ -597,11 +715,14 @@ impl Thread {
|
||||
let output = output.await;
|
||||
thread
|
||||
.update(&mut cx, |thread, cx| {
|
||||
thread
|
||||
let pending_tool_use = thread
|
||||
.tool_use
|
||||
.insert_tool_output(tool_use_id.clone(), output);
|
||||
|
||||
cx.emit(ThreadEvent::ToolFinished { tool_use_id });
|
||||
cx.emit(ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
@@ -611,6 +732,35 @@ impl Thread {
|
||||
.run_pending_tool(tool_use_id, insert_output_task);
|
||||
}
|
||||
|
||||
pub fn insert_scripting_tool_output(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
output: Task<Result<String>>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let insert_output_task = cx.spawn(|thread, mut cx| {
|
||||
let tool_use_id = tool_use_id.clone();
|
||||
async move {
|
||||
let output = output.await;
|
||||
thread
|
||||
.update(&mut cx, |thread, cx| {
|
||||
let pending_tool_use = thread
|
||||
.scripting_tool_use
|
||||
.insert_tool_output(tool_use_id.clone(), output);
|
||||
|
||||
cx.emit(ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
});
|
||||
|
||||
self.scripting_tool_use
|
||||
.run_pending_tool(tool_use_id, insert_output_task);
|
||||
}
|
||||
|
||||
pub fn send_tool_results_to_model(
|
||||
&mut self,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
@@ -625,7 +775,7 @@ impl Thread {
|
||||
Vec::new(),
|
||||
cx,
|
||||
);
|
||||
self.send_to_model(model, RequestKind::Chat, true, cx);
|
||||
self.send_to_model(model, RequestKind::Chat, cx);
|
||||
}
|
||||
|
||||
/// Cancels the last pending completion, if there are any pending.
|
||||
@@ -660,6 +810,8 @@ pub enum ThreadEvent {
|
||||
ToolFinished {
|
||||
#[allow(unused)]
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
/// The pending tool use that corresponds to this tool.
|
||||
pending_tool_use: Option<PendingToolUse>,
|
||||
},
|
||||
}
|
||||
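As a usage reference, the `ActiveThread::handle_thread_event` hunk earlier in this diff consumes the new `pending_tool_use` field like this (fragment copied from that hunk, not a new API):

```rust
// Fragment from handle_thread_event earlier in this diff: the finished tool's
// PendingToolUse now travels with the event, so the UI can re-render its input.
ThreadEvent::ToolFinished {
    pending_tool_use, ..
} => {
    if let Some(tool_use) = pending_tool_use {
        self.render_scripting_tool_use_markdown(
            tool_use.id.clone(),
            tool_use.name.as_ref(),
            tool_use.input.clone(),
            window,
            cx,
        );
    }
    // ...
}
```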
|
||||
|
||||
@@ -16,6 +16,7 @@ use heed::types::{SerdeBincode, SerdeJson};
|
||||
use heed::Database;
|
||||
use language_model::{LanguageModelToolUseId, Role};
|
||||
use project::Project;
|
||||
use prompt_store::PromptBuilder;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::ResultExt as _;
|
||||
|
||||
@@ -28,6 +29,7 @@ pub fn init(cx: &mut App) {
|
||||
pub struct ThreadStore {
|
||||
project: Entity<Project>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
context_server_tool_ids: HashMap<Arc<str>, Vec<ToolId>>,
|
||||
threads: Vec<SavedThreadMetadata>,
|
||||
@@ -37,6 +39,7 @@ impl ThreadStore {
|
||||
pub fn new(
|
||||
project: Entity<Project>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: &mut App,
|
||||
) -> Result<Entity<Self>> {
|
||||
let this = cx.new(|cx| {
|
||||
@@ -48,6 +51,7 @@ impl ThreadStore {
|
||||
let this = Self {
|
||||
project,
|
||||
tools,
|
||||
prompt_builder,
|
||||
context_server_manager,
|
||||
context_server_tool_ids: HashMap::default(),
|
||||
threads: Vec::new(),
|
||||
@@ -77,7 +81,14 @@ impl ThreadStore {
|
||||
}
|
||||
|
||||
pub fn create_thread(&mut self, cx: &mut Context<Self>) -> Entity<Thread> {
|
||||
cx.new(|cx| Thread::new(self.project.clone(), self.tools.clone(), cx))
|
||||
cx.new(|cx| {
|
||||
Thread::new(
|
||||
self.project.clone(),
|
||||
self.tools.clone(),
|
||||
self.prompt_builder.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn open_thread(
|
||||
@@ -101,6 +112,7 @@ impl ThreadStore {
|
||||
thread,
|
||||
this.project.clone(),
|
||||
this.tools.clone(),
|
||||
this.prompt_builder.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -116,28 +128,35 @@ impl ThreadStore {
|
||||
updated_at: thread.updated_at(),
|
||||
messages: thread
|
||||
.messages()
|
||||
.map(|message| SavedMessage {
|
||||
id: message.id,
|
||||
role: message.role,
|
||||
text: message.text.clone(),
|
||||
tool_uses: thread
|
||||
.map(|message| {
|
||||
let all_tool_uses = thread
|
||||
.tool_uses_for_message(message.id)
|
||||
.into_iter()
|
||||
.chain(thread.scripting_tool_uses_for_message(message.id))
|
||||
.map(|tool_use| SavedToolUse {
|
||||
id: tool_use.id,
|
||||
name: tool_use.name,
|
||||
input: tool_use.input,
|
||||
})
|
||||
.collect(),
|
||||
tool_results: thread
|
||||
.collect();
|
||||
let all_tool_results = thread
|
||||
.tool_results_for_message(message.id)
|
||||
.into_iter()
|
||||
.chain(thread.scripting_tool_results_for_message(message.id))
|
||||
.map(|tool_result| SavedToolResult {
|
||||
tool_use_id: tool_result.tool_use_id.clone(),
|
||||
is_error: tool_result.is_error,
|
||||
content: tool_result.content.clone(),
|
||||
})
|
||||
.collect(),
|
||||
.collect();
|
||||
|
||||
SavedMessage {
|
||||
id: message.id,
|
||||
role: message.role,
|
||||
text: message.text.clone(),
|
||||
tool_uses: all_tool_uses,
|
||||
tool_results: all_tool_results,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
|
||||
113
crates/assistant2/src/tool_selector.rs
Normal file
@@ -0,0 +1,113 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_tool::{ToolSource, ToolWorkingSet};
|
||||
use gpui::Entity;
|
||||
use scripting_tool::ScriptingTool;
|
||||
use ui::{prelude::*, ContextMenu, IconButtonShape, PopoverMenu, Tooltip};
|
||||
|
||||
pub struct ToolSelector {
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
}
|
||||
|
||||
impl ToolSelector {
|
||||
pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self {
|
||||
Self { tools }
|
||||
}
|
||||
|
||||
fn build_context_menu(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<ContextMenu> {
|
||||
ContextMenu::build(window, cx, |mut menu, _window, cx| {
|
||||
let tools_by_source = self.tools.tools_by_source(cx);
|
||||
|
||||
let all_tools_enabled = self.tools.are_all_tools_enabled();
|
||||
menu = menu.header("Tools").toggleable_entry(
|
||||
"All Tools",
|
||||
all_tools_enabled,
|
||||
IconPosition::End,
|
||||
None,
|
||||
{
|
||||
let tools = self.tools.clone();
|
||||
move |_window, cx| {
|
||||
if all_tools_enabled {
|
||||
tools.disable_all_tools(cx);
|
||||
} else {
|
||||
tools.enable_all_tools();
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
for (source, tools) in tools_by_source {
|
||||
let mut tools = tools
|
||||
.into_iter()
|
||||
.map(|tool| {
|
||||
let source = tool.source();
|
||||
let name = tool.name().into();
|
||||
let is_enabled = self.tools.is_enabled(&source, &name);
|
||||
|
||||
(source, name, is_enabled)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if ToolSource::Native == source {
|
||||
tools.push((
|
||||
ToolSource::Native,
|
||||
ScriptingTool::NAME.into(),
|
||||
self.tools.is_scripting_tool_enabled(),
|
||||
));
|
||||
tools.sort_by(|(_, name_a, _), (_, name_b, _)| name_a.cmp(name_b));
|
||||
}
|
||||
|
||||
menu = match source {
|
||||
ToolSource::Native => menu.header("Zed"),
|
||||
ToolSource::ContextServer { id } => menu.separator().header(id),
|
||||
};
|
||||
|
||||
for (source, name, is_enabled) in tools {
|
||||
menu =
|
||||
menu.toggleable_entry(name.clone(), is_enabled, IconPosition::End, None, {
|
||||
let tools = self.tools.clone();
|
||||
move |_window, _cx| {
|
||||
if name.as_ref() == ScriptingTool::NAME {
|
||||
if is_enabled {
|
||||
tools.disable_scripting_tool();
|
||||
} else {
|
||||
tools.enable_scripting_tool();
|
||||
}
|
||||
} else {
|
||||
if is_enabled {
|
||||
tools.disable(source.clone(), &[name.clone()]);
|
||||
} else {
|
||||
tools.enable(source.clone(), &[name.clone()]);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
menu
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ToolSelector {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<'_, Self>) -> impl IntoElement {
|
||||
let this = cx.entity().clone();
|
||||
PopoverMenu::new("tool-selector")
|
||||
.menu(move |window, cx| {
|
||||
Some(this.update(cx, |this, cx| this.build_context_menu(window, cx)))
|
||||
})
|
||||
.trigger_with_tooltip(
|
||||
IconButton::new("tool-selector-button", IconName::SettingsAlt)
|
||||
.shape(IconButtonShape::Square)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted),
|
||||
Tooltip::text("Customize Tools"),
|
||||
)
|
||||
.anchor(gpui::Corner::BottomLeft)
|
||||
}
|
||||
}
|
||||
@@ -46,25 +46,39 @@ impl ToolUseState {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_saved_messages(messages: &[SavedMessage]) -> Self {
|
||||
/// Constructs a [`ToolUseState`] from the given list of [`SavedMessage`]s.
|
||||
///
|
||||
/// Accepts a function to filter the tools that should be used to populate the state.
|
||||
pub fn from_saved_messages(
|
||||
messages: &[SavedMessage],
|
||||
mut filter_by_tool_name: impl FnMut(&str) -> bool,
|
||||
) -> Self {
|
||||
let mut this = Self::new();
|
||||
let mut tool_names_by_id = HashMap::default();
|
||||
|
||||
for message in messages {
|
||||
match message.role {
|
||||
Role::Assistant => {
|
||||
if !message.tool_uses.is_empty() {
|
||||
this.tool_uses_by_assistant_message.insert(
|
||||
message.id,
|
||||
message
|
||||
.tool_uses
|
||||
let tool_uses = message
|
||||
.tool_uses
|
||||
.iter()
|
||||
.filter(|tool_use| (filter_by_tool_name)(tool_use.name.as_ref()))
|
||||
.map(|tool_use| LanguageModelToolUse {
|
||||
id: tool_use.id.clone(),
|
||||
name: tool_use.name.clone().into(),
|
||||
input: tool_use.input.clone(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
tool_names_by_id.extend(
|
||||
tool_uses
|
||||
.iter()
|
||||
.map(|tool_use| LanguageModelToolUse {
|
||||
id: tool_use.id.clone(),
|
||||
name: tool_use.name.clone().into(),
|
||||
input: tool_use.input.clone(),
|
||||
})
|
||||
.collect(),
|
||||
.map(|tool_use| (tool_use.id.clone(), tool_use.name.clone())),
|
||||
);
|
||||
|
||||
this.tool_uses_by_assistant_message
|
||||
.insert(message.id, tool_uses);
|
||||
}
|
||||
}
|
||||
Role::User => {
|
||||
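The `Thread::deserialize` hunk earlier in this diff shows how this predicate is used in practice: the same saved messages are replayed twice with complementary filters, so scripting-tool state and regular tool state stay separate.

```rust
// Call site from Thread::deserialize (earlier in this diff), shown here for reference.
let tool_use =
    ToolUseState::from_saved_messages(&saved.messages, |name| name != ScriptingTool::NAME);
let scripting_tool_use =
    ToolUseState::from_saved_messages(&saved.messages, |name| name == ScriptingTool::NAME);
```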
@@ -76,6 +90,14 @@ impl ToolUseState {
|
||||
|
||||
for tool_result in &message.tool_results {
|
||||
let tool_use_id = tool_result.tool_use_id.clone();
|
||||
let Some(tool_use) = tool_names_by_id.get(&tool_use_id) else {
|
||||
log::warn!("no tool name found for tool use: {tool_use_id:?}");
|
||||
continue;
|
||||
};
|
||||
|
||||
if !(filter_by_tool_name)(tool_use.as_ref()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
tool_uses_by_user_message.push(tool_use_id.clone());
|
||||
this.tool_results.insert(
|
||||
@@ -202,7 +224,7 @@ impl ToolUseState {
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
output: Result<String>,
|
||||
) {
|
||||
) -> Option<PendingToolUse> {
|
||||
match output {
|
||||
Ok(output) => {
|
||||
self.tool_results.insert(
|
||||
@@ -213,7 +235,7 @@ impl ToolUseState {
|
||||
is_error: false,
|
||||
},
|
||||
);
|
||||
self.pending_tool_uses_by_id.remove(&tool_use_id);
|
||||
self.pending_tool_uses_by_id.remove(&tool_use_id)
|
||||
}
|
||||
Err(err) => {
|
||||
self.tool_results.insert(
|
||||
@@ -228,6 +250,8 @@ impl ToolUseState {
|
||||
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
|
||||
tool_use.status = PendingToolUseStatus::Error(err.to_string().into());
|
||||
}
|
||||
|
||||
self.pending_tool_uses_by_id.get(&tool_use_id).cloned()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -267,6 +291,7 @@ impl ToolUseState {
|
||||
pub struct PendingToolUse {
|
||||
pub id: LanguageModelToolUseId,
|
||||
/// The ID of the Assistant message in which the tool use was requested.
|
||||
#[allow(unused)]
|
||||
pub assistant_message_id: MessageId,
|
||||
pub name: Arc<str>,
|
||||
pub input: serde_json::Value,
|
||||
|
||||
@@ -647,7 +647,6 @@ impl AssistantContext {
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
id: ContextId,
|
||||
replica_id: ReplicaId,
|
||||
@@ -768,7 +767,6 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn deserialize(
|
||||
saved_context: SavedContext,
|
||||
path: PathBuf,
|
||||
|
||||
@@ -535,7 +535,6 @@ impl ContextEditor {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn run_command(
|
||||
&mut self,
|
||||
command_range: Range<language::Anchor>,
|
||||
@@ -2057,7 +2056,6 @@ impl ContextEditor {
|
||||
.unwrap_or_else(|| Cow::Borrowed(DEFAULT_TAB_TITLE))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_patch_block(
|
||||
&mut self,
|
||||
range: Range<text::Anchor>,
|
||||
|
||||
@@ -134,7 +134,6 @@ impl SlashCommandCompletionProvider {
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn complete_command_argument(
|
||||
&self,
|
||||
command_name: &str,
|
||||
|
||||
@@ -62,6 +62,7 @@ pub struct AssistantSettings {
|
||||
pub default_width: Pixels,
|
||||
pub default_height: Pixels,
|
||||
pub default_model: LanguageModelSelection,
|
||||
pub editor_model: LanguageModelSelection,
|
||||
pub inline_alternatives: Vec<LanguageModelSelection>,
|
||||
pub using_outdated_settings_version: bool,
|
||||
pub enable_experimental_live_diffs: bool,
|
||||
@@ -162,6 +163,7 @@ impl AssistantSettingsContent {
|
||||
})
|
||||
}
|
||||
}),
|
||||
editor_model: None,
|
||||
inline_alternatives: None,
|
||||
enable_experimental_live_diffs: None,
|
||||
},
|
||||
@@ -182,6 +184,7 @@ impl AssistantSettingsContent {
|
||||
.id()
|
||||
.to_string(),
|
||||
}),
|
||||
editor_model: None,
|
||||
inline_alternatives: None,
|
||||
enable_experimental_live_diffs: None,
|
||||
},
|
||||
@@ -310,6 +313,7 @@ impl Default for VersionedAssistantSettingsContent {
|
||||
default_width: None,
|
||||
default_height: None,
|
||||
default_model: None,
|
||||
editor_model: None,
|
||||
inline_alternatives: None,
|
||||
enable_experimental_live_diffs: None,
|
||||
})
|
||||
@@ -340,6 +344,8 @@ pub struct AssistantSettingsContentV2 {
|
||||
default_height: Option<f32>,
|
||||
/// The default model to use when creating new chats.
|
||||
default_model: Option<LanguageModelSelection>,
|
||||
/// The model to use when applying edits from the assistant.
|
||||
editor_model: Option<LanguageModelSelection>,
|
||||
/// Additional models with which to generate alternatives when performing inline assists.
|
||||
inline_alternatives: Option<Vec<LanguageModelSelection>>,
|
||||
/// Enable experimental live diffs in the assistant panel.
|
||||
@@ -470,6 +476,7 @@ impl Settings for AssistantSettings {
|
||||
value.default_height.map(Into::into),
|
||||
);
|
||||
merge(&mut settings.default_model, value.default_model);
|
||||
merge(&mut settings.editor_model, value.editor_model);
|
||||
merge(&mut settings.inline_alternatives, value.inline_alternatives);
|
||||
merge(
|
||||
&mut settings.enable_experimental_live_diffs,
|
||||
@@ -528,6 +535,10 @@ mod tests {
|
||||
provider: "test-provider".into(),
|
||||
model: "gpt-99".into(),
|
||||
}),
|
||||
editor_model: Some(LanguageModelSelection {
|
||||
provider: "test-provider".into(),
|
||||
model: "gpt-99".into(),
|
||||
}),
|
||||
inline_alternatives: None,
|
||||
enabled: None,
|
||||
button: None,
|
||||
|
||||
@@ -88,7 +88,6 @@ pub trait SlashCommand: 'static + Send + Sync {
|
||||
fn accepts_arguments(&self) -> bool {
|
||||
self.requires_argument()
|
||||
}
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
|
||||
@@ -15,6 +15,7 @@ path = "src/assistant_tool.rs"
|
||||
anyhow.workspace = true
|
||||
collections.workspace = true
|
||||
derive_more.workspace = true
|
||||
language_model.workspace = true
|
||||
gpui.workspace = true
|
||||
parking_lot.workspace = true
|
||||
project.workspace = true
|
||||
|
||||
@@ -4,7 +4,8 @@ mod tool_working_set;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use gpui::{App, Entity, Task};
|
||||
use gpui::{App, Entity, SharedString, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
|
||||
pub use crate::tool_registry::*;
|
||||
@@ -14,6 +15,14 @@ pub fn init(cx: &mut App) {
|
||||
ToolRegistry::default_global(cx);
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)]
|
||||
pub enum ToolSource {
|
||||
/// A native tool built-in to Zed.
|
||||
Native,
|
||||
/// A tool provided by a context server.
|
||||
ContextServer { id: SharedString },
|
||||
}
|
||||
|
||||
/// A tool that can be used by a language model.
|
||||
pub trait Tool: 'static + Send + Sync {
|
||||
/// Returns the name of the tool.
|
||||
@@ -22,6 +31,11 @@ pub trait Tool: 'static + Send + Sync {
|
||||
/// Returns the description of the tool.
|
||||
fn description(&self) -> String;
|
||||
|
||||
/// Returns the source of the tool.
|
||||
fn source(&self) -> ToolSource {
|
||||
ToolSource::Native
|
||||
}
|
||||
|
||||
/// Returns the JSON schema that describes the tool's input.
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
serde_json::Value::Object(serde_json::Map::default())
|
||||
@@ -31,6 +45,7 @@ pub trait Tool: 'static + Send + Sync {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>>;
|
||||
|
||||
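For reference, a minimal sketch of a tool provided by a context server, based on the `Tool` trait and `ToolSource` enum above; the `EchoTool` struct and the `"example-server"` id are hypothetical, and only `source()` departs from the trait's defaults:

```rust
use std::sync::Arc;

use anyhow::Result;
use assistant_tool::{Tool, ToolSource};
use gpui::{App, Entity, Task};
use language_model::LanguageModelRequestMessage;
use project::Project;

struct EchoTool;

impl Tool for EchoTool {
    fn name(&self) -> String {
        "echo".into()
    }

    fn description(&self) -> String {
        "Echoes the given input back verbatim.".into()
    }

    // Report the context server this tool came from instead of the default `ToolSource::Native`.
    fn source(&self) -> ToolSource {
        ToolSource::ContextServer {
            id: "example-server".into(),
        }
    }

    fn run(
        self: Arc<Self>,
        input: serde_json::Value,
        _messages: &[LanguageModelRequestMessage],
        _project: Entity<Project>,
        _cx: &mut App,
    ) -> Task<Result<String>> {
        Task::ready(Ok(input.to_string()))
    }
}
```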
@@ -1,10 +1,10 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use collections::HashMap;
|
||||
use collections::{HashMap, HashSet, IndexMap};
|
||||
use gpui::App;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
use crate::{Tool, ToolRegistry};
|
||||
use crate::{Tool, ToolRegistry, ToolSource};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Default)]
|
||||
pub struct ToolId(usize);
|
||||
@@ -15,13 +15,26 @@ pub struct ToolWorkingSet {
|
||||
state: Mutex<WorkingSetState>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct WorkingSetState {
|
||||
context_server_tools_by_id: HashMap<ToolId, Arc<dyn Tool>>,
|
||||
context_server_tools_by_name: HashMap<String, Arc<dyn Tool>>,
|
||||
disabled_tools_by_source: HashMap<ToolSource, HashSet<Arc<str>>>,
|
||||
is_scripting_tool_disabled: bool,
|
||||
next_tool_id: ToolId,
|
||||
}
|
||||
|
||||
impl Default for WorkingSetState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
context_server_tools_by_id: HashMap::default(),
|
||||
context_server_tools_by_name: HashMap::default(),
|
||||
disabled_tools_by_source: HashMap::default(),
|
||||
is_scripting_tool_disabled: true,
|
||||
next_tool_id: ToolId::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToolWorkingSet {
|
||||
pub fn tool(&self, name: &str, cx: &App) -> Option<Arc<dyn Tool>> {
|
||||
self.state
|
||||
@@ -45,24 +58,125 @@ impl ToolWorkingSet {
|
||||
tools
|
||||
}
|
||||
|
||||
pub fn insert(&self, command: Arc<dyn Tool>) -> ToolId {
|
||||
pub fn are_all_tools_enabled(&self) -> bool {
|
||||
let state = self.state.lock();
|
||||
|
||||
state.disabled_tools_by_source.is_empty() && !state.is_scripting_tool_disabled
|
||||
}
|
||||
|
||||
pub fn enable_all_tools(&self) {
|
||||
let mut state = self.state.lock();
|
||||
let command_id = state.next_tool_id;
|
||||
|
||||
state.disabled_tools_by_source.clear();
|
||||
state.is_scripting_tool_disabled = false;
|
||||
}
|
||||
|
||||
pub fn disable_all_tools(&self, cx: &App) {
|
||||
let tools = self.tools_by_source(cx);
|
||||
|
||||
for (source, tools) in tools {
|
||||
let tool_names = tools
|
||||
.into_iter()
|
||||
.map(|tool| tool.name().into())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
self.disable(source, &tool_names);
|
||||
}
|
||||
|
||||
self.disable_scripting_tool();
|
||||
}
|
||||
|
||||
pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
|
||||
let all_tools = self.tools(cx);
|
||||
|
||||
all_tools
|
||||
.into_iter()
|
||||
.filter(|tool| self.is_enabled(&tool.source(), &tool.name().into()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn tools_by_source(&self, cx: &App) -> IndexMap<ToolSource, Vec<Arc<dyn Tool>>> {
|
||||
let mut tools_by_source = IndexMap::default();
|
||||
|
||||
for tool in self.tools(cx) {
|
||||
tools_by_source
|
||||
.entry(tool.source())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(tool);
|
||||
}
|
||||
|
||||
for tools in tools_by_source.values_mut() {
|
||||
tools.sort_by_key(|tool| tool.name());
|
||||
}
|
||||
|
||||
tools_by_source.sort_unstable_keys();
|
||||
|
||||
tools_by_source
|
||||
}
|
||||
|
||||
pub fn insert(&self, tool: Arc<dyn Tool>) -> ToolId {
|
||||
let mut state = self.state.lock();
|
||||
let tool_id = state.next_tool_id;
|
||||
state.next_tool_id.0 += 1;
|
||||
state
|
||||
.context_server_tools_by_id
|
||||
.insert(command_id, command.clone());
|
||||
.insert(tool_id, tool.clone());
|
||||
state.tools_changed();
|
||||
command_id
|
||||
tool_id
|
||||
}
|
||||
|
||||
pub fn remove(&self, command_ids_to_remove: &[ToolId]) {
|
||||
pub fn is_enabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
|
||||
!self.is_disabled(source, name)
|
||||
}
|
||||
|
||||
pub fn is_disabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
|
||||
let state = self.state.lock();
|
||||
state
|
||||
.disabled_tools_by_source
|
||||
.get(source)
|
||||
.map_or(false, |disabled_tools| disabled_tools.contains(name))
|
||||
}
|
||||
|
||||
pub fn enable(&self, source: ToolSource, tools_to_enable: &[Arc<str>]) {
|
||||
let mut state = self.state.lock();
|
||||
state
|
||||
.disabled_tools_by_source
|
||||
.entry(source)
|
||||
.or_default()
|
||||
.retain(|name| !tools_to_enable.contains(name));
|
||||
}
|
||||
|
||||
pub fn disable(&self, source: ToolSource, tools_to_disable: &[Arc<str>]) {
|
||||
let mut state = self.state.lock();
|
||||
state
|
||||
.disabled_tools_by_source
|
||||
.entry(source)
|
||||
.or_default()
|
||||
.extend(tools_to_disable.into_iter().cloned());
|
||||
}
|
||||
|
||||
pub fn remove(&self, tool_ids_to_remove: &[ToolId]) {
|
||||
let mut state = self.state.lock();
|
||||
state
|
||||
.context_server_tools_by_id
|
||||
.retain(|id, _| !command_ids_to_remove.contains(id));
|
||||
.retain(|id, _| !tool_ids_to_remove.contains(id));
|
||||
state.tools_changed();
|
||||
}
|
||||
|
||||
pub fn is_scripting_tool_enabled(&self) -> bool {
|
||||
let state = self.state.lock();
|
||||
!state.is_scripting_tool_disabled
|
||||
}
|
||||
|
||||
pub fn enable_scripting_tool(&self) {
|
||||
let mut state = self.state.lock();
|
||||
state.is_scripting_tool_disabled = false;
|
||||
}
|
||||
|
||||
pub fn disable_scripting_tool(&self) {
|
||||
let mut state = self.state.lock();
|
||||
state.is_scripting_tool_disabled = true;
|
||||
}
|
||||
}
|
||||
|
||||
impl WorkingSetState {
|
||||
@@ -71,7 +185,7 @@ impl WorkingSetState {
|
||||
self.context_server_tools_by_name.extend(
|
||||
self.context_server_tools_by_id
|
||||
.values()
|
||||
.map(|command| (command.name(), command.clone())),
|
||||
.map(|tool| (tool.name(), tool.clone())),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,8 +15,20 @@ path = "src/assistant_tools.rs"
|
||||
anyhow.workspace = true
|
||||
assistant_tool.workspace = true
|
||||
chrono.workspace = true
|
||||
collections.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
project.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
rand.workspace = true
|
||||
collections = { workspace = true, features = ["test-support"] }
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -1,19 +1,25 @@
|
||||
mod list_worktrees_tool;
|
||||
mod edit_files_tool;
|
||||
mod list_directory_tool;
|
||||
mod now_tool;
|
||||
mod read_file_tool;
|
||||
mod regex_search;
|
||||
|
||||
use assistant_tool::ToolRegistry;
|
||||
use gpui::App;
|
||||
|
||||
use crate::list_worktrees_tool::ListWorktreesTool;
|
||||
use crate::edit_files_tool::EditFilesTool;
|
||||
use crate::list_directory_tool::ListDirectoryTool;
|
||||
use crate::now_tool::NowTool;
|
||||
use crate::read_file_tool::ReadFileTool;
|
||||
use crate::regex_search::RegexSearchTool;
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
assistant_tool::init(cx);
|
||||
|
||||
let registry = ToolRegistry::global(cx);
|
||||
registry.register_tool(NowTool);
|
||||
registry.register_tool(ListWorktreesTool);
|
||||
registry.register_tool(ReadFileTool);
|
||||
registry.register_tool(ListDirectoryTool);
|
||||
registry.register_tool(EditFilesTool);
|
||||
registry.register_tool(RegexSearchTool);
|
||||
}
|
||||
|
||||
178
crates/assistant_tools/src/edit_files_tool.rs
Normal file
@@ -0,0 +1,178 @@
|
||||
mod edit_action;
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use assistant_tool::Tool;
|
||||
use collections::HashSet;
|
||||
use edit_action::{EditAction, EditActionParser};
|
||||
use futures::StreamExt;
|
||||
use gpui::{App, Entity, Task};
|
||||
use language_model::{
|
||||
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
|
||||
};
|
||||
use project::{Project, ProjectPath};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct EditFilesToolInput {
|
||||
/// High-level edit instructions. These will be interpreted by a smaller model,
|
||||
/// so explain the edits you want that model to make and which files need changing.
|
||||
/// The description should be concise and clear. We will show this description to the user
|
||||
/// as well.
|
||||
///
|
||||
/// <example>
|
||||
/// If you want to rename a function you can say "Rename the function 'foo' to 'bar'".
|
||||
/// </example>
|
||||
///
|
||||
/// <example>
|
||||
/// If you want to add a new function you can say "Add a new method to the `User` struct that prints the age".
|
||||
/// </example>
|
||||
pub edit_instructions: String,
|
||||
}
|
||||
|
||||
pub struct EditFilesTool;
|
||||
|
||||
impl Tool for EditFilesTool {
|
||||
fn name(&self) -> String {
|
||||
"edit-files".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./edit_files_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(EditFilesToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let input = match serde_json::from_value::<EditFilesToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let Some(model) = model_registry.editor_model() else {
|
||||
return Task::ready(Err(anyhow!("No editor model configured")));
|
||||
};
|
||||
|
||||
let mut messages = messages.to_vec();
|
||||
if let Some(last_message) = messages.last_mut() {
|
||||
// Strip out tool use from the last message because we're in the middle of executing a tool call.
|
||||
last_message
|
||||
.content
|
||||
.retain(|content| !matches!(content, language_model::MessageContent::ToolUse(_)))
|
||||
}
|
||||
messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![
|
||||
include_str!("./edit_files_tool/edit_prompt.md").into(),
|
||||
input.edit_instructions.into(),
|
||||
],
|
||||
cache: false,
|
||||
});
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let request = LanguageModelRequest {
|
||||
messages,
|
||||
tools: vec![],
|
||||
stop: vec![],
|
||||
temperature: None,
|
||||
};
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
|
||||
let stream = model.stream_completion_text(request, &cx);
|
||||
let mut chunks = stream.await?;
|
||||
|
||||
let mut changed_buffers = HashSet::default();
|
||||
let mut applied_edits = 0;
|
||||
|
||||
while let Some(chunk) = chunks.stream.next().await {
|
||||
for action in parser.parse_chunk(&chunk?) {
|
||||
let project_path = project.read_with(&cx, |project, cx| {
|
||||
let worktree_root_name = action
|
||||
.file_path()
|
||||
.components()
|
||||
.next()
|
||||
.context("Invalid path")?;
|
||||
let worktree = project
|
||||
.worktree_for_root_name(
|
||||
&worktree_root_name.as_os_str().to_string_lossy(),
|
||||
cx,
|
||||
)
|
||||
.context("Directory not found in project")?;
|
||||
anyhow::Ok(ProjectPath {
|
||||
worktree_id: worktree.read(cx).id(),
|
||||
path: Arc::from(
|
||||
action.file_path().strip_prefix(worktree_root_name).unwrap(),
|
||||
),
|
||||
})
|
||||
})??;
|
||||
|
||||
let buffer = project
|
||||
.update(&mut cx, |project, cx| project.open_buffer(project_path, cx))?
|
||||
.await?;
|
||||
|
||||
let diff = buffer
|
||||
.read_with(&cx, |buffer, cx| {
|
||||
let new_text = match action {
|
||||
EditAction::Replace { old, new, .. } => {
|
||||
// TODO: Replace in background?
|
||||
buffer.text().replace(&old, &new)
|
||||
}
|
||||
EditAction::Write { content, .. } => content,
|
||||
};
|
||||
|
||||
buffer.diff(new_text, cx)
|
||||
})?
|
||||
.await;
|
||||
|
||||
let _clock =
|
||||
buffer.update(&mut cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
|
||||
|
||||
changed_buffers.insert(buffer);
|
||||
|
||||
applied_edits += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Save each buffer once at the end
|
||||
for buffer in changed_buffers {
|
||||
project
|
||||
.update(&mut cx, |project, cx| project.save_buffer(buffer, cx))?
|
||||
.await?;
|
||||
}
|
||||
|
||||
let errors = parser.errors();
|
||||
|
||||
if errors.is_empty() {
|
||||
Ok("Successfully applied all edits".into())
|
||||
} else {
|
||||
let error_message = errors
|
||||
.iter()
|
||||
.map(|e| e.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
|
||||
if applied_edits > 0 {
|
||||
Err(anyhow!(
|
||||
"Applied {} edit(s), but some blocks failed to parse:\n{}",
|
||||
applied_edits,
|
||||
error_message
|
||||
))
|
||||
} else {
|
||||
Err(anyhow!(error_message))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
Edit files in the current project.
|
||||
|
||||
When using this tool, you should suggest one coherent edit that can be made to the codebase.
|
||||
|
||||
When the set of edits you want to make is large or complex, feel free to invoke this tool multiple times, each time focusing on a specific change you want to make.
|
||||
907
crates/assistant_tools/src/edit_files_tool/edit_action.rs
Normal file
@@ -0,0 +1,907 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
use util::ResultExt;
|
||||
|
||||
/// Represents an edit action to be performed on a file.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum EditAction {
|
||||
/// Replace specific content in a file with new content
|
||||
Replace {
|
||||
file_path: PathBuf,
|
||||
old: String,
|
||||
new: String,
|
||||
},
|
||||
/// Write content to a file (create or overwrite)
|
||||
Write { file_path: PathBuf, content: String },
|
||||
}
|
||||
|
||||
impl EditAction {
|
||||
pub fn file_path(&self) -> &Path {
|
||||
match self {
|
||||
EditAction::Replace { file_path, .. } => file_path,
|
||||
EditAction::Write { file_path, .. } => file_path,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses edit actions from an LLM response.
|
||||
/// See system.md for more details on the format.
|
||||
#[derive(Debug)]
|
||||
pub struct EditActionParser {
|
||||
state: State,
|
||||
pre_fence_line: Vec<u8>,
|
||||
marker_ix: usize,
|
||||
line: usize,
|
||||
column: usize,
|
||||
old_bytes: Vec<u8>,
|
||||
new_bytes: Vec<u8>,
|
||||
errors: Vec<ParseError>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
enum State {
|
||||
/// Anywhere outside an action
|
||||
Default,
|
||||
/// After opening ```, in optional language tag
|
||||
OpenFence,
|
||||
/// In SEARCH marker
|
||||
SearchMarker,
|
||||
/// In search block or divider
|
||||
SearchBlock,
|
||||
/// In replace block or REPLACE marker
|
||||
ReplaceBlock,
|
||||
/// In closing ```
|
||||
CloseFence,
|
||||
}
|
||||
|
||||
impl EditActionParser {
|
||||
/// Creates a new `EditActionParser`
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
state: State::Default,
|
||||
pre_fence_line: Vec::new(),
|
||||
marker_ix: 0,
|
||||
line: 1,
|
||||
column: 0,
|
||||
old_bytes: Vec::new(),
|
||||
new_bytes: Vec::new(),
|
||||
errors: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Processes a chunk of input text and returns any completed edit actions.
|
||||
///
|
||||
/// This method can be called repeatedly with fragments of input. The parser
|
||||
/// maintains its state between calls, allowing you to process streaming input
|
||||
/// as it becomes available. Actions are only inserted once they are fully parsed.
|
||||
///
|
||||
/// If a block fails to parse, it will simply be skipped and an error will be recorded.
|
||||
/// All errors can be accessed through the `EditActionParser::errors` method.
|
||||
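///
/// A minimal usage sketch (the chunk boundary below is arbitrary; a streaming
/// caller would feed whatever fragments arrive):
///
/// ```ignore
/// let mut parser = EditActionParser::new();
/// let mut actions = Vec::new();
/// for chunk in ["src/main.rs\n```\n<<<<<<< SEARCH\nfn a() {}\n===", "====\nfn b() {}\n>>>>>>> REPLACE\n```\n"] {
///     actions.extend(parser.parse_chunk(chunk));
/// }
/// assert_eq!(actions.len(), 1);
/// assert!(parser.errors().is_empty());
/// ```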
pub fn parse_chunk(&mut self, input: &str) -> Vec<EditAction> {
|
||||
use State::*;
|
||||
|
||||
const FENCE: &[u8] = b"```";
|
||||
const SEARCH_MARKER: &[u8] = b"<<<<<<< SEARCH";
|
||||
const DIVIDER: &[u8] = b"=======";
|
||||
const NL_DIVIDER: &[u8] = b"\n=======";
|
||||
const REPLACE_MARKER: &[u8] = b">>>>>>> REPLACE";
|
||||
const NL_REPLACE_MARKER: &[u8] = b"\n>>>>>>> REPLACE";
|
||||
|
||||
let mut actions = Vec::new();
|
||||
|
||||
for byte in input.bytes() {
|
||||
// Update line and column tracking
|
||||
if byte == b'\n' {
|
||||
self.line += 1;
|
||||
self.column = 0;
|
||||
} else {
|
||||
self.column += 1;
|
||||
}
|
||||
|
||||
match &self.state {
|
||||
Default => match match_marker(byte, FENCE, false, &mut self.marker_ix) {
|
||||
MarkerMatch::Complete => {
|
||||
self.to_state(OpenFence);
|
||||
}
|
||||
MarkerMatch::Partial => {}
|
||||
MarkerMatch::None => {
|
||||
if self.marker_ix > 0 {
|
||||
self.marker_ix = 0;
|
||||
} else if self.pre_fence_line.ends_with(b"\n") {
|
||||
self.pre_fence_line.clear();
|
||||
}
|
||||
|
||||
self.pre_fence_line.push(byte);
|
||||
}
|
||||
},
|
||||
OpenFence => {
|
||||
// skip language tag
|
||||
if byte == b'\n' {
|
||||
self.to_state(SearchMarker);
|
||||
}
|
||||
}
|
||||
SearchMarker => {
|
||||
if self.expect_marker(byte, SEARCH_MARKER, true) {
|
||||
self.to_state(SearchBlock);
|
||||
}
|
||||
}
|
||||
SearchBlock => {
|
||||
if collect_until_marker(
|
||||
byte,
|
||||
DIVIDER,
|
||||
NL_DIVIDER,
|
||||
true,
|
||||
&mut self.marker_ix,
|
||||
&mut self.old_bytes,
|
||||
) {
|
||||
self.to_state(ReplaceBlock);
|
||||
}
|
||||
}
|
||||
ReplaceBlock => {
|
||||
if collect_until_marker(
|
||||
byte,
|
||||
REPLACE_MARKER,
|
||||
NL_REPLACE_MARKER,
|
||||
true,
|
||||
&mut self.marker_ix,
|
||||
&mut self.new_bytes,
|
||||
) {
|
||||
self.to_state(CloseFence);
|
||||
}
|
||||
}
|
||||
CloseFence => {
|
||||
if self.expect_marker(byte, FENCE, false) {
|
||||
if let Some(action) = self.action() {
|
||||
actions.push(action);
|
||||
}
|
||||
self.errors();
|
||||
self.reset();
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
actions
|
||||
}
|
||||
|
||||
/// Returns a reference to the errors encountered during parsing.
|
||||
pub fn errors(&self) -> &[ParseError] {
|
||||
&self.errors
|
||||
}
|
||||
|
||||
fn action(&mut self) -> Option<EditAction> {
|
||||
if self.old_bytes.is_empty() && self.new_bytes.is_empty() {
|
||||
self.push_error(ParseErrorKind::NoOp);
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut pre_fence_line = std::mem::take(&mut self.pre_fence_line);
|
||||
|
||||
if pre_fence_line.ends_with(b"\n") {
|
||||
pre_fence_line.pop();
|
||||
pop_carriage_return(&mut pre_fence_line);
|
||||
}
|
||||
|
||||
let file_path = PathBuf::from(String::from_utf8(pre_fence_line).log_err()?);
|
||||
let content = String::from_utf8(std::mem::take(&mut self.new_bytes)).log_err()?;
|
||||
|
||||
if self.old_bytes.is_empty() {
|
||||
Some(EditAction::Write { file_path, content })
|
||||
} else {
|
||||
let old = String::from_utf8(std::mem::take(&mut self.old_bytes)).log_err()?;
|
||||
|
||||
Some(EditAction::Replace {
|
||||
file_path,
|
||||
old,
|
||||
new: content,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn expect_marker(&mut self, byte: u8, marker: &'static [u8], trailing_newline: bool) -> bool {
|
||||
match match_marker(byte, marker, trailing_newline, &mut self.marker_ix) {
|
||||
MarkerMatch::Complete => true,
|
||||
MarkerMatch::Partial => false,
|
||||
MarkerMatch::None => {
|
||||
self.push_error(ParseErrorKind::ExpectedMarker {
|
||||
expected: marker,
|
||||
found: byte,
|
||||
});
|
||||
self.reset();
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn to_state(&mut self, state: State) {
|
||||
self.state = state;
|
||||
self.marker_ix = 0;
|
||||
}
|
||||
|
||||
fn reset(&mut self) {
|
||||
self.pre_fence_line.clear();
|
||||
self.old_bytes.clear();
|
||||
self.new_bytes.clear();
|
||||
self.to_state(State::Default);
|
||||
}
|
||||
|
||||
fn push_error(&mut self, kind: ParseErrorKind) {
|
||||
self.errors.push(ParseError {
|
||||
line: self.line,
|
||||
column: self.column,
|
||||
kind,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum MarkerMatch {
|
||||
None,
|
||||
Partial,
|
||||
Complete,
|
||||
}
|
||||
|
||||
fn match_marker(
|
||||
byte: u8,
|
||||
marker: &[u8],
|
||||
trailing_newline: bool,
|
||||
marker_ix: &mut usize,
|
||||
) -> MarkerMatch {
|
||||
if trailing_newline && *marker_ix >= marker.len() {
|
||||
if byte == b'\n' {
|
||||
MarkerMatch::Complete
|
||||
} else if byte == b'\r' {
|
||||
MarkerMatch::Partial
|
||||
} else {
|
||||
MarkerMatch::None
|
||||
}
|
||||
} else if byte == marker[*marker_ix] {
|
||||
*marker_ix += 1;
|
||||
|
||||
if *marker_ix < marker.len() || trailing_newline {
|
||||
MarkerMatch::Partial
|
||||
} else {
|
||||
MarkerMatch::Complete
|
||||
}
|
||||
} else {
|
||||
MarkerMatch::None
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_until_marker(
|
||||
byte: u8,
|
||||
marker: &[u8],
|
||||
nl_marker: &[u8],
|
||||
trailing_newline: bool,
|
||||
marker_ix: &mut usize,
|
||||
buf: &mut Vec<u8>,
|
||||
) -> bool {
|
||||
let marker = if buf.is_empty() {
|
||||
// do not require another newline if block is empty
|
||||
marker
|
||||
} else {
|
||||
nl_marker
|
||||
};
|
||||
|
||||
match match_marker(byte, marker, trailing_newline, marker_ix) {
|
||||
MarkerMatch::Complete => {
|
||||
pop_carriage_return(buf);
|
||||
true
|
||||
}
|
||||
MarkerMatch::Partial => false,
|
||||
MarkerMatch::None => {
|
||||
if *marker_ix > 0 {
|
||||
buf.extend_from_slice(&marker[..*marker_ix]);
|
||||
*marker_ix = 0;
|
||||
|
||||
// The beginning of marker might match current byte
|
||||
match match_marker(byte, marker, trailing_newline, marker_ix) {
|
||||
MarkerMatch::Complete => return true,
|
||||
MarkerMatch::Partial => return false,
|
||||
MarkerMatch::None => { /* no match, keep collecting */ }
|
||||
}
|
||||
}
|
||||
|
||||
buf.push(byte);
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn pop_carriage_return(buf: &mut Vec<u8>) {
|
||||
if buf.ends_with(b"\r") {
|
||||
buf.pop();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct ParseError {
|
||||
line: usize,
|
||||
column: usize,
|
||||
kind: ParseErrorKind,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum ParseErrorKind {
|
||||
ExpectedMarker { expected: &'static [u8], found: u8 },
|
||||
NoOp,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ParseErrorKind {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ParseErrorKind::ExpectedMarker { expected, found } => {
|
||||
write!(
|
||||
f,
|
||||
"Expected marker {:?}, found {:?}",
|
||||
String::from_utf8_lossy(expected),
|
||||
*found as char
|
||||
)
|
||||
}
|
||||
ParseErrorKind::NoOp => {
|
||||
write!(f, "No search or replace")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ParseError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "input:{}:{}: {}", self.line, self.column, self.kind)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rand::prelude::*;
|
||||
|
||||
#[test]
|
||||
fn test_simple_edit_action() {
|
||||
let input = r#"src/main.rs
|
||||
```
|
||||
<<<<<<< SEARCH
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {}".to_string(),
|
||||
new: "fn replacement() {}".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_with_language_tag() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {}".to_string(),
|
||||
new: "fn replacement() {}".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_with_surrounding_text() {
|
||||
let input = r#"Here's a modification I'd like to make to the file:
|
||||
|
||||
src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
This change makes the function better.
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {}".to_string(),
|
||||
new: "fn replacement() {}".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiple_edit_actions() {
|
||||
let input = r#"First change:
|
||||
src/main.rs
|
||||
```
|
||||
<<<<<<< SEARCH
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
Second change:
|
||||
src/utils.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn old_util() -> bool { false }
|
||||
=======
|
||||
fn new_util() -> bool { true }
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_eq!(actions.len(), 2);
|
||||
assert_eq!(
|
||||
actions[0],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {}".to_string(),
|
||||
new: "fn replacement() {}".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
actions[1],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/utils.rs"),
|
||||
old: "fn old_util() -> bool { false }".to_string(),
|
||||
new: "fn new_util() -> bool { true }".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiline() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn original() {
|
||||
println!("This is the original function");
|
||||
let x = 42;
|
||||
if x > 0 {
|
||||
println!("Positive number");
|
||||
}
|
||||
}
|
||||
=======
|
||||
fn replacement() {
|
||||
println!("This is the replacement function");
|
||||
let x = 100;
|
||||
if x > 50 {
|
||||
println!("Large number");
|
||||
} else {
|
||||
println!("Small number");
|
||||
}
|
||||
}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {\n println!(\"This is the original function\");\n let x = 42;\n if x > 0 {\n println!(\"Positive number\");\n }\n}".to_string(),
|
||||
new: "fn replacement() {\n println!(\"This is the replacement function\");\n let x = 100;\n if x > 50 {\n println!(\"Large number\");\n } else {\n println!(\"Small number\");\n }\n}".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_action() {
|
||||
let input = r#"Create a new main.rs file:
|
||||
|
||||
src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
=======
|
||||
fn new_function() {
|
||||
println!("This function is being added");
|
||||
}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0],
|
||||
EditAction::Write {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
content: "fn new_function() {\n println!(\"This function is being added\");\n}"
|
||||
.to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_replace() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn this_will_be_deleted() {
|
||||
println!("Deleting this function");
|
||||
}
|
||||
=======
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(&input);
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn this_will_be_deleted() {\n println!(\"Deleting this function\");\n}"
|
||||
.to_string(),
|
||||
new: "".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
|
||||
let actions = parser.parse_chunk(&input.replace("\n", "\r\n"));
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old:
|
||||
"fn this_will_be_deleted() {\r\n println!(\"Deleting this function\");\r\n}"
|
||||
.to_string(),
|
||||
new: "".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_both() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
=======
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
// Should not create an action when both sections are empty
|
||||
assert_eq!(actions.len(), 0);
|
||||
|
||||
// Check that the NoOp error was added
|
||||
assert_eq!(parser.errors().len(), 1);
|
||||
match parser.errors()[0].kind {
|
||||
ParseErrorKind::NoOp => {}
|
||||
_ => panic!("Expected NoOp error"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_resumability() {
|
||||
let input_part1 = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn ori"#;
|
||||
|
||||
let input_part2 = r#"ginal() {}
|
||||
=======
|
||||
fn replacement() {}"#;
|
||||
|
||||
let input_part3 = r#"
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions1 = parser.parse_chunk(input_part1);
|
||||
assert_eq!(actions1.len(), 0);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
|
||||
let actions2 = parser.parse_chunk(input_part2);
|
||||
// No actions should be complete yet
|
||||
assert_eq!(actions2.len(), 0);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
|
||||
let actions3 = parser.parse_chunk(input_part3);
|
||||
// The third chunk should complete the action
|
||||
assert_eq!(actions3.len(), 1);
|
||||
assert_eq!(
|
||||
actions3[0],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {}".to_string(),
|
||||
new: "fn replacement() {}".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parser_state_preservation() {
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions1 = parser.parse_chunk("src/main.rs\n```rust\n<<<<<<< SEARCH\n");
|
||||
|
||||
// Check parser is in the correct state
|
||||
assert_eq!(parser.state, State::SearchBlock);
|
||||
assert_eq!(parser.pre_fence_line, b"src/main.rs\n");
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
|
||||
// Continue parsing
|
||||
let actions2 = parser.parse_chunk("original code\n=======\n");
|
||||
assert_eq!(parser.state, State::ReplaceBlock);
|
||||
assert_eq!(parser.old_bytes, b"original code");
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
|
||||
let actions3 = parser.parse_chunk("replacement code\n>>>>>>> REPLACE\n```\n");
|
||||
|
||||
// After complete parsing, state should reset
|
||||
assert_eq!(parser.state, State::Default);
|
||||
assert_eq!(parser.pre_fence_line, b"\n");
|
||||
assert!(parser.old_bytes.is_empty());
|
||||
assert!(parser.new_bytes.is_empty());
|
||||
|
||||
assert_eq!(actions1.len(), 0);
|
||||
assert_eq!(actions2.len(), 0);
|
||||
assert_eq!(actions3.len(), 1);
|
||||
assert_eq!(parser.errors().len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_search_marker() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< WRONG_MARKER
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
assert_eq!(actions.len(), 0);
|
||||
|
||||
assert_eq!(parser.errors().len(), 1);
|
||||
let error = &parser.errors()[0];
|
||||
|
||||
assert_eq!(
|
||||
error.to_string(),
|
||||
"input:3:9: Expected marker \"<<<<<<< SEARCH\", found 'W'"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_missing_closing_fence() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
<!-- Missing closing fence -->
|
||||
|
||||
src/utils.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn utils_func() {}
|
||||
=======
|
||||
fn new_utils_func() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
// Only the second block should be parsed
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/utils.rs"),
|
||||
old: "fn utils_func() {}".to_string(),
|
||||
new: "fn new_utils_func() {}".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 1);
|
||||
assert_eq!(
|
||||
parser.errors()[0].to_string(),
|
||||
"input:8:1: Expected marker \"```\", found '<'".to_string()
|
||||
);
|
||||
|
||||
// The parser should continue after an error
|
||||
assert_eq!(parser.state, State::Default);
|
||||
}
|
||||
|
||||
const SYSTEM_PROMPT: &str = include_str!("./edit_prompt.md");
|
||||
|
||||
#[test]
|
||||
fn test_parse_examples_in_system_prompt() {
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(SYSTEM_PROMPT);
|
||||
assert_examples_in_system_prompt(&actions, parser.errors());
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
fn test_random_chunking_of_system_prompt(mut rng: StdRng) {
|
||||
let mut parser = EditActionParser::new();
|
||||
let mut remaining = SYSTEM_PROMPT;
|
||||
let mut actions = Vec::with_capacity(5);
|
||||
|
||||
while !remaining.is_empty() {
|
||||
let chunk_size = rng.gen_range(1..=std::cmp::min(remaining.len(), 100));
|
||||
|
||||
let (chunk, rest) = remaining.split_at(chunk_size);
|
||||
|
||||
actions.extend(parser.parse_chunk(chunk));
|
||||
remaining = rest;
|
||||
}
|
||||
|
||||
assert_examples_in_system_prompt(&actions, parser.errors());
|
||||
}
|
||||
|
||||
fn assert_examples_in_system_prompt(actions: &[EditAction], errors: &[ParseError]) {
|
||||
assert_eq!(actions.len(), 5);
|
||||
|
||||
assert_eq!(
|
||||
actions[0],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("mathweb/flask/app.py"),
|
||||
old: "from flask import Flask".to_string(),
|
||||
new: "import math\nfrom flask import Flask".to_string(),
|
||||
}
|
||||
.fix_lf(),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
actions[1],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("mathweb/flask/app.py"),
|
||||
old: "def factorial(n):\n \"compute factorial\"\n\n if n == 0:\n return 1\n else:\n return n * factorial(n-1)\n".to_string(),
|
||||
new: "".to_string(),
|
||||
}
|
||||
.fix_lf()
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
actions[2],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("mathweb/flask/app.py"),
|
||||
old: " return str(factorial(n))".to_string(),
|
||||
new: " return str(math.factorial(n))".to_string(),
|
||||
}
|
||||
.fix_lf(),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
actions[3],
|
||||
EditAction::Write {
|
||||
file_path: PathBuf::from("hello.py"),
|
||||
content: "def hello():\n \"print a greeting\"\n\n print(\"hello\")"
|
||||
.to_string(),
|
||||
}
|
||||
.fix_lf(),
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
actions[4],
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("main.py"),
|
||||
old: "def hello():\n \"print a greeting\"\n\n print(\"hello\")".to_string(),
|
||||
new: "from hello import hello".to_string(),
|
||||
}
|
||||
.fix_lf(),
|
||||
);
|
||||
|
||||
// The system prompt includes some text that would produce errors
|
||||
assert_eq!(
|
||||
errors[0].to_string(),
|
||||
"input:102:1: Expected marker \"<<<<<<< SEARCH\", found '3'"
|
||||
);
|
||||
#[cfg(not(windows))]
|
||||
assert_eq!(
|
||||
errors[1].to_string(),
|
||||
"input:109:0: Expected marker \"<<<<<<< SEARCH\", found '\\n'"
|
||||
);
|
||||
#[cfg(windows)]
|
||||
assert_eq!(
|
||||
errors[1].to_string(),
|
||||
"input:108:1: Expected marker \"<<<<<<< SEARCH\", found '\\r'"
|
||||
);
|
||||
}
|
||||
|
||||
impl EditAction {
|
||||
fn fix_lf(self: EditAction) -> EditAction {
|
||||
#[cfg(windows)]
|
||||
match self {
|
||||
EditAction::Replace {
|
||||
file_path,
|
||||
old,
|
||||
new,
|
||||
} => EditAction::Replace {
|
||||
file_path: file_path.clone(),
|
||||
old: old.replace("\n", "\r\n"),
|
||||
new: new.replace("\n", "\r\n"),
|
||||
},
|
||||
EditAction::Write { file_path, content } => EditAction::Write {
|
||||
file_path: file_path.clone(),
|
||||
content: content.replace("\n", "\r\n"),
|
||||
},
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_print_error() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< WRONG_MARKER
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
parser.parse_chunk(input);
|
||||
|
||||
assert_eq!(parser.errors().len(), 1);
|
||||
let error = &parser.errors()[0];
|
||||
let expected_error = r#"input:3:9: Expected marker "<<<<<<< SEARCH", found 'W'"#;
|
||||
|
||||
assert_eq!(format!("{}", error), expected_error);
|
||||
}
|
||||
}
|
||||
134
crates/assistant_tools/src/edit_files_tool/edit_prompt.md
Normal file
@@ -0,0 +1,134 @@
|
||||
Act as an expert software developer.
|
||||
Always use best practices when coding.
|
||||
Respect and use existing conventions, libraries, etc. that are already present in the code base.
|
||||
|
||||
Take requests for changes to the supplied code.
|
||||
Always reply to the user in the same language they are using.
|
||||
|
||||
Once you understand the request, you MUST:
|
||||
|
||||
1. Decide if you need to propose *SEARCH/REPLACE* edits to any files that haven't been added to the chat. You can create new files without asking!
|
||||
|
||||
2. Think step-by-step and explain the needed changes in a few short sentences.
|
||||
|
||||
3. Describe each change with a *SEARCH/REPLACE block* per the examples below.
|
||||
|
||||
All changes to files must use this *SEARCH/REPLACE block* format.
|
||||
ONLY EVER RETURN CODE IN A *SEARCH/REPLACE BLOCK*!
|
||||
|
||||
# Example conversations:
|
||||
|
||||
## USER: Change get_factorial() to use math.factorial
|
||||
|
||||
## ASSISTANT: To make this change we need to modify `mathweb/flask/app.py` to:
|
||||
|
||||
1. Import the math package.
|
||||
2. Remove the existing factorial() function.
|
||||
3. Update get_factorial() to call math.factorial instead.
|
||||
|
||||
Here are the *SEARCH/REPLACE* blocks:
|
||||
|
||||
mathweb/flask/app.py
|
||||
```python
|
||||
<<<<<<< SEARCH
|
||||
from flask import Flask
|
||||
=======
|
||||
import math
|
||||
from flask import Flask
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
mathweb/flask/app.py
|
||||
```python
|
||||
<<<<<<< SEARCH
|
||||
def factorial(n):
|
||||
"compute factorial"
|
||||
|
||||
if n == 0:
|
||||
return 1
|
||||
else:
|
||||
return n * factorial(n-1)
|
||||
|
||||
=======
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
mathweb/flask/app.py
|
||||
```python
|
||||
<<<<<<< SEARCH
|
||||
return str(factorial(n))
|
||||
=======
|
||||
return str(math.factorial(n))
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
|
||||
## USER: Refactor hello() into its own file.
|
||||
|
||||
## ASSISTANT: To make this change we need to modify `main.py` and make a new file `hello.py`:
|
||||
|
||||
1. Make a new hello.py file with hello() in it.
|
||||
2. Remove hello() from main.py and replace it with an import.
|
||||
|
||||
Here are the *SEARCH/REPLACE* blocks:
|
||||
|
||||
hello.py
|
||||
```python
|
||||
<<<<<<< SEARCH
|
||||
=======
|
||||
def hello():
|
||||
"print a greeting"
|
||||
|
||||
print("hello")
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
main.py
|
||||
```python
|
||||
<<<<<<< SEARCH
|
||||
def hello():
|
||||
"print a greeting"
|
||||
|
||||
print("hello")
|
||||
=======
|
||||
from hello import hello
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
# *SEARCH/REPLACE block* Rules:
|
||||
|
||||
Every *SEARCH/REPLACE block* must use this format:
|
||||
1. The *FULL* file path alone on a line, verbatim. No bold asterisks, no quotes around it, no escaping of characters, etc.
|
||||
2. The opening fence and code language, eg: ```python
|
||||
3. The start of search block: <<<<<<< SEARCH
|
||||
4. A contiguous chunk of lines to search for in the existing source code
|
||||
5. The dividing line: =======
|
||||
6. The lines to replace into the source code
|
||||
7. The end of the replace block: >>>>>>> REPLACE
|
||||
8. The closing fence: ```
|
||||
|
||||
Use the *FULL* file path, as shown to you by the user.
|
||||
|
||||
Every *SEARCH* section must *EXACTLY MATCH* the existing file content, character for character, including all comments, docstrings, etc.
|
||||
If the file contains code or other data wrapped/escaped in json/xml/quotes or other containers, you need to propose edits to the literal contents of the file, including the container markup.
|
||||
|
||||
*SEARCH/REPLACE* blocks will *only* replace the first match occurrence.
|
||||
Include multiple unique *SEARCH/REPLACE* blocks if needed.
|
||||
Include enough lines in each SEARCH section to uniquely match each set of lines that need to change.
|
||||
|
||||
Keep *SEARCH/REPLACE* blocks concise.
|
||||
Break large *SEARCH/REPLACE* blocks into a series of smaller blocks that each change a small portion of the file.
|
||||
Include just the changing lines, and a few surrounding lines if needed for uniqueness.
|
||||
Do not include long runs of unchanging lines in *SEARCH/REPLACE* blocks.
|
||||
|
||||
Only create *SEARCH/REPLACE* blocks for files that the user has added to the chat!
|
||||
|
||||
To move code within a file, use 2 *SEARCH/REPLACE* blocks: 1 to delete it from its current location, 1 to insert it in the new location.
|
||||
|
||||
Pay attention to which filenames the user wants you to edit, especially if they are asking you to create a new file.
|
||||
|
||||
If you want to put code in a new file, use a *SEARCH/REPLACE block* with:
|
||||
- A new file path, including dir name if needed
|
||||
- An empty `SEARCH` section
|
||||
- The new file's contents in the `REPLACE` section
|
||||
|
||||
ONLY EVER RETURN CODE IN A *SEARCH/REPLACE BLOCK*!
|
||||
88
crates/assistant_tools/src/list_directory_tool.rs
Normal file
@@ -0,0 +1,88 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::Tool;
|
||||
use gpui::{App, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{fmt::Write, path::Path, sync::Arc};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ListDirectoryToolInput {
|
||||
/// The relative path of the directory to list.
|
||||
///
|
||||
/// This path should never be absolute, and the first component
|
||||
/// of the path should always be a top-level directory in a project.
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following top-level directories:
|
||||
///
|
||||
/// - directory1
|
||||
/// - directory2
|
||||
///
|
||||
/// You can list the contents of `directory1` by using the path `directory1`.
|
||||
/// </example>
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following top-level directories:
|
||||
///
|
||||
/// - foo
|
||||
/// - bar
|
||||
///
|
||||
/// If you want to list the contents of the directory `foo/baz`, you should use the path `foo/baz`.
|
||||
/// </example>
|
||||
pub path: Arc<Path>,
|
||||
}
|
||||
|
||||
pub struct ListDirectoryTool;
|
||||
|
||||
impl Tool for ListDirectoryTool {
|
||||
fn name(&self) -> String {
|
||||
"list-directory".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./list_directory_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(ListDirectoryToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let input = match serde_json::from_value::<ListDirectoryToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
let Some(worktree_root_name) = input.path.components().next() else {
|
||||
return Task::ready(Err(anyhow!("Invalid path")));
|
||||
};
|
||||
let Some(worktree) = project
|
||||
.read(cx)
|
||||
.worktree_for_root_name(&worktree_root_name.as_os_str().to_string_lossy(), cx)
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("Directory not found in the project")));
|
||||
};
|
||||
let path = input.path.strip_prefix(worktree_root_name).unwrap();
|
||||
let mut output = String::new();
|
||||
for entry in worktree.read(cx).child_entries(path) {
|
||||
writeln!(
|
||||
output,
|
||||
"{}",
|
||||
Path::new(worktree_root_name.as_os_str())
|
||||
.join(&entry.path)
|
||||
.display(),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
Task::ready(Ok(output))
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
Lists files and directories in a given path.
|
||||
@@ -1,77 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use assistant_tool::Tool;
|
||||
use gpui::{App, Entity, Task};
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ListWorktreesToolInput {}
|
||||
|
||||
pub struct ListWorktreesTool;
|
||||
|
||||
impl Tool for ListWorktreesTool {
|
||||
fn name(&self) -> String {
|
||||
"list-worktrees".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Lists all worktrees in the current project. Use this tool when you need to find available worktrees and their IDs.".into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
serde_json::json!(
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {},
|
||||
"required": []
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_input: serde_json::Value,
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
cx.spawn(|cx| async move {
|
||||
cx.update(|cx| {
|
||||
#[derive(Debug, Serialize)]
|
||||
struct WorktreeInfo {
|
||||
id: usize,
|
||||
root_name: String,
|
||||
root_dir: Option<String>,
|
||||
}
|
||||
|
||||
let worktrees = project.update(cx, |project, cx| {
|
||||
project
|
||||
.visible_worktrees(cx)
|
||||
.map(|worktree| {
|
||||
worktree.read_with(cx, |worktree, _cx| WorktreeInfo {
|
||||
id: worktree.id().to_usize(),
|
||||
root_dir: worktree
|
||||
.root_dir()
|
||||
.map(|root_dir| root_dir.to_string_lossy().to_string()),
|
||||
root_name: worktree.root_name().to_string(),
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
if worktrees.is_empty() {
|
||||
return Ok("No worktrees found in the current project.".to_string());
|
||||
}
|
||||
|
||||
let mut result = String::from("Worktrees in the current project:\n\n");
|
||||
for worktree in worktrees {
|
||||
result.push_str(&serde_json::to_string(&worktree)?);
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
})?
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,7 @@ use anyhow::{anyhow, Result};
|
||||
use assistant_tool::Tool;
|
||||
use chrono::{Local, Utc};
|
||||
use gpui::{App, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -42,6 +43,7 @@ impl Tool for NowTool {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
_project: Entity<Project>,
|
||||
_cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
|
||||
@@ -4,17 +4,27 @@ use std::sync::Arc;
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::Tool;
|
||||
use gpui::{App, Entity, Task};
|
||||
use project::{Project, ProjectPath, WorktreeId};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::{Project, ProjectPath};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ReadFileToolInput {
|
||||
/// The ID of the worktree in which the file resides.
|
||||
pub worktree_id: usize,
|
||||
/// The path to the file to read.
|
||||
/// The relative path of the file to read.
|
||||
///
|
||||
/// This path is relative to the worktree root, it must not be an absolute path.
|
||||
/// This path should never be absolute, and the first component
|
||||
/// of the path should always be a top-level directory in a project.
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following top-level directories:
|
||||
///
|
||||
/// - directory1
|
||||
/// - directory2
|
||||
///
|
||||
/// If you want to access `file.txt` in `directory1`, you should use the path `directory1/file.txt`.
|
||||
/// If you want to access `file.txt` in `directory2`, you should use the path `directory2/file.txt`.
|
||||
/// </example>
|
||||
pub path: Arc<Path>,
|
||||
}
|
||||
|
||||
@@ -26,7 +36,7 @@ impl Tool for ReadFileTool {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Reads the content of a file specified by a worktree ID and path. Use this tool when you need to access the contents of a file in the project.".into()
|
||||
include_str!("./read_file_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
@@ -37,6 +47,7 @@ impl Tool for ReadFileTool {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
@@ -45,9 +56,18 @@ impl Tool for ReadFileTool {
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
let Some(worktree_root_name) = input.path.components().next() else {
|
||||
return Task::ready(Err(anyhow!("Invalid path")));
|
||||
};
|
||||
let Some(worktree) = project
|
||||
.read(cx)
|
||||
.worktree_for_root_name(&worktree_root_name.as_os_str().to_string_lossy(), cx)
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("Directory not found in the project")));
|
||||
};
|
||||
let project_path = ProjectPath {
|
||||
worktree_id: WorktreeId::from_usize(input.worktree_id),
|
||||
path: input.path,
|
||||
worktree_id: worktree.read(cx).id(),
|
||||
path: Arc::from(input.path.strip_prefix(worktree_root_name).unwrap()),
|
||||
};
|
||||
cx.spawn(|cx| async move {
|
||||
let buffer = cx
|
||||
@@ -56,7 +76,16 @@ impl Tool for ReadFileTool {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
cx.update(|cx| buffer.read(cx).text())
|
||||
buffer.read_with(&cx, |buffer, _cx| {
|
||||
if buffer
|
||||
.file()
|
||||
.map_or(false, |file| file.disk_state().exists())
|
||||
{
|
||||
Ok(buffer.text())
|
||||
} else {
|
||||
Err(anyhow!("File does not exist"))
|
||||
}
|
||||
})?
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
1
crates/assistant_tools/src/read_file_tool/description.md
Normal file
@@ -0,0 +1 @@
|
||||
Reads the content of the given file in the project.
|
||||
119
crates/assistant_tools/src/regex_search.rs
Normal file
@@ -0,0 +1,119 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::Tool;
|
||||
use futures::StreamExt;
|
||||
use gpui::{App, Entity, Task};
|
||||
use language::OffsetRangeExt;
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::{search::SearchQuery, Project};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{cmp, fmt::Write, sync::Arc};
|
||||
use util::paths::PathMatcher;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct RegexSearchToolInput {
|
||||
/// A regex pattern to search for in the entire project. Note that the regex
|
||||
/// will be parsed by the Rust `regex` crate.
|
||||
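///
/// <example>
/// To find function definitions whose names start with `handle_`, you could
/// use a pattern like `fn\s+handle_\w+` (illustrative; any pattern supported by
/// the Rust `regex` crate works).
/// </example>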
pub regex: String,
|
||||
}
|
||||
|
||||
pub struct RegexSearchTool;
|
||||
|
||||
impl Tool for RegexSearchTool {
|
||||
fn name(&self) -> String {
|
||||
"regex-search".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./regex_search_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(RegexSearchToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
const CONTEXT_LINES: u32 = 2;
|
||||
|
||||
let input = match serde_json::from_value::<RegexSearchToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
let query = match SearchQuery::regex(
|
||||
&input.regex,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
PathMatcher::default(),
|
||||
PathMatcher::default(),
|
||||
None,
|
||||
) {
|
||||
Ok(query) => query,
|
||||
Err(error) => return Task::ready(Err(error)),
|
||||
};
|
||||
|
||||
let results = project.update(cx, |project, cx| project.search(query, cx));
|
||||
cx.spawn(|cx| async move {
|
||||
futures::pin_mut!(results);
|
||||
|
||||
let mut output = String::new();
|
||||
while let Some(project::search::SearchResult::Buffer { buffer, ranges }) =
|
||||
results.next().await
|
||||
{
|
||||
if ranges.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
buffer.read_with(&cx, |buffer, cx| {
|
||||
if let Some(path) = buffer.file().map(|file| file.full_path(cx)) {
|
||||
writeln!(output, "### Found matches in {}:\n", path.display()).unwrap();
|
||||
let mut ranges = ranges
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
let mut point_range = range.to_point(buffer);
|
||||
point_range.start.row =
|
||||
point_range.start.row.saturating_sub(CONTEXT_LINES);
|
||||
point_range.start.column = 0;
|
||||
point_range.end.row = cmp::min(
|
||||
buffer.max_point().row,
|
||||
point_range.end.row + CONTEXT_LINES,
|
||||
);
|
||||
point_range.end.column = buffer.line_len(point_range.end.row);
|
||||
point_range
|
||||
})
|
||||
.peekable();
|
||||
|
||||
while let Some(mut range) = ranges.next() {
|
||||
while let Some(next_range) = ranges.peek() {
|
||||
if range.end.row >= next_range.start.row {
|
||||
range.end = next_range.end;
|
||||
ranges.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
writeln!(output, "```").unwrap();
|
||||
output.extend(buffer.text_for_range(range));
|
||||
writeln!(output, "\n```\n").unwrap();
|
||||
}
|
||||
}
|
||||
})?;
|
||||
}
|
||||
|
||||
if output.is_empty() {
|
||||
Ok("No matches found".into())
|
||||
} else {
|
||||
Ok(output)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,3 @@
Searches the entire project for the given regular expression.

Returns a list of paths that matched the query. For each path, it returns a list of excerpts of the matched text.
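As a hedged illustration (not taken from the commits above), an input payload for this tool might look like the sketch below, assuming the `RegexSearchToolInput` schema defined in regex_search.rs; the specific regex is an arbitrary example.

// Hypothetical sketch: build a request payload that deserializes into RegexSearchToolInput.
use serde_json::json;

fn main() {
    // Any Rust-flavored regex is accepted; this one matches function definitions.
    let input = json!({ "regex": r"fn \w+\(" });
    println!("{input}");
}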
@@ -22,6 +22,7 @@ git2.workspace = true
gpui.workspace = true
language.workspace = true
log.workspace = true
pretty_assertions.workspace = true
rope.workspace = true
sum_tree.workspace = true
text.workspace = true
@@ -31,7 +32,6 @@ util.workspace = true
ctor.workspace = true
env_logger.workspace = true
gpui = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
rand.workspace = true
serde_json.workspace = true
text = { workspace = true, features = ["test-support"] }

@@ -6,9 +6,9 @@ use rope::Rope;
use std::cmp::Ordering;
use std::mem;
use std::{future::Future, iter, ops::Range, sync::Arc};
use sum_tree::{SumTree, TreeMap};
use text::ToOffset as _;
use sum_tree::SumTree;
use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point};
use text::{AnchorRangeExt, ToOffset as _};
use util::ResultExt;

pub struct BufferDiff {
@@ -26,7 +26,7 @@ pub struct BufferDiffSnapshot {
#[derive(Clone)]
struct BufferDiffInner {
    hunks: SumTree<InternalDiffHunk>,
    pending_hunks: TreeMap<usize, PendingHunk>,
    pending_hunks: SumTree<PendingHunk>,
    base_text: language::BufferSnapshot,
    base_text_exists: bool,
}
@@ -48,7 +48,7 @@ pub enum DiffHunkStatusKind {
|
||||
pub enum DiffHunkSecondaryStatus {
|
||||
HasSecondaryHunk,
|
||||
OverlapsWithSecondaryHunk,
|
||||
None,
|
||||
NoSecondaryHunk,
|
||||
SecondaryHunkAdditionPending,
|
||||
SecondaryHunkRemovalPending,
|
||||
}
|
||||
@@ -74,6 +74,8 @@ struct InternalDiffHunk {
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct PendingHunk {
|
||||
buffer_range: Range<Anchor>,
|
||||
diff_base_byte_range: Range<usize>,
|
||||
buffer_version: clock::Global,
|
||||
new_status: DiffHunkSecondaryStatus,
|
||||
}
|
||||
@@ -93,6 +95,16 @@ impl sum_tree::Item for InternalDiffHunk {
|
||||
}
|
||||
}
|
||||
|
||||
impl sum_tree::Item for PendingHunk {
|
||||
type Summary = DiffHunkSummary;
|
||||
|
||||
fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary {
|
||||
DiffHunkSummary {
|
||||
buffer_range: self.buffer_range.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sum_tree::Summary for DiffHunkSummary {
|
||||
type Context = text::BufferSnapshot;
|
||||
|
||||
@@ -176,6 +188,7 @@ impl BufferDiffSnapshot {
|
||||
}
|
||||
|
||||
impl BufferDiffInner {
|
||||
/// Returns the new index text and new pending hunks.
|
||||
fn stage_or_unstage_hunks(
|
||||
&mut self,
|
||||
unstaged_diff: &Self,
|
||||
@@ -183,7 +196,7 @@ impl BufferDiffInner {
|
||||
hunks: &[DiffHunk],
|
||||
buffer: &text::BufferSnapshot,
|
||||
file_exists: bool,
|
||||
) -> (Option<Rope>, Vec<(usize, PendingHunk)>) {
|
||||
) -> (Option<Rope>, SumTree<PendingHunk>) {
|
||||
let head_text = self
|
||||
.base_text_exists
|
||||
.then(|| self.base_text.as_rope().clone());
|
||||
@@ -195,41 +208,41 @@ impl BufferDiffInner {
|
||||
// entire file must be either created or deleted in the index.
|
||||
let (index_text, head_text) = match (index_text, head_text) {
|
||||
(Some(index_text), Some(head_text)) if file_exists || !stage => (index_text, head_text),
|
||||
(_, head_text @ _) => {
|
||||
if stage {
|
||||
(index_text, head_text) => {
|
||||
let (rope, new_status) = if stage {
|
||||
log::debug!("stage all");
|
||||
return (
|
||||
(
|
||||
file_exists.then(|| buffer.as_rope().clone()),
|
||||
vec![(
|
||||
0,
|
||||
PendingHunk {
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: DiffHunkSecondaryStatus::SecondaryHunkRemovalPending,
|
||||
},
|
||||
)],
|
||||
);
|
||||
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending,
|
||||
)
|
||||
} else {
|
||||
log::debug!("unstage all");
|
||||
return (
|
||||
(
|
||||
head_text,
|
||||
vec![(
|
||||
0,
|
||||
PendingHunk {
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: DiffHunkSecondaryStatus::SecondaryHunkAdditionPending,
|
||||
},
|
||||
)],
|
||||
);
|
||||
}
|
||||
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending,
|
||||
)
|
||||
};
|
||||
|
||||
let hunk = PendingHunk {
|
||||
buffer_range: Anchor::MIN..Anchor::MAX,
|
||||
diff_base_byte_range: 0..index_text.map_or(0, |rope| rope.len()),
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status,
|
||||
};
|
||||
let tree = SumTree::from_item(hunk, buffer);
|
||||
return (rope, tree);
|
||||
}
|
||||
};
|
||||
|
||||
let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
unstaged_hunk_cursor.next(buffer);
|
||||
let mut edits = Vec::new();
|
||||
let mut pending_hunks = Vec::new();
|
||||
let mut prev_unstaged_hunk_buffer_offset = 0;
|
||||
let mut prev_unstaged_hunk_base_text_offset = 0;
|
||||
|
||||
let mut pending_hunks = SumTree::new(buffer);
|
||||
let mut old_pending_hunks = unstaged_diff
|
||||
.pending_hunks
|
||||
.cursor::<DiffHunkSummary>(buffer);
|
||||
|
||||
// first, merge new hunks into pending_hunks
|
||||
for DiffHunk {
|
||||
buffer_range,
|
||||
diff_base_byte_range,
|
||||
@@ -237,12 +250,58 @@ impl BufferDiffInner {
|
||||
..
|
||||
} in hunks.iter().cloned()
|
||||
{
|
||||
if (stage && secondary_status == DiffHunkSecondaryStatus::None)
|
||||
let preceding_pending_hunks =
|
||||
old_pending_hunks.slice(&buffer_range.start, Bias::Left, buffer);
|
||||
|
||||
pending_hunks.append(preceding_pending_hunks, buffer);
|
||||
|
||||
// skip all overlapping old pending hunks
|
||||
while old_pending_hunks
|
||||
.item()
|
||||
.is_some_and(|preceding_pending_hunk_item| {
|
||||
preceding_pending_hunk_item
|
||||
.buffer_range
|
||||
.overlaps(&buffer_range, buffer)
|
||||
})
|
||||
{
|
||||
old_pending_hunks.next(buffer);
|
||||
}
|
||||
|
||||
// merge into pending hunks
|
||||
if (stage && secondary_status == DiffHunkSecondaryStatus::NoSecondaryHunk)
|
||||
|| (!stage && secondary_status == DiffHunkSecondaryStatus::HasSecondaryHunk)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
pending_hunks.push(
|
||||
PendingHunk {
|
||||
buffer_range,
|
||||
diff_base_byte_range,
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: if stage {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
|
||||
} else {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
|
||||
},
|
||||
},
|
||||
buffer,
|
||||
);
|
||||
}
|
||||
// append the remainder
|
||||
pending_hunks.append(old_pending_hunks.suffix(buffer), buffer);
|
||||
|
||||
let mut prev_unstaged_hunk_buffer_offset = 0;
|
||||
let mut prev_unstaged_hunk_base_text_offset = 0;
|
||||
let mut edits = Vec::<(Range<usize>, String)>::new();
|
||||
|
||||
// then, iterate over all pending hunks (both new ones and the existing ones) and compute the edits
|
||||
for PendingHunk {
|
||||
buffer_range,
|
||||
diff_base_byte_range,
|
||||
..
|
||||
} in pending_hunks.iter().cloned()
|
||||
{
|
||||
let skipped_hunks = unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left, buffer);
|
||||
|
||||
if let Some(secondary_hunk) = skipped_hunks.last() {
|
||||
@@ -294,22 +353,15 @@ impl BufferDiffInner {
|
||||
.chunks_in_range(diff_base_byte_range.clone())
|
||||
.collect::<String>()
|
||||
};
|
||||
pending_hunks.push((
|
||||
diff_base_byte_range.start,
|
||||
PendingHunk {
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: if stage {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
|
||||
} else {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
|
||||
},
|
||||
},
|
||||
));
|
||||
|
||||
edits.push((index_range, replacement_text));
|
||||
}
|
||||
|
||||
debug_assert!(edits.iter().is_sorted_by_key(|(range, _)| range.start));
|
||||
|
||||
let mut new_index_text = Rope::new();
|
||||
let mut index_cursor = index_text.cursor(0);
|
||||
|
||||
for (old_range, replacement_text) in edits {
|
||||
new_index_text.append(index_cursor.slice(old_range.start));
|
||||
index_cursor.seek_forward(old_range.end);
|
||||
@@ -354,12 +406,14 @@ impl BufferDiffInner {
|
||||
});
|
||||
|
||||
let mut secondary_cursor = None;
|
||||
let mut pending_hunks = TreeMap::default();
|
||||
let mut pending_hunks_cursor = None;
|
||||
if let Some(secondary) = secondary.as_ref() {
|
||||
let mut cursor = secondary.hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
cursor.next(buffer);
|
||||
secondary_cursor = Some(cursor);
|
||||
pending_hunks = secondary.pending_hunks.clone();
|
||||
let mut cursor = secondary.pending_hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
cursor.next(buffer);
|
||||
pending_hunks_cursor = Some(cursor);
|
||||
}
|
||||
|
||||
let max_point = buffer.max_point();
|
||||
@@ -378,16 +432,33 @@ impl BufferDiffInner {
|
||||
end_anchor = buffer.anchor_before(end_point);
|
||||
}
|
||||
|
||||
let mut secondary_status = DiffHunkSecondaryStatus::None;
|
||||
let mut secondary_status = DiffHunkSecondaryStatus::NoSecondaryHunk;
|
||||
|
||||
let mut has_pending = false;
|
||||
if let Some(pending_hunk) = pending_hunks.get(&start_base) {
|
||||
if !buffer.has_edits_since_in_range(
|
||||
&pending_hunk.buffer_version,
|
||||
start_anchor..end_anchor,
|
||||
) {
|
||||
has_pending = true;
|
||||
secondary_status = pending_hunk.new_status;
|
||||
if let Some(pending_cursor) = pending_hunks_cursor.as_mut() {
|
||||
if start_anchor
|
||||
.cmp(&pending_cursor.start().buffer_range.start, buffer)
|
||||
.is_gt()
|
||||
{
|
||||
pending_cursor.seek_forward(&start_anchor, Bias::Left, buffer);
|
||||
}
|
||||
|
||||
if let Some(pending_hunk) = pending_cursor.item() {
|
||||
let mut pending_range = pending_hunk.buffer_range.to_point(buffer);
|
||||
if pending_range.end.column > 0 {
|
||||
pending_range.end.row += 1;
|
||||
pending_range.end.column = 0;
|
||||
}
|
||||
|
||||
if pending_range == (start_point..end_point) {
|
||||
if !buffer.has_edits_since_in_range(
|
||||
&pending_hunk.buffer_version,
|
||||
start_anchor..end_anchor,
|
||||
) {
|
||||
has_pending = true;
|
||||
secondary_status = pending_hunk.new_status;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -449,7 +520,7 @@ impl BufferDiffInner {
|
||||
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
|
||||
buffer_range: hunk.buffer_range.clone(),
|
||||
// The secondary status is not used by callers of this method.
|
||||
secondary_status: DiffHunkSecondaryStatus::None,
|
||||
secondary_status: DiffHunkSecondaryStatus::NoSecondaryHunk,
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -724,7 +795,7 @@ impl BufferDiff {
|
||||
base_text,
|
||||
hunks,
|
||||
base_text_exists,
|
||||
pending_hunks: TreeMap::default(),
|
||||
pending_hunks: SumTree::new(&buffer),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -740,8 +811,8 @@ impl BufferDiff {
|
||||
cx.background_spawn(async move {
|
||||
BufferDiffInner {
|
||||
base_text: base_text_snapshot,
|
||||
pending_hunks: SumTree::new(&buffer),
|
||||
hunks: compute_hunks(base_text_pair, buffer),
|
||||
pending_hunks: TreeMap::default(),
|
||||
base_text_exists,
|
||||
}
|
||||
})
|
||||
@@ -751,7 +822,7 @@ impl BufferDiff {
|
||||
BufferDiffInner {
|
||||
base_text: language::Buffer::build_empty_snapshot(cx),
|
||||
hunks: SumTree::new(buffer),
|
||||
pending_hunks: TreeMap::default(),
|
||||
pending_hunks: SumTree::new(buffer),
|
||||
base_text_exists: false,
|
||||
}
|
||||
}
|
||||
@@ -767,7 +838,7 @@ impl BufferDiff {
|
||||
pub fn clear_pending_hunks(&mut self, cx: &mut Context<Self>) {
|
||||
if let Some(secondary_diff) = &self.secondary_diff {
|
||||
secondary_diff.update(cx, |diff, _| {
|
||||
diff.inner.pending_hunks.clear();
|
||||
diff.inner.pending_hunks = SumTree::from_summary(DiffHunkSummary::default());
|
||||
});
|
||||
cx.emit(BufferDiffEvent::DiffChanged {
|
||||
changed_range: Some(Anchor::MIN..Anchor::MAX),
|
||||
@@ -783,18 +854,17 @@ impl BufferDiff {
|
||||
file_exists: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Rope> {
|
||||
let (new_index_text, pending_hunks) = self.inner.stage_or_unstage_hunks(
|
||||
let (new_index_text, new_pending_hunks) = self.inner.stage_or_unstage_hunks(
|
||||
&self.secondary_diff.as_ref()?.read(cx).inner,
|
||||
stage,
|
||||
&hunks,
|
||||
buffer,
|
||||
file_exists,
|
||||
);
|
||||
|
||||
if let Some(unstaged_diff) = &self.secondary_diff {
|
||||
unstaged_diff.update(cx, |diff, _| {
|
||||
for (offset, pending_hunk) in pending_hunks {
|
||||
diff.inner.pending_hunks.insert(offset, pending_hunk);
|
||||
}
|
||||
diff.inner.pending_hunks = new_pending_hunks;
|
||||
});
|
||||
}
|
||||
cx.emit(BufferDiffEvent::HunksStagedOrUnstaged(
|
||||
@@ -828,7 +898,6 @@ impl BufferDiff {
|
||||
Some(start..end)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn update_diff(
|
||||
this: Entity<BufferDiff>,
|
||||
buffer: text::BufferSnapshot,
|
||||
@@ -838,8 +907,8 @@ impl BufferDiff {
|
||||
language: Option<Arc<Language>>,
|
||||
language_registry: Option<Arc<LanguageRegistry>>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> anyhow::Result<Option<Range<Anchor>>> {
|
||||
let snapshot = if base_text_changed || language_changed {
|
||||
) -> anyhow::Result<BufferDiffSnapshot> {
|
||||
let inner = if base_text_changed || language_changed {
|
||||
cx.update(|cx| {
|
||||
Self::build(
|
||||
buffer.clone(),
|
||||
@@ -861,18 +930,45 @@ impl BufferDiff {
|
||||
})?
|
||||
.await
|
||||
};
|
||||
|
||||
this.update(cx, |this, _| this.set_state(snapshot, &buffer))
|
||||
Ok(BufferDiffSnapshot {
|
||||
inner,
|
||||
secondary_diff: None,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn update_diff_from(
|
||||
pub fn set_snapshot(
|
||||
&mut self,
|
||||
buffer: &text::BufferSnapshot,
|
||||
other: &Entity<Self>,
|
||||
new_snapshot: BufferDiffSnapshot,
|
||||
language_changed: bool,
|
||||
secondary_changed_range: Option<Range<Anchor>>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Range<Anchor>> {
|
||||
let other = other.read(cx).inner.clone();
|
||||
self.set_state(other, buffer)
|
||||
let changed_range = self.set_state(new_snapshot.inner, buffer);
|
||||
if language_changed {
|
||||
cx.emit(BufferDiffEvent::LanguageChanged);
|
||||
}
|
||||
|
||||
let changed_range = match (secondary_changed_range, changed_range) {
|
||||
(None, None) => None,
|
||||
(Some(unstaged_range), None) => self.range_to_hunk_range(unstaged_range, &buffer, cx),
|
||||
(None, Some(uncommitted_range)) => Some(uncommitted_range),
|
||||
(Some(unstaged_range), Some(uncommitted_range)) => {
|
||||
let mut start = uncommitted_range.start;
|
||||
let mut end = uncommitted_range.end;
|
||||
if let Some(unstaged_range) = self.range_to_hunk_range(unstaged_range, &buffer, cx)
|
||||
{
|
||||
start = unstaged_range.start.min(&uncommitted_range.start, &buffer);
|
||||
end = unstaged_range.end.max(&uncommitted_range.end, &buffer);
|
||||
}
|
||||
Some(start..end)
|
||||
}
|
||||
};
|
||||
|
||||
cx.emit(BufferDiffEvent::DiffChanged {
|
||||
changed_range: changed_range.clone(),
|
||||
});
|
||||
changed_range
|
||||
}
|
||||
|
||||
fn set_state(
|
||||
@@ -890,7 +986,9 @@ impl BufferDiff {
|
||||
}
|
||||
_ => (true, Some(text::Anchor::MIN..text::Anchor::MAX)),
|
||||
};
|
||||
let pending_hunks = mem::take(&mut self.inner.pending_hunks);
|
||||
|
||||
let pending_hunks = mem::replace(&mut self.inner.pending_hunks, SumTree::new(buffer));
|
||||
|
||||
self.inner = new_state;
|
||||
if !base_text_changed {
|
||||
self.inner.pending_hunks = pending_hunks;
|
||||
@@ -1123,21 +1221,21 @@ impl DiffHunkStatus {
|
||||
pub fn deleted_none() -> Self {
|
||||
Self {
|
||||
kind: DiffHunkStatusKind::Deleted,
|
||||
secondary: DiffHunkSecondaryStatus::None,
|
||||
secondary: DiffHunkSecondaryStatus::NoSecondaryHunk,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn added_none() -> Self {
|
||||
Self {
|
||||
kind: DiffHunkStatusKind::Added,
|
||||
secondary: DiffHunkSecondaryStatus::None,
|
||||
secondary: DiffHunkSecondaryStatus::NoSecondaryHunk,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn modified_none() -> Self {
|
||||
Self {
|
||||
kind: DiffHunkStatusKind::Modified,
|
||||
secondary: DiffHunkSecondaryStatus::None,
|
||||
secondary: DiffHunkSecondaryStatus::NoSecondaryHunk,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1145,13 +1243,14 @@ impl DiffHunkStatus {
|
||||
/// Range (crossing new lines), old, new
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
#[track_caller]
|
||||
pub fn assert_hunks<Iter>(
|
||||
diff_hunks: Iter,
|
||||
pub fn assert_hunks<ExpectedText, HunkIter>(
|
||||
diff_hunks: HunkIter,
|
||||
buffer: &text::BufferSnapshot,
|
||||
diff_base: &str,
|
||||
expected_hunks: &[(Range<u32>, &str, &str, DiffHunkStatus)],
|
||||
expected_hunks: &[(Range<u32>, ExpectedText, ExpectedText, DiffHunkStatus)],
|
||||
) where
|
||||
Iter: Iterator<Item = DiffHunk>,
|
||||
HunkIter: Iterator<Item = DiffHunk>,
|
||||
ExpectedText: AsRef<str>,
|
||||
{
|
||||
let actual_hunks = diff_hunks
|
||||
.map(|hunk| {
|
||||
@@ -1171,14 +1270,14 @@ pub fn assert_hunks<Iter>(
|
||||
.map(|(r, old_text, new_text, status)| {
|
||||
(
|
||||
Point::new(r.start, 0)..Point::new(r.end, 0),
|
||||
*old_text,
|
||||
new_text.to_string(),
|
||||
old_text.as_ref(),
|
||||
new_text.as_ref().to_string(),
|
||||
*status,
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
assert_eq!(actual_hunks, expected_hunks);
|
||||
pretty_assertions::assert_eq!(actual_hunks, expected_hunks);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -1237,7 +1336,7 @@ mod tests {
|
||||
);
|
||||
|
||||
diff = cx.update(|cx| BufferDiff::build_empty(&buffer, cx));
|
||||
assert_hunks(
|
||||
assert_hunks::<&str, _>(
|
||||
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer, None),
|
||||
&buffer,
|
||||
&diff_base,
|
||||
@@ -1575,7 +1674,10 @@ mod tests {
|
||||
.hunks_intersecting_range(hunk_range.clone(), &buffer, &cx)
|
||||
.collect::<Vec<_>>();
|
||||
for hunk in &hunks {
|
||||
assert_ne!(hunk.secondary_status, DiffHunkSecondaryStatus::None)
|
||||
assert_ne!(
|
||||
hunk.secondary_status,
|
||||
DiffHunkSecondaryStatus::NoSecondaryHunk
|
||||
)
|
||||
}
|
||||
|
||||
let new_index_text = diff
|
||||
@@ -1854,10 +1956,10 @@ mod tests {
|
||||
let hunk_to_change = hunk.clone();
|
||||
let stage = match hunk.secondary_status {
|
||||
DiffHunkSecondaryStatus::HasSecondaryHunk => {
|
||||
hunk.secondary_status = DiffHunkSecondaryStatus::None;
|
||||
hunk.secondary_status = DiffHunkSecondaryStatus::NoSecondaryHunk;
|
||||
true
|
||||
}
|
||||
DiffHunkSecondaryStatus::None => {
|
||||
DiffHunkSecondaryStatus::NoSecondaryHunk => {
|
||||
hunk.secondary_status = DiffHunkSecondaryStatus::HasSecondaryHunk;
|
||||
false
|
||||
}
|
||||
|
||||
@@ -229,7 +229,6 @@ impl Database {
|
||||
}
|
||||
|
||||
/// Creates a new channel message.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn create_channel_message(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
|
||||
@@ -122,7 +122,6 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn get_or_create_user_by_github_account_tx(
|
||||
&self,
|
||||
github_login: &str,
|
||||
|
||||
@@ -289,7 +289,6 @@ impl LlmDatabase {
|
||||
.await
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn record_usage(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
@@ -554,7 +553,6 @@ impl LlmDatabase {
|
||||
.await
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn update_usage_for_measure(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
|
||||
@@ -33,7 +33,6 @@ pub struct LlmTokenClaims {
|
||||
const LLM_TOKEN_LIFETIME: Duration = Duration::from_secs(60 * 60);
|
||||
|
||||
impl LlmTokenClaims {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn create(
|
||||
user: &user::Model,
|
||||
is_staff: bool,
|
||||
|
||||
@@ -396,6 +396,7 @@ impl Server {
|
||||
.add_request_handler(forward_mutating_project_request::<proto::Stage>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::Unstage>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::Commit>)
|
||||
.add_request_handler(forward_mutating_project_request::<proto::GitInit>)
|
||||
.add_request_handler(forward_read_only_project_request::<proto::GetRemotes>)
|
||||
.add_request_handler(forward_read_only_project_request::<proto::GitShow>)
|
||||
.add_request_handler(forward_read_only_project_request::<proto::GitReset>)
|
||||
@@ -697,7 +698,6 @@ impl Server {
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn handle_connection(
|
||||
self: &Arc<Self>,
|
||||
connection: Connection,
|
||||
@@ -1081,7 +1081,6 @@ pub fn routes(server: Arc<Server>) -> Router<(), Body> {
|
||||
.layer(Extension(server))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn handle_websocket_request(
|
||||
TypedHeader(ProtocolVersion(protocol_version)): TypedHeader<ProtocolVersion>,
|
||||
app_version_header: Option<TypedHeader<AppVersionHeader>>,
|
||||
|
||||
@@ -3,7 +3,6 @@ use crate::{
|
||||
tests::{rust_lang, TestServer},
|
||||
};
|
||||
use call::ActiveCall;
|
||||
use collections::HashMap;
|
||||
use editor::{
|
||||
actions::{
|
||||
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, Redo, Rename,
|
||||
@@ -1983,7 +1982,6 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
|
||||
blame_entry("3a3a3a", 2..3),
|
||||
blame_entry("4c4c4c", 3..4),
|
||||
],
|
||||
permalinks: HashMap::default(), // This field is deprecrated
|
||||
messages: [
|
||||
("1b1b1b", "message for idx-0"),
|
||||
("0d0d0d", "message for idx-1"),
|
||||
@@ -2040,7 +2038,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
|
||||
// client_b now requests git blame for the open buffer
|
||||
editor_b.update_in(cx_b, |editor_b, window, cx| {
|
||||
assert!(editor_b.blame().is_none());
|
||||
editor_b.toggle_git_blame(&editor::actions::ToggleGitBlame {}, window, cx);
|
||||
editor_b.toggle_git_blame(&git::Blame {}, window, cx);
|
||||
});
|
||||
|
||||
cx_a.executor().run_until_parked();
|
||||
|
||||
@@ -463,7 +463,6 @@ impl<T: RandomizedTest> TestPlan<T> {
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn apply_server_operation(
|
||||
plan: Arc<Mutex<Self>>,
|
||||
deterministic: BackgroundExecutor,
|
||||
|
||||
@@ -271,7 +271,7 @@ impl TestServer {
|
||||
|
||||
let git_hosting_provider_registry = cx.update(GitHostingProviderRegistry::default_global);
|
||||
git_hosting_provider_registry
|
||||
.register_hosting_provider(Arc::new(git_hosting_providers::Github));
|
||||
.register_hosting_provider(Arc::new(git_hosting_providers::Github::new()));
|
||||
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx));
|
||||
|
||||
@@ -869,7 +869,6 @@ impl CollabPanel {
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_participant_project(
|
||||
&self,
|
||||
project_id: u64,
|
||||
|
||||
@@ -417,22 +417,17 @@ impl ComponentPreview {
|
||||
}
|
||||
}
|
||||
|
||||
fn test_status_toast(&self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
fn test_status_toast(&self, cx: &mut Context<Self>) {
|
||||
if let Some(workspace) = self.workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
let status_toast = StatusToast::new(
|
||||
"`zed/new-notification-system` created!",
|
||||
window,
|
||||
cx,
|
||||
|this, _, cx| {
|
||||
let status_toast =
|
||||
StatusToast::new("`zed/new-notification-system` created!", cx, |this, _cx| {
|
||||
this.icon(ToastIcon::new(IconName::GitBranchSmall).color(Color::Muted))
|
||||
.action(
|
||||
"Open Pull Request",
|
||||
cx.listener(|_, _, _, cx| cx.open_url("https://github.com/")),
|
||||
)
|
||||
},
|
||||
);
|
||||
workspace.toggle_status_toast(window, cx, status_toast)
|
||||
.action("Open Pull Request", |_, cx| {
|
||||
cx.open_url("https://github.com/")
|
||||
})
|
||||
});
|
||||
workspace.toggle_status_toast(status_toast, cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -478,8 +473,8 @@ impl Render for ComponentPreview {
|
||||
div().w_full().pb_4().child(
|
||||
Button::new("toast-test", "Launch Toast")
|
||||
.on_click(cx.listener({
|
||||
move |this, _, window, cx| {
|
||||
this.test_status_toast(window, cx);
|
||||
move |this, _, _window, cx| {
|
||||
this.test_status_toast(cx);
|
||||
cx.notify();
|
||||
}
|
||||
}))
|
||||
|
||||
@@ -21,6 +21,7 @@ context_server_settings.workspace = true
|
||||
extension.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language_model.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
postage.workspace = true
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, bail, Result};
|
||||
use assistant_tool::Tool;
|
||||
use assistant_tool::{Tool, ToolSource};
|
||||
use gpui::{App, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
|
||||
use crate::manager::ContextServerManager;
|
||||
@@ -37,6 +38,12 @@ impl Tool for ContextServerTool {
|
||||
self.tool.description.clone().unwrap_or_default()
|
||||
}
|
||||
|
||||
fn source(&self) -> ToolSource {
|
||||
ToolSource::ContextServer {
|
||||
id: self.server_id.clone().into(),
|
||||
}
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
match &self.tool.input_schema {
|
||||
serde_json::Value::Null => {
|
||||
@@ -52,6 +59,7 @@ impl Tool for ContextServerTool {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
_project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
|
||||
@@ -623,16 +623,21 @@ impl Copilot {
|
||||
|
||||
pub fn sign_out(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
self.update_sign_in_status(request::SignInStatus::NotSignedIn, cx);
|
||||
if let CopilotServer::Running(RunningCopilotServer { lsp: server, .. }) = &self.server {
|
||||
let server = server.clone();
|
||||
cx.background_spawn(async move {
|
||||
server
|
||||
.request::<request::SignOut>(request::SignOutParams {})
|
||||
.await?;
|
||||
match &self.server {
|
||||
CopilotServer::Running(RunningCopilotServer { lsp: server, .. }) => {
|
||||
let server = server.clone();
|
||||
cx.background_spawn(async move {
|
||||
server
|
||||
.request::<request::SignOut>(request::SignOutParams {})
|
||||
.await?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
}
|
||||
CopilotServer::Disabled => cx.background_spawn(async move {
|
||||
clear_copilot_config_dir().await;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
} else {
|
||||
Task::ready(Err(anyhow!("copilot hasn't started yet")))
|
||||
}),
|
||||
_ => Task::ready(Err(anyhow!("copilot hasn't started yet"))),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1016,6 +1021,10 @@ async fn clear_copilot_dir() {
|
||||
remove_matching(paths::copilot_dir(), |_| true).await
|
||||
}
|
||||
|
||||
async fn clear_copilot_config_dir() {
|
||||
remove_matching(copilot_chat::copilot_chat_config_dir(), |_| true).await
|
||||
}
|
||||
|
||||
async fn get_copilot_lsp(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {
|
||||
const SERVER_PATH: &str = "dist/language-server.js";
|
||||
|
||||
|
||||
@@ -4,13 +4,14 @@ use std::sync::OnceLock;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use chrono::DateTime;
|
||||
use collections::HashSet;
|
||||
use fs::Fs;
|
||||
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
|
||||
use gpui::{prelude::*, App, AsyncApp, Global};
|
||||
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
use paths::home_dir;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::watch_config_file;
|
||||
use settings::watch_config_dir;
|
||||
use strum::EnumIter;
|
||||
|
||||
pub const COPILOT_CHAT_COMPLETION_URL: &str = "https://api.githubcopilot.com/chat/completions";
|
||||
@@ -212,7 +213,7 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<dyn HttpClient>, cx: &mut App) {
|
||||
cx.set_global(GlobalCopilotChat(copilot_chat));
|
||||
}
|
||||
|
||||
fn copilot_chat_config_dir() -> &'static PathBuf {
|
||||
pub fn copilot_chat_config_dir() -> &'static PathBuf {
|
||||
static COPILOT_CHAT_CONFIG_DIR: OnceLock<PathBuf> = OnceLock::new();
|
||||
|
||||
COPILOT_CHAT_CONFIG_DIR.get_or_init(|| {
|
||||
@@ -237,27 +238,18 @@ impl CopilotChat {
|
||||
}
|
||||
|
||||
pub fn new(fs: Arc<dyn Fs>, client: Arc<dyn HttpClient>, cx: &App) -> Self {
|
||||
let config_paths = copilot_chat_config_paths();
|
||||
|
||||
let resolve_config_path = {
|
||||
let fs = fs.clone();
|
||||
async move {
|
||||
for config_path in config_paths.iter() {
|
||||
if fs.metadata(config_path).await.is_ok_and(|v| v.is_some()) {
|
||||
return config_path.clone();
|
||||
}
|
||||
}
|
||||
config_paths[0].clone()
|
||||
}
|
||||
};
|
||||
let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
|
||||
let dir_path = copilot_chat_config_dir();
|
||||
|
||||
cx.spawn(|cx| async move {
|
||||
let config_file = resolve_config_path.await;
|
||||
let mut config_file_rx = watch_config_file(cx.background_executor(), fs, config_file);
|
||||
|
||||
while let Some(contents) = config_file_rx.next().await {
|
||||
let mut parent_watch_rx = watch_config_dir(
|
||||
cx.background_executor(),
|
||||
fs.clone(),
|
||||
dir_path.clone(),
|
||||
config_paths,
|
||||
);
|
||||
while let Some(contents) = parent_watch_rx.next().await {
|
||||
let oauth_token = extract_oauth_token(contents);
|
||||
|
||||
cx.update(|cx| {
|
||||
if let Some(this) = Self::global(cx).as_ref() {
|
||||
this.update(cx, |this, cx| {
|
||||
|
||||
@@ -311,7 +311,10 @@ impl ProjectDiagnosticsEditor {
|
||||
cx: &mut Context<Workspace>,
|
||||
) {
|
||||
if let Some(existing) = workspace.item_of_type::<ProjectDiagnosticsEditor>(cx) {
|
||||
workspace.activate_item(&existing, true, true, window, cx);
|
||||
let is_active = workspace
|
||||
.active_item(cx)
|
||||
.is_some_and(|item| item.item_id() == existing.item_id());
|
||||
workspace.activate_item(&existing, true, !is_active, window, cx);
|
||||
} else {
|
||||
let workspace_handle = cx.entity().downgrade();
|
||||
|
||||
|
||||
@@ -412,7 +412,6 @@ gpui::actions!(
|
||||
Tab,
|
||||
Backtab,
|
||||
ToggleAutoSignatureHelp,
|
||||
ToggleGitBlame,
|
||||
ToggleGitBlameInline,
|
||||
ToggleIndentGuides,
|
||||
ToggleInlayHints,
|
||||
|
||||
@@ -500,7 +500,7 @@ impl CompletionsMenu {
|
||||
highlight.font_weight = None;
|
||||
if completion
|
||||
.source
|
||||
.lsp_completion()
|
||||
.lsp_completion(false)
|
||||
.and_then(|lsp_completion| lsp_completion.deprecated)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
@@ -711,10 +711,12 @@ impl CompletionsMenu {
|
||||
|
||||
let completion = &completions[mat.candidate_id];
|
||||
let sort_key = completion.sort_key();
|
||||
let sort_text = completion
|
||||
.source
|
||||
.lsp_completion()
|
||||
.and_then(|lsp_completion| lsp_completion.sort_text.as_deref());
|
||||
let sort_text =
|
||||
if let CompletionSource::Lsp { lsp_completion, .. } = &completion.source {
|
||||
lsp_completion.sort_text.as_deref()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let score = Reverse(OrderedFloat(mat.score));
|
||||
|
||||
if mat.score >= 0.2 {
|
||||
|
||||
@@ -113,7 +113,6 @@ pub struct DisplayMap {
|
||||
}
|
||||
|
||||
impl DisplayMap {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
buffer: Entity<MultiBuffer>,
|
||||
font: Font,
|
||||
|
||||
@@ -726,7 +726,6 @@ impl BlockMap {
|
||||
self.show_excerpt_controls
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn header_and_footer_blocks<'a, R, T>(
|
||||
show_excerpt_controls: bool,
|
||||
excerpt_footer_height: u32,
|
||||
|
||||
@@ -607,12 +607,6 @@ pub trait Addon: 'static {
|
||||
fn to_any(&self) -> &dyn std::any::Any;
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
pub enum IsVimMode {
|
||||
Yes,
|
||||
No,
|
||||
}
|
||||
|
||||
/// Zed's primary implementation of text input, allowing users to edit a [`MultiBuffer`].
|
||||
///
|
||||
/// See the [module level documentation](self) for more information.
|
||||
@@ -644,6 +638,7 @@ pub struct Editor {
|
||||
inline_diagnostics_enabled: bool,
|
||||
inline_diagnostics: Vec<(Anchor, InlineDiagnostic)>,
|
||||
soft_wrap_mode_override: Option<language_settings::SoftWrap>,
|
||||
hard_wrap: Option<usize>,
|
||||
|
||||
// TODO: make this a access method
|
||||
pub project: Option<Entity<Project>>,
|
||||
@@ -1355,6 +1350,7 @@ impl Editor {
|
||||
inline_diagnostics_update: Task::ready(()),
|
||||
inline_diagnostics: Vec::new(),
|
||||
soft_wrap_mode_override,
|
||||
hard_wrap: None,
|
||||
completion_provider: project.clone().map(|project| Box::new(project) as _),
|
||||
semantics_provider: project.clone().map(|project| Rc::new(project) as _),
|
||||
collaboration_hub: project.clone().map(|project| Box::new(project) as _),
|
||||
@@ -3192,6 +3188,19 @@ impl Editor {
|
||||
|
||||
let trigger_in_words =
|
||||
this.show_edit_predictions_in_menu() || !had_active_inline_completion;
|
||||
if this.hard_wrap.is_some() {
|
||||
let latest: Range<Point> = this.selections.newest(cx).range();
|
||||
if latest.is_empty()
|
||||
&& this
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.snapshot(cx)
|
||||
.line_len(MultiBufferRow(latest.start.row))
|
||||
== latest.start.column
|
||||
{
|
||||
this.rewrap_impl(true, cx)
|
||||
}
|
||||
}
|
||||
this.trigger_completion_on_input(&text, trigger_in_words, window, cx);
|
||||
linked_editing_ranges::refresh_linked_ranges(this, window, cx);
|
||||
this.refresh_inline_completion(true, false, window, cx);
|
||||
@@ -5931,7 +5940,6 @@ impl Editor {
|
||||
const EDIT_PREDICTION_POPOVER_PADDING_X: Pixels = Pixels(24.);
|
||||
const EDIT_PREDICTION_POPOVER_PADDING_Y: Pixels = Pixels(2.);
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_edit_prediction_popover(
|
||||
&mut self,
|
||||
text_bounds: &Bounds<Pixels>,
|
||||
@@ -6043,7 +6051,6 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_edit_prediction_modifier_jump_popover(
|
||||
&mut self,
|
||||
text_bounds: &Bounds<Pixels>,
|
||||
@@ -6139,7 +6146,6 @@ impl Editor {
|
||||
Some((element, origin))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_edit_prediction_scroll_popover(
|
||||
&mut self,
|
||||
to_y: impl Fn(Size<Pixels>) -> Pixels,
|
||||
@@ -6170,7 +6176,6 @@ impl Editor {
|
||||
Some((element, origin))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_edit_prediction_eager_jump_popover(
|
||||
&mut self,
|
||||
text_bounds: &Bounds<Pixels>,
|
||||
@@ -6240,7 +6245,6 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_edit_prediction_end_of_line_popover(
|
||||
self: &mut Editor,
|
||||
label: &'static str,
|
||||
@@ -6299,7 +6303,6 @@ impl Editor {
|
||||
Some((element, origin))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_edit_prediction_diff_popover(
|
||||
self: &Editor,
|
||||
text_bounds: &Bounds<Pixels>,
|
||||
@@ -6607,7 +6610,6 @@ impl Editor {
|
||||
editor_bg_color.blend(accent_color.opacity(0.6))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_edit_prediction_cursor_popover(
|
||||
&self,
|
||||
min_width: Pixels,
|
||||
@@ -8514,10 +8516,10 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn rewrap(&mut self, _: &Rewrap, _: &mut Window, cx: &mut Context<Self>) {
|
||||
self.rewrap_impl(IsVimMode::No, cx)
|
||||
self.rewrap_impl(false, cx)
|
||||
}
|
||||
|
||||
pub fn rewrap_impl(&mut self, is_vim_mode: IsVimMode, cx: &mut Context<Self>) {
|
||||
pub fn rewrap_impl(&mut self, override_language_settings: bool, cx: &mut Context<Self>) {
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
let selections = self.selections.all::<Point>(cx);
|
||||
let mut selections = selections.iter().peekable();
|
||||
@@ -8591,7 +8593,9 @@ impl Editor {
|
||||
RewrapBehavior::Anywhere => true,
|
||||
};
|
||||
|
||||
let should_rewrap = is_vim_mode == IsVimMode::Yes || allow_rewrap_based_on_language;
|
||||
let should_rewrap = override_language_settings
|
||||
|| allow_rewrap_based_on_language
|
||||
|| self.hard_wrap.is_some();
|
||||
if !should_rewrap {
|
||||
continue;
|
||||
}
|
||||
@@ -8639,9 +8643,11 @@ impl Editor {
|
||||
continue;
|
||||
};
|
||||
|
||||
let wrap_column = buffer
|
||||
.language_settings_at(Point::new(start_row, 0), cx)
|
||||
.preferred_line_length as usize;
|
||||
let wrap_column = self.hard_wrap.unwrap_or_else(|| {
|
||||
buffer
|
||||
.language_settings_at(Point::new(start_row, 0), cx)
|
||||
.preferred_line_length as usize
|
||||
});
|
||||
let wrapped_text = wrap_with_prefix(
|
||||
line_prefix,
|
||||
lines_without_prefixes.join(" "),
|
||||
@@ -8652,7 +8658,7 @@ impl Editor {
|
||||
// TODO: should always use char-based diff while still supporting cursor behavior that
|
||||
// matches vim.
|
||||
let mut diff_options = DiffOptions::default();
|
||||
if is_vim_mode == IsVimMode::Yes {
|
||||
if override_language_settings {
|
||||
diff_options.max_word_diff_len = 0;
|
||||
diff_options.max_word_diff_line_count = 0;
|
||||
} else {
|
||||
@@ -11639,7 +11645,7 @@ impl Editor {
|
||||
fn go_to_next_hunk(&mut self, _: &GoToHunk, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let snapshot = self.snapshot(window, cx);
|
||||
let selection = self.selections.newest::<Point>(cx);
|
||||
self.go_to_hunk_after_or_before_position(
|
||||
self.go_to_hunk_before_or_after_position(
|
||||
&snapshot,
|
||||
selection.head(),
|
||||
Direction::Next,
|
||||
@@ -11648,7 +11654,7 @@ impl Editor {
|
||||
);
|
||||
}
|
||||
|
||||
fn go_to_hunk_after_or_before_position(
|
||||
fn go_to_hunk_before_or_after_position(
|
||||
&mut self,
|
||||
snapshot: &EditorSnapshot,
|
||||
position: Point,
|
||||
@@ -11699,7 +11705,7 @@ impl Editor {
|
||||
) {
|
||||
let snapshot = self.snapshot(window, cx);
|
||||
let selection = self.selections.newest::<Point>(cx);
|
||||
self.go_to_hunk_after_or_before_position(
|
||||
self.go_to_hunk_before_or_after_position(
|
||||
&snapshot,
|
||||
selection.head(),
|
||||
Direction::Prev,
|
||||
@@ -13861,23 +13867,38 @@ impl Editor {
|
||||
return;
|
||||
}
|
||||
|
||||
let snapshot = self.snapshot(window, cx);
|
||||
let newest_range = self.selections.newest::<Point>(cx).range();
|
||||
|
||||
let run_twice = snapshot
|
||||
.hunks_for_ranges([newest_range])
|
||||
.first()
|
||||
.is_some_and(|hunk| {
|
||||
let next_line = Point::new(hunk.row_range.end.0 + 1, 0);
|
||||
self.hunk_after_position(&snapshot, next_line)
|
||||
.is_some_and(|other| other.row_range == hunk.row_range)
|
||||
});
|
||||
|
||||
if run_twice {
|
||||
self.go_to_next_hunk(&GoToHunk, window, cx);
|
||||
}
|
||||
self.stage_or_unstage_diff_hunks(stage, ranges, cx);
|
||||
self.go_to_next_hunk(&GoToHunk, window, cx);
|
||||
let snapshot = self.snapshot(window, cx);
|
||||
let position = self.selections.newest::<Point>(cx).head();
|
||||
let mut row = snapshot
|
||||
.buffer_snapshot
|
||||
.diff_hunks_in_range(position..snapshot.buffer_snapshot.max_point())
|
||||
.find(|hunk| hunk.row_range.start.0 > position.row)
|
||||
.map(|hunk| hunk.row_range.start);
|
||||
|
||||
let all_diff_hunks_expanded = self.buffer().read(cx).all_diff_hunks_expanded();
|
||||
// Outside of the project diff editor, wrap around to the beginning.
|
||||
if !all_diff_hunks_expanded {
|
||||
row = row.or_else(|| {
|
||||
snapshot
|
||||
.buffer_snapshot
|
||||
.diff_hunks_in_range(Point::zero()..position)
|
||||
.find(|hunk| hunk.row_range.end.0 < position.row)
|
||||
.map(|hunk| hunk.row_range.start)
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(row) = row {
|
||||
let destination = Point::new(row.0, 0);
|
||||
let autoscroll = Autoscroll::center();
|
||||
|
||||
self.unfold_ranges(&[destination..destination], false, false, cx);
|
||||
self.change_selections(Some(autoscroll), window, cx, |s| {
|
||||
s.select_ranges([destination..destination]);
|
||||
});
|
||||
} else if all_diff_hunks_expanded {
|
||||
window.dispatch_action(::git::ExpandCommitEditor.boxed_clone(), cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn do_stage_or_unstage(
|
||||
@@ -14207,6 +14228,11 @@ impl Editor {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn set_hard_wrap(&mut self, hard_wrap: Option<usize>, cx: &mut Context<Self>) {
|
||||
self.hard_wrap = hard_wrap;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn set_text_style_refinement(&mut self, style: TextStyleRefinement) {
|
||||
self.text_style_refinement = Some(style);
|
||||
}
|
||||
@@ -14303,6 +14329,13 @@ impl Editor {
|
||||
EditorSettings::override_global(editor_settings, cx);
|
||||
}
|
||||
|
||||
pub fn line_numbers_enabled(&self, cx: &App) -> bool {
|
||||
if let Some(show_line_numbers) = self.show_line_numbers {
|
||||
return show_line_numbers;
|
||||
}
|
||||
EditorSettings::get_global(cx).gutter.line_numbers
|
||||
}
|
||||
|
||||
pub fn should_use_relative_line_numbers(&self, cx: &mut App) -> bool {
|
||||
self.use_relative_line_numbers
|
||||
.unwrap_or(EditorSettings::get_global(cx).relative_line_numbers)
|
||||
@@ -14482,7 +14515,7 @@ impl Editor {
|
||||
|
||||
pub fn toggle_git_blame(
|
||||
&mut self,
|
||||
_: &ToggleGitBlame,
|
||||
_: &::git::Blame,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
@@ -17017,6 +17050,7 @@ fn snippet_completions(
|
||||
sort_text: Some(char::MAX.to_string()),
|
||||
..lsp::CompletionItem::default()
|
||||
}),
|
||||
lsp_defaults: None,
|
||||
},
|
||||
label: CodeLabel {
|
||||
text: matching_prefix.clone(),
|
||||
|
||||
@@ -4737,6 +4737,31 @@ async fn test_rewrap(cx: &mut TestAppContext) {
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_hard_wrap(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.update_editor(|editor, _, cx| {
|
||||
editor.set_hard_wrap(Some(14), cx);
|
||||
});
|
||||
|
||||
cx.set_state(indoc!(
|
||||
"
|
||||
one two three ˇ
|
||||
"
|
||||
));
|
||||
cx.simulate_input("four");
|
||||
cx.run_until_parked();
|
||||
|
||||
cx.assert_editor_state(indoc!(
|
||||
"
|
||||
one two three
|
||||
fourˇ
|
||||
"
|
||||
));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_clipboard(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -12334,24 +12359,6 @@ async fn test_completions_default_resolve_data_handling(cx: &mut TestAppContext)
|
||||
},
|
||||
};
|
||||
|
||||
let item_0_out = lsp::CompletionItem {
|
||||
commit_characters: Some(default_commit_characters.clone()),
|
||||
insert_text_format: Some(default_insert_text_format),
|
||||
..item_0
|
||||
};
|
||||
let items_out = iter::once(item_0_out)
|
||||
.chain(items[1..].iter().map(|item| lsp::CompletionItem {
|
||||
commit_characters: Some(default_commit_characters.clone()),
|
||||
data: Some(default_data.clone()),
|
||||
insert_text_mode: Some(default_insert_text_mode),
|
||||
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
|
||||
range: default_edit_range,
|
||||
new_text: item.label.clone(),
|
||||
})),
|
||||
..item.clone()
|
||||
}))
|
||||
.collect::<Vec<lsp::CompletionItem>>();
|
||||
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(lsp::CompletionOptions {
|
||||
@@ -12370,10 +12377,11 @@ async fn test_completions_default_resolve_data_handling(cx: &mut TestAppContext)
|
||||
|
||||
let completion_data = default_data.clone();
|
||||
let completion_characters = default_commit_characters.clone();
|
||||
let completion_items = items.clone();
|
||||
cx.handle_request::<lsp::request::Completion, _, _>(move |_, _, _| {
|
||||
let default_data = completion_data.clone();
|
||||
let default_commit_characters = completion_characters.clone();
|
||||
let items = items.clone();
|
||||
let items = completion_items.clone();
|
||||
async move {
|
||||
Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
|
||||
items,
|
||||
@@ -12422,7 +12430,7 @@ async fn test_completions_default_resolve_data_handling(cx: &mut TestAppContext)
|
||||
.iter()
|
||||
.map(|mat| mat.string.clone())
|
||||
.collect::<Vec<String>>(),
|
||||
items_out
|
||||
items
|
||||
.iter()
|
||||
.map(|completion| completion.label.clone())
|
||||
.collect::<Vec<String>>()
|
||||
@@ -12435,14 +12443,18 @@ async fn test_completions_default_resolve_data_handling(cx: &mut TestAppContext)
|
||||
// with 4 from the end.
|
||||
assert_eq!(
|
||||
*resolved_items.lock(),
|
||||
[
|
||||
&items_out[0..16],
|
||||
&items_out[items_out.len() - 4..items_out.len()]
|
||||
]
|
||||
.concat()
|
||||
.iter()
|
||||
.cloned()
|
||||
.collect::<Vec<lsp::CompletionItem>>()
|
||||
[&items[0..16], &items[items.len() - 4..items.len()]]
|
||||
.concat()
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|mut item| {
|
||||
if item.data.is_none() {
|
||||
item.data = Some(default_data.clone());
|
||||
}
|
||||
item
|
||||
})
|
||||
.collect::<Vec<lsp::CompletionItem>>(),
|
||||
"Items sent for resolve should be unchanged modulo resolve `data` filled with default if missing"
|
||||
);
|
||||
resolved_items.lock().clear();
|
||||
|
||||
@@ -12453,9 +12465,15 @@ async fn test_completions_default_resolve_data_handling(cx: &mut TestAppContext)
|
||||
// Completions that have already been resolved are skipped.
|
||||
assert_eq!(
|
||||
*resolved_items.lock(),
|
||||
items_out[items_out.len() - 16..items_out.len() - 4]
|
||||
items[items.len() - 16..items.len() - 4]
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|mut item| {
|
||||
if item.data.is_none() {
|
||||
item.data = Some(default_data.clone());
|
||||
}
|
||||
item
|
||||
})
|
||||
.collect::<Vec<lsp::CompletionItem>>()
|
||||
);
|
||||
resolved_items.lock().clear();
|
||||
|
||||
@@ -32,15 +32,14 @@ use collections::{BTreeMap, HashMap, HashSet};
|
||||
use file_icons::FileIcons;
|
||||
use git::{blame::BlameEntry, status::FileStatus, Oid};
|
||||
use gpui::{
|
||||
anchored, deferred, div, fill, linear_color_stop, linear_gradient, outline, pattern_slash,
|
||||
point, px, quad, relative, size, solid_background, svg, transparent_black, Action, AnyElement,
|
||||
App, AvailableSpace, Axis, Bounds, ClickEvent, ClipboardItem, ContentMask, Context, Corner,
|
||||
Corners, CursorStyle, DispatchPhase, Edges, Element, ElementInputHandler, Entity,
|
||||
Focusable as _, FontId, GlobalElementId, Hitbox, Hsla, InteractiveElement, IntoElement,
|
||||
Keystroke, Length, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent,
|
||||
MouseUpEvent, PaintQuad, ParentElement, Pixels, ScrollDelta, ScrollWheelEvent, ShapedLine,
|
||||
SharedString, Size, StatefulInteractiveElement, Style, Styled, Subscription, TextRun,
|
||||
TextStyleRefinement, Window,
|
||||
anchored, deferred, div, fill, linear_color_stop, linear_gradient, outline, point, px, quad,
|
||||
relative, size, solid_background, svg, transparent_black, Action, AnyElement, App,
|
||||
AvailableSpace, Axis, Bounds, ClickEvent, ClipboardItem, ContentMask, Context, Corner, Corners,
|
||||
CursorStyle, DispatchPhase, Edges, Element, ElementInputHandler, Entity, Focusable as _,
|
||||
FontId, GlobalElementId, Hitbox, Hsla, InteractiveElement, IntoElement, Keystroke, Length,
|
||||
ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, PaintQuad,
|
||||
ParentElement, Pixels, ScrollDelta, ScrollWheelEvent, ShapedLine, SharedString, Size,
|
||||
StatefulInteractiveElement, Style, Styled, Subscription, TextRun, TextStyleRefinement, Window,
|
||||
};
|
||||
use inline_completion::Direction;
|
||||
use itertools::Itertools;
|
||||
@@ -958,7 +957,6 @@ impl EditorElement {
|
||||
cx.notify()
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_selections(
|
||||
&self,
|
||||
start_anchor: Anchor,
|
||||
@@ -1130,7 +1128,6 @@ impl EditorElement {
|
||||
cursors
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_visible_cursors(
|
||||
&self,
|
||||
snapshot: &EditorSnapshot,
|
||||
@@ -1484,7 +1481,6 @@ impl EditorElement {
|
||||
axis_pair(horizontal_scrollbar, vertical_scrollbar)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn prepaint_crease_toggles(
|
||||
&self,
|
||||
crease_toggles: &mut [Option<AnyElement>],
|
||||
@@ -1519,7 +1515,6 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn prepaint_crease_trailers(
|
||||
&self,
|
||||
trailers: Vec<Option<AnyElement>>,
|
||||
@@ -1596,7 +1591,6 @@ impl EditorElement {
|
||||
display_hunks
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_inline_diagnostics(
|
||||
&self,
|
||||
line_layouts: &[LineWithInvisibles],
|
||||
@@ -1747,7 +1741,6 @@ impl EditorElement {
|
||||
elements
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_inline_blame(
|
||||
&self,
|
||||
display_row: DisplayRow,
|
||||
@@ -1827,7 +1820,6 @@ impl EditorElement {
|
||||
Some(element)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_blame_entries(
|
||||
&self,
|
||||
buffer_rows: &[RowInfo],
|
||||
@@ -1896,7 +1888,6 @@ impl EditorElement {
|
||||
Some(shaped_lines)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_indent_guides(
|
||||
&self,
|
||||
content_origin: gpui::Point<Pixels>,
|
||||
@@ -2014,7 +2005,6 @@ impl EditorElement {
|
||||
(offset_y, length)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_run_indicators(
|
||||
&self,
|
||||
line_height: Pixels,
|
||||
@@ -2108,7 +2098,6 @@ impl EditorElement {
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_code_actions_indicator(
|
||||
&self,
|
||||
line_height: Pixels,
|
||||
@@ -2207,7 +2196,6 @@ impl EditorElement {
|
||||
relative_rows
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_line_numbers(
|
||||
&self,
|
||||
gutter_hitbox: Option<&Hitbox>,
|
||||
@@ -2423,7 +2411,6 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn prepaint_lines(
|
||||
&self,
|
||||
start_row: DisplayRow,
|
||||
@@ -2450,7 +2437,6 @@ impl EditorElement {
|
||||
line_elements
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_block(
|
||||
&self,
|
||||
block: &Block,
|
||||
@@ -2950,7 +2936,6 @@ impl EditorElement {
|
||||
}))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_blocks(
|
||||
&self,
|
||||
rows: Range<DisplayRow>,
|
||||
@@ -3135,7 +3120,6 @@ impl EditorElement {
|
||||
|
||||
/// Returns true if any of the blocks changed size since the previous frame. This will trigger
|
||||
/// a restart of rendering for the editor based on the new sizes.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_blocks(
|
||||
&self,
|
||||
blocks: &mut Vec<BlockLayout>,
|
||||
@@ -3179,7 +3163,6 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_sticky_buffer_header(
|
||||
&self,
|
||||
StickyHeaderExcerpt {
|
||||
@@ -3254,7 +3237,6 @@ impl EditorElement {
|
||||
header
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_cursor_popovers(
|
||||
&self,
|
||||
line_height: Pixels,
|
||||
@@ -3443,7 +3425,6 @@ impl EditorElement {
|
||||
);
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_gutter_menu(
|
||||
&self,
|
||||
line_height: Pixels,
|
||||
@@ -3496,7 +3477,6 @@ impl EditorElement {
|
||||
);
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_popovers_above_or_below_line(
|
||||
&self,
|
||||
target_position: gpui::Point<Pixels>,
|
||||
@@ -3610,7 +3590,6 @@ impl EditorElement {
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_context_menu_aside(
|
||||
&self,
|
||||
y_flipped: bool,
|
||||
@@ -3806,7 +3785,6 @@ impl EditorElement {
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_hover_popovers(
|
||||
&self,
|
||||
snapshot: &EditorSnapshot,
|
||||
@@ -3923,7 +3901,6 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_diff_hunk_controls(
|
||||
&self,
|
||||
row_range: Range<DisplayRow>,
|
||||
@@ -4008,7 +3985,6 @@ impl EditorElement {
|
||||
controls
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_signature_help(
|
||||
&self,
|
||||
hitbox: &Hitbox,
|
||||
@@ -4379,13 +4355,6 @@ impl EditorElement {
|
||||
}
|
||||
|
||||
fn paint_gutter_diff_hunks(layout: &mut EditorLayout, window: &mut Window, cx: &mut App) {
|
||||
let is_light = cx.theme().appearance().is_light();
|
||||
|
||||
let hunk_style = ProjectSettings::get_global(cx)
|
||||
.git
|
||||
.hunk_style
|
||||
.unwrap_or_default();
|
||||
|
||||
if layout.display_hunks.is_empty() {
|
||||
return;
|
||||
}
|
||||
@@ -4446,28 +4415,7 @@ impl EditorElement {
|
||||
}),
|
||||
};
|
||||
|
||||
if let Some((hunk_bounds, mut background_color, corner_radii, secondary_status)) =
|
||||
hunk_to_paint
|
||||
{
|
||||
match hunk_style {
|
||||
GitHunkStyleSetting::Transparent | GitHunkStyleSetting::Pattern => {
|
||||
if secondary_status.has_secondary_hunk() {
|
||||
background_color =
|
||||
background_color.opacity(if is_light { 0.2 } else { 0.32 });
|
||||
}
|
||||
}
|
||||
GitHunkStyleSetting::StagedPattern
|
||||
| GitHunkStyleSetting::StagedTransparent => {
|
||||
if !secondary_status.has_secondary_hunk() {
|
||||
background_color =
|
||||
background_color.opacity(if is_light { 0.2 } else { 0.32 });
|
||||
}
|
||||
}
|
||||
GitHunkStyleSetting::StagedBorder | GitHunkStyleSetting::Border => {
|
||||
// Don't change the background color
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((hunk_bounds, background_color, corner_radii, status)) = hunk_to_paint {
|
||||
// Flatten the background color with the editor color to prevent
|
||||
// elements below transparent hunks from showing through
|
||||
let flattened_background_color = cx
|
||||
@@ -4476,13 +4424,29 @@ impl EditorElement {
|
||||
.editor_background
|
||||
.blend(background_color);
|
||||
|
||||
window.paint_quad(quad(
|
||||
hunk_bounds,
|
||||
corner_radii,
|
||||
flattened_background_color,
|
||||
Edges::default(),
|
||||
transparent_black(),
|
||||
));
|
||||
if !Self::diff_hunk_hollow(status, cx) {
|
||||
window.paint_quad(quad(
|
||||
hunk_bounds,
|
||||
corner_radii,
|
||||
flattened_background_color,
|
||||
Edges::default(),
|
||||
transparent_black(),
|
||||
));
|
||||
} else {
|
||||
let flattened_unstaged_background_color = cx
|
||||
.theme()
|
||||
.colors()
|
||||
.editor_background
|
||||
.blend(background_color.opacity(0.3));
|
||||
|
||||
window.paint_quad(quad(
|
||||
hunk_bounds,
|
||||
corner_radii,
|
||||
flattened_unstaged_background_color,
|
||||
Edges::all(Pixels(1.0)),
|
||||
flattened_background_color,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -4676,6 +4640,7 @@ impl EditorElement {
|
||||
};
|
||||
window.set_cursor_style(cursor_style, &layout.position_map.text_hitbox);
|
||||
|
||||
self.paint_lines_background(layout, window, cx);
|
||||
let invisible_display_ranges = self.paint_highlights(layout, window);
|
||||
self.paint_lines(&invisible_display_ranges, layout, window, cx);
|
||||
self.paint_redactions(layout, window);
|
||||
@@ -4766,6 +4731,18 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
|
||||
fn paint_lines_background(
|
||||
&mut self,
|
||||
layout: &mut EditorLayout,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
for (ix, line_with_invisibles) in layout.position_map.line_layouts.iter().enumerate() {
|
||||
let row = DisplayRow(layout.visible_display_row_range.start.0 + ix as u32);
|
||||
line_with_invisibles.draw_background(layout, row, layout.content_origin, window, cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn paint_redactions(&mut self, layout: &EditorLayout, window: &mut Window) {
|
||||
if layout.redacted_ranges.is_empty() {
|
||||
return;
|
||||
@@ -5304,7 +5281,6 @@ impl EditorElement {
|
||||
});
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn paint_highlighted_range(
|
||||
&self,
|
||||
range: Range<DisplayPoint>,
|
||||
@@ -5675,6 +5651,18 @@ impl EditorElement {
            &[run],
        )
    }

    fn diff_hunk_hollow(status: DiffHunkStatus, cx: &mut App) -> bool {
        let unstaged = status.has_secondary_hunk();
        let unstaged_hollow = ProjectSettings::get_global(cx)
            .git
            .hunk_style
            .map_or(false, |style| {
                matches!(style, GitHunkStyleSetting::UnstagedHollow)
            });

        unstaged == unstaged_hollow
    }
}
|
||||
|
||||
fn header_jump_data(
|
||||
@@ -5730,7 +5718,6 @@ impl AcceptEditPredictionBinding {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn prepaint_gutter_button(
|
||||
button: IconButton,
|
||||
row: DisplayRow,
|
||||
@@ -5981,7 +5968,6 @@ impl fmt::Debug for LineFragment {
|
||||
}
|
||||
|
||||
impl LineWithInvisibles {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn from_chunks<'a>(
|
||||
chunks: impl Iterator<Item = HighlightedChunk<'a>>,
|
||||
editor_style: &EditorStyle,
|
||||
@@ -6186,7 +6172,6 @@ impl LineWithInvisibles {
|
||||
layouts
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn prepaint(
|
||||
&mut self,
|
||||
line_height: Pixels,
|
||||
@@ -6221,7 +6206,6 @@ impl LineWithInvisibles {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn draw(
|
||||
&self,
|
||||
layout: &EditorLayout,
|
||||
@@ -6265,7 +6249,35 @@ impl LineWithInvisibles {
|
||||
);
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn draw_background(
|
||||
&self,
|
||||
layout: &EditorLayout,
|
||||
row: DisplayRow,
|
||||
content_origin: gpui::Point<Pixels>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
let line_height = layout.position_map.line_height;
|
||||
let line_y = line_height
|
||||
* (row.as_f32() - layout.position_map.scroll_pixel_position.y / line_height);
|
||||
|
||||
let mut fragment_origin =
|
||||
content_origin + gpui::point(-layout.position_map.scroll_pixel_position.x, line_y);
|
||||
|
||||
for fragment in &self.fragments {
|
||||
match fragment {
|
||||
LineFragment::Text(line) => {
|
||||
line.paint_background(fragment_origin, line_height, window, cx)
|
||||
.log_err();
|
||||
fragment_origin.x += line.width;
|
||||
}
|
||||
LineFragment::Element { size, .. } => {
|
||||
fragment_origin.x += size.width;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn draw_invisibles(
|
||||
&self,
|
||||
selection_ranges: &[Range<DisplayPoint>],
|
||||
@@ -6785,10 +6797,6 @@ impl Element for EditorElement {
|
||||
.update(cx, |editor, cx| editor.highlighted_display_rows(window, cx));
|
||||
|
||||
let is_light = cx.theme().appearance().is_light();
|
||||
let hunk_style = ProjectSettings::get_global(cx)
|
||||
.git
|
||||
.hunk_style
|
||||
.unwrap_or_default();
|
||||
|
||||
for (ix, row_info) in row_infos.iter().enumerate() {
|
||||
let Some(diff_status) = row_info.diff_status else {
|
||||
@@ -6806,76 +6814,29 @@ impl Element for EditorElement {
|
||||
}
|
||||
};
|
||||
|
||||
let unstaged = diff_status.has_secondary_hunk();
|
||||
let hunk_opacity = if is_light { 0.16 } else { 0.12 };
|
||||
let slash_width = line_height.0 / 1.5; // ~16 by default
|
||||
|
||||
let staged_highlight: LineHighlight = match hunk_style {
|
||||
GitHunkStyleSetting::Transparent
|
||||
| GitHunkStyleSetting::Pattern
|
||||
| GitHunkStyleSetting::Border => {
|
||||
solid_background(background_color.opacity(hunk_opacity)).into()
|
||||
}
|
||||
GitHunkStyleSetting::StagedPattern => {
|
||||
pattern_slash(background_color.opacity(hunk_opacity), slash_width)
|
||||
.into()
|
||||
}
|
||||
GitHunkStyleSetting::StagedTransparent => {
|
||||
solid_background(background_color.opacity(if is_light {
|
||||
0.08
|
||||
} else {
|
||||
0.04
|
||||
}))
|
||||
.into()
|
||||
}
|
||||
GitHunkStyleSetting::StagedBorder => LineHighlight {
|
||||
background: (background_color.opacity(if is_light {
|
||||
0.08
|
||||
} else {
|
||||
0.06
|
||||
}))
|
||||
.into(),
|
||||
border: Some(if is_light {
|
||||
background_color.opacity(0.48)
|
||||
} else {
|
||||
background_color.opacity(0.36)
|
||||
}),
|
||||
},
|
||||
let hollow_highlight = LineHighlight {
|
||||
background: (background_color.opacity(if is_light {
|
||||
0.08
|
||||
} else {
|
||||
0.06
|
||||
}))
|
||||
.into(),
|
||||
border: Some(if is_light {
|
||||
background_color.opacity(0.48)
|
||||
} else {
|
||||
background_color.opacity(0.36)
|
||||
}),
|
||||
};
|
||||
|
||||
let unstaged_highlight = match hunk_style {
|
||||
GitHunkStyleSetting::Transparent => {
|
||||
solid_background(background_color.opacity(if is_light {
|
||||
0.08
|
||||
} else {
|
||||
0.04
|
||||
}))
|
||||
.into()
|
||||
}
|
||||
GitHunkStyleSetting::Pattern => {
|
||||
pattern_slash(background_color.opacity(hunk_opacity), slash_width)
|
||||
.into()
|
||||
}
|
||||
GitHunkStyleSetting::Border => LineHighlight {
|
||||
background: (background_color.opacity(if is_light {
|
||||
0.08
|
||||
} else {
|
||||
0.02
|
||||
}))
|
||||
.into(),
|
||||
border: Some(background_color.opacity(0.5)),
|
||||
},
|
||||
GitHunkStyleSetting::StagedPattern
|
||||
| GitHunkStyleSetting::StagedTransparent
|
||||
| GitHunkStyleSetting::StagedBorder => {
|
||||
solid_background(background_color.opacity(hunk_opacity)).into()
|
||||
}
|
||||
};
|
||||
let filled_highlight =
|
||||
solid_background(background_color.opacity(hunk_opacity)).into();
|
||||
|
||||
let background = if unstaged {
|
||||
unstaged_highlight
|
||||
let background = if Self::diff_hunk_hollow(diff_status, cx) {
|
||||
hollow_highlight
|
||||
} else {
|
||||
staged_highlight
|
||||
filled_highlight
|
||||
};
|
||||
|
||||
highlighted_rows
|
||||
@@ -7659,7 +7620,6 @@ struct ScrollbarRangeData {
|
||||
}
|
||||
|
||||
impl ScrollbarRangeData {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
scrollbar_bounds: Bounds<Pixels>,
|
||||
letter_size: Size<Pixels>,
|
||||
@@ -9014,7 +8974,7 @@ fn diff_hunk_controls(
    let snapshot = editor.snapshot(window, cx);
    let position =
        hunk_range.end.to_point(&snapshot.buffer_snapshot);
    editor.go_to_hunk_after_or_before_position(
    editor.go_to_hunk_before_or_after_position(
        &snapshot,
        position,
        Direction::Next,
@@ -9050,7 +9010,7 @@ fn diff_hunk_controls(
    let snapshot = editor.snapshot(window, cx);
    let point =
        hunk_range.start.to_point(&snapshot.buffer_snapshot);
    editor.go_to_hunk_after_or_before_position(
    editor.go_to_hunk_before_or_after_position(
        &snapshot,
        point,
        Direction::Prev,
|
||||
|
||||
@@ -370,7 +370,6 @@ impl GitBlame {
|
||||
async move {
|
||||
let Some(Blame {
|
||||
entries,
|
||||
permalinks,
|
||||
messages,
|
||||
remote_url,
|
||||
}) = blame.await?
|
||||
@@ -379,13 +378,8 @@ impl GitBlame {
|
||||
};
|
||||
|
||||
let entries = build_blame_entry_sum_tree(entries, snapshot.max_point().row);
|
||||
let commit_details = parse_commit_messages(
|
||||
messages,
|
||||
remote_url,
|
||||
&permalinks,
|
||||
provider_registry,
|
||||
)
|
||||
.await;
|
||||
let commit_details =
|
||||
parse_commit_messages(messages, remote_url, provider_registry).await;
|
||||
|
||||
anyhow::Ok(Some((entries, commit_details)))
|
||||
}
|
||||
@@ -477,7 +471,6 @@ fn build_blame_entry_sum_tree(entries: Vec<BlameEntry>, max_row: u32) -> SumTree
|
||||
async fn parse_commit_messages(
|
||||
messages: impl IntoIterator<Item = (Oid, String)>,
|
||||
remote_url: Option<String>,
|
||||
deprecated_permalinks: &HashMap<Oid, Url>,
|
||||
provider_registry: Arc<GitHostingProviderRegistry>,
|
||||
) -> HashMap<Oid, ParsedCommitMessage> {
|
||||
let mut commit_details = HashMap::default();
|
||||
@@ -495,11 +488,7 @@ async fn parse_commit_messages(
|
||||
},
|
||||
))
|
||||
} else {
|
||||
// DEPRECATED (18 Apr 24): Sending permalinks over the wire is deprecated. Clients
|
||||
// now do the parsing. This is here for backwards compatibility, so that
|
||||
// when an old peer sends a client no `parsed_remote_url` but `deprecated_permalinks`,
|
||||
// we fall back to that.
|
||||
deprecated_permalinks.get(&oid).cloned()
|
||||
None
|
||||
};
|
||||
|
||||
let remote = parsed_remote_url
|
||||
|
||||
@@ -223,7 +223,6 @@ impl ScrollManager {
|
||||
self.anchor.scroll_position(snapshot)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn set_scroll_position(
|
||||
&mut self,
|
||||
scroll_position: gpui::Point<f32>,
|
||||
@@ -298,7 +297,6 @@ impl ScrollManager {
|
||||
);
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn set_anchor(
|
||||
&mut self,
|
||||
anchor: ScrollAnchor,
|
||||
|
||||
@@ -22,7 +22,6 @@ collections.workspace = true
|
||||
env_logger.workspace = true
|
||||
feature_flags.workspace = true
|
||||
fs.workspace = true
|
||||
git.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
language.workspace = true
|
||||
|
||||
@@ -5,7 +5,6 @@ use client::{Client, UserStore};
|
||||
use clock::RealSystemClock;
|
||||
use collections::BTreeMap;
|
||||
use feature_flags::FeatureFlagAppExt as _;
|
||||
use git::GitHostingProviderRegistry;
|
||||
use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, Entity};
|
||||
use http_client::{HttpClient, Method};
|
||||
use language::LanguageRegistry;
|
||||
@@ -274,8 +273,7 @@ async fn run_evaluation(
|
||||
let repos_dir = Path::new(EVAL_REPOS_DIR);
|
||||
let db_path = Path::new(EVAL_DB_PATH);
|
||||
let api_key = std::env::var("OPENAI_API_KEY").unwrap();
|
||||
let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new());
|
||||
let fs = Arc::new(RealFs::new(git_hosting_provider_registry, None)) as Arc<dyn Fs>;
|
||||
let fs = Arc::new(RealFs::new(None)) as Arc<dyn Fs>;
|
||||
let clock = Arc::new(RealSystemClock);
|
||||
let client = cx
|
||||
.update(|cx| {
|
||||
@@ -399,7 +397,6 @@ async fn run_evaluation(
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn run_eval_project(
|
||||
evaluation_project: EvaluationProject,
|
||||
user_store: &Entity<UserStore>,
|
||||
|
||||
@@ -6,8 +6,7 @@ repository = "https://github.com/zed-industries/zed"
documentation = "https://docs.rs/zed_extension_api"
keywords = ["zed", "extension"]
edition.workspace = true
# Change back to `true` when we're ready to publish v0.3.0.
publish = false
publish = true
license = "Apache-2.0"

[lints]
|
||||
|
||||
@@ -23,7 +23,7 @@ need to set your `crate-type` accordingly:

```toml
[dependencies]
zed_extension_api = "0.1.0"
zed_extension_api = "0.3.0"

[lib]
crate-type = ["cdylib"]

@@ -63,6 +63,7 @@ Here is the compatibility of the `zed_extension_api` with versions of Zed:

| Zed version | `zed_extension_api` version |
| ----------- | --------------------------- |
| `0.178.x`   | `0.0.1` - `0.3.0`           |
| `0.162.x`   | `0.0.1` - `0.2.0`           |
| `0.149.x`   | `0.0.1` - `0.1.0`           |
| `0.131.x`   | `0.0.1` - `0.0.6`           |
|
||||
|
||||
@@ -195,7 +195,6 @@ static mut EXTENSION: Option<Box<dyn Extension>> = None;
|
||||
pub static ZED_API_VERSION: [u8; 6] = *include_bytes!(concat!(env!("OUT_DIR"), "/version_bytes"));
|
||||
|
||||
mod wit {
|
||||
#![allow(clippy::too_many_arguments, clippy::missing_safety_doc)]
|
||||
|
||||
wit_bindgen::generate!({
|
||||
skip: ["init-extension"],
|
||||
|
||||
@@ -218,7 +218,6 @@ impl ExtensionStore {
|
||||
cx.global::<GlobalExtensionStore>().0.clone()
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
extensions_dir: PathBuf,
|
||||
build_dir: Option<PathBuf>,
|
||||
|
||||
@@ -59,7 +59,7 @@ pub fn wasm_api_version_range(release_channel: ReleaseChannel) -> RangeInclusive

    let max_version = match release_channel {
        ReleaseChannel::Dev | ReleaseChannel::Nightly => latest::MAX_VERSION,
        ReleaseChannel::Stable | ReleaseChannel::Preview => since_v0_2_0::MAX_VERSION,
        ReleaseChannel::Stable | ReleaseChannel::Preview => latest::MAX_VERSION,
    };

    since_v0_0_1::MIN_VERSION..=max_version
|
||||
@@ -108,8 +108,6 @@ impl Extension {
|
||||
let _ = release_channel;
|
||||
|
||||
if version >= latest::MIN_VERSION {
|
||||
authorize_access_to_unreleased_wasm_api_version(release_channel)?;
|
||||
|
||||
let extension =
|
||||
latest::Extension::instantiate_async(store, component, latest::linker())
|
||||
.await
|
||||
|
||||
@@ -8,7 +8,6 @@ use wasmtime::component::{Linker, Resource};
|
||||
use super::latest;
|
||||
|
||||
pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 2, 0);
|
||||
pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 2, 0);
|
||||
|
||||
wasmtime::component::bindgen!({
|
||||
async: true,
|
||||
|
||||
@@ -80,11 +80,6 @@ impl FeatureFlag for PredictEditsNonEagerModeFeatureFlag {
    }
}

pub struct GitUiFeatureFlag;
impl FeatureFlag for GitUiFeatureFlag {
    const NAME: &'static str = "git-ui";
}

pub struct Remoting {}
impl FeatureFlag for Remoting {
    const NAME: &'static str = "remoting";
|
||||
|
||||
@@ -653,7 +653,6 @@ impl FileSearchQuery {
|
||||
}
|
||||
|
||||
impl FileFinderDelegate {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
file_finder: WeakEntity<FileFinder>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
|
||||
@@ -436,8 +436,8 @@ impl PickerDelegate for NewPathDelegate {
|
||||
)
|
||||
}
|
||||
|
||||
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> SharedString {
|
||||
"Type a path...".into()
|
||||
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
|
||||
Some("Type a path...".into())
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
|
||||
|
||||
@@ -347,12 +347,14 @@ impl PickerDelegate for OpenPathDelegate {
|
||||
)
|
||||
}
|
||||
|
||||
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> SharedString {
|
||||
if let Some(error) = self.directory_state.as_ref().and_then(|s| s.error.clone()) {
|
||||
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
|
||||
let text = if let Some(error) = self.directory_state.as_ref().and_then(|s| s.error.clone())
|
||||
{
|
||||
error
|
||||
} else {
|
||||
"No such file or directory".into()
|
||||
}
|
||||
};
|
||||
Some(text)
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
|
||||
|
||||
@@ -11,18 +11,19 @@ use collections::HashMap;
|
||||
use git::status::StatusCode;
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
use git::status::TrackedStatus;
|
||||
use git::GitHostingProviderRegistry;
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
use git::{repository::RepoPath, status::FileStatus};
|
||||
|
||||
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
|
||||
use ashpd::desktop::trash;
|
||||
use std::borrow::Cow;
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
use std::collections::HashSet;
|
||||
#[cfg(unix)]
|
||||
use std::os::fd::AsFd;
|
||||
#[cfg(unix)]
|
||||
use std::os::fd::AsRawFd;
|
||||
use util::command::new_std_command;
|
||||
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::fs::MetadataExt;
|
||||
@@ -137,6 +138,7 @@ pub trait Fs: Send + Sync {
|
||||
|
||||
fn home_dir(&self) -> Option<PathBuf>;
|
||||
fn open_repo(&self, abs_dot_git: &Path) -> Option<Arc<dyn GitRepository>>;
|
||||
fn git_init(&self, abs_work_directory: &Path, fallback_branch_name: String) -> Result<()>;
|
||||
fn is_fake(&self) -> bool;
|
||||
async fn is_case_sensitive(&self) -> Result<bool>;
|
||||
|
||||
@@ -247,7 +249,6 @@ impl From<MTime> for proto::Timestamp {
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct RealFs {
|
||||
git_hosting_provider_registry: Arc<GitHostingProviderRegistry>,
|
||||
git_binary_path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
@@ -300,14 +301,8 @@ impl FileHandle for std::fs::File {
pub struct RealWatcher {}

impl RealFs {
    pub fn new(
        git_hosting_provider_registry: Arc<GitHostingProviderRegistry>,
        git_binary_path: Option<PathBuf>,
    ) -> Self {
        Self {
            git_hosting_provider_registry,
            git_binary_path,
        }
    pub fn new(git_binary_path: Option<PathBuf>) -> Self {
        Self { git_binary_path }
    }
}
|
||||
|
||||
@@ -770,10 +765,32 @@ impl Fs for RealFs {
|
||||
Some(Arc::new(RealGitRepository::new(
|
||||
repo,
|
||||
self.git_binary_path.clone(),
|
||||
self.git_hosting_provider_registry.clone(),
|
||||
)))
|
||||
}
|
||||
|
||||
fn git_init(&self, abs_work_directory_path: &Path, fallback_branch_name: String) -> Result<()> {
|
||||
let config = new_std_command("git")
|
||||
.current_dir(abs_work_directory_path)
|
||||
.args(&["config", "--global", "--get", "init.defaultBranch"])
|
||||
.output()?;
|
||||
|
||||
let branch_name;
|
||||
|
||||
if config.status.success() && !config.stdout.is_empty() {
|
||||
branch_name = String::from_utf8_lossy(&config.stdout);
|
||||
} else {
|
||||
branch_name = Cow::Borrowed(fallback_branch_name.as_str());
|
||||
}
|
||||
|
||||
new_std_command("git")
|
||||
.current_dir(abs_work_directory_path)
|
||||
.args(&["init", "-b"])
|
||||
.arg(branch_name.trim())
|
||||
.output()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn is_fake(&self) -> bool {
|
||||
false
|
||||
}
|
||||
@@ -2084,6 +2101,14 @@ impl Fs for FakeFs {
|
||||
}
|
||||
}
|
||||
|
||||
fn git_init(
|
||||
&self,
|
||||
abs_work_directory_path: &Path,
|
||||
_fallback_branch_name: String,
|
||||
) -> Result<()> {
|
||||
smol::block_on(self.create_dir(&abs_work_directory_path.join(".git")))
|
||||
}
|
||||
|
||||
fn is_fake(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
@@ -170,7 +170,6 @@ impl<'a> Matcher<'a> {
|
||||
score
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn recursive_score_match(
|
||||
&mut self,
|
||||
path: &[char],
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
use crate::commit::get_messages;
|
||||
use crate::{parse_git_remote_url, BuildCommitPermalinkParams, GitHostingProviderRegistry, Oid};
|
||||
use crate::Oid;
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use collections::{HashMap, HashSet};
|
||||
use futures::AsyncWriteExt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::io::Write;
|
||||
use std::process::Stdio;
|
||||
use std::sync::Arc;
|
||||
use std::{ops::Range, path::Path};
|
||||
use text::Rope;
|
||||
use time::macros::format_description;
|
||||
use time::OffsetDateTime;
|
||||
use time::UtcOffset;
|
||||
use url::Url;
|
||||
|
||||
pub use git2 as libgit;
|
||||
|
||||
@@ -19,52 +17,34 @@ pub use git2 as libgit;
|
||||
pub struct Blame {
|
||||
pub entries: Vec<BlameEntry>,
|
||||
pub messages: HashMap<Oid, String>,
|
||||
pub permalinks: HashMap<Oid, Url>,
|
||||
pub remote_url: Option<String>,
|
||||
}
|
||||
|
||||
impl Blame {
|
||||
pub fn for_path(
|
||||
pub async fn for_path(
|
||||
git_binary: &Path,
|
||||
working_directory: &Path,
|
||||
path: &Path,
|
||||
content: &Rope,
|
||||
remote_url: Option<String>,
|
||||
provider_registry: Arc<GitHostingProviderRegistry>,
|
||||
) -> Result<Self> {
|
||||
let output = run_git_blame(git_binary, working_directory, path, content)?;
|
||||
let output = run_git_blame(git_binary, working_directory, path, content).await?;
|
||||
let mut entries = parse_git_blame(&output)?;
|
||||
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start));
|
||||
|
||||
let mut permalinks = HashMap::default();
|
||||
let mut unique_shas = HashSet::default();
|
||||
let parsed_remote_url = remote_url
|
||||
.as_deref()
|
||||
.and_then(|remote_url| parse_git_remote_url(provider_registry, remote_url));
|
||||
|
||||
for entry in entries.iter_mut() {
|
||||
unique_shas.insert(entry.sha);
|
||||
// DEPRECATED (18 Apr 24): Sending permalinks over the wire is deprecated. Clients
|
||||
// now do the parsing.
|
||||
if let Some((provider, remote)) = parsed_remote_url.as_ref() {
|
||||
permalinks.entry(entry.sha).or_insert_with(|| {
|
||||
provider.build_commit_permalink(
|
||||
remote,
|
||||
BuildCommitPermalinkParams {
|
||||
sha: entry.sha.to_string().as_str(),
|
||||
},
|
||||
)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let shas = unique_shas.into_iter().collect::<Vec<_>>();
|
||||
let messages =
|
||||
get_messages(working_directory, &shas).context("failed to get commit messages")?;
|
||||
let messages = get_messages(working_directory, &shas)
|
||||
.await
|
||||
.context("failed to get commit messages")?;
|
||||
|
||||
Ok(Self {
|
||||
entries,
|
||||
permalinks,
|
||||
messages,
|
||||
remote_url,
|
||||
})
|
||||
@@ -74,13 +54,13 @@ impl Blame {
|
||||
const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD";
|
||||
const GIT_BLAME_NO_PATH: &str = "fatal: no such path";
|
||||
|
||||
fn run_git_blame(
|
||||
async fn run_git_blame(
|
||||
git_binary: &Path,
|
||||
working_directory: &Path,
|
||||
path: &Path,
|
||||
contents: &Rope,
|
||||
) -> Result<String> {
|
||||
let child = util::command::new_std_command(git_binary)
|
||||
let mut child = util::command::new_smol_command(git_binary)
|
||||
.current_dir(working_directory)
|
||||
.arg("blame")
|
||||
.arg("--incremental")
|
||||
@@ -93,18 +73,19 @@ fn run_git_blame(
|
||||
.spawn()
|
||||
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
|
||||
|
||||
let mut stdin = child
|
||||
let stdin = child
|
||||
.stdin
|
||||
.as_ref()
|
||||
.as_mut()
|
||||
.context("failed to get pipe to stdin of git blame command")?;
|
||||
|
||||
for chunk in contents.chunks() {
|
||||
stdin.write_all(chunk.as_bytes())?;
|
||||
stdin.write_all(chunk.as_bytes()).await?;
|
||||
}
|
||||
stdin.flush()?;
|
||||
stdin.flush().await?;
|
||||
|
||||
let output = child
|
||||
.wait_with_output()
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
|
||||
|
||||
if !output.status.success() {
|
||||
|
||||
@@ -3,20 +3,21 @@ use anyhow::{anyhow, Result};
|
||||
use collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
pub fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result<HashMap<Oid, String>> {
|
||||
pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result<HashMap<Oid, String>> {
|
||||
if shas.is_empty() {
|
||||
return Ok(HashMap::default());
|
||||
}
|
||||
|
||||
const MARKER: &str = "<MARKER>";
|
||||
|
||||
let output = util::command::new_std_command("git")
|
||||
let output = util::command::new_smol_command("git")
|
||||
.current_dir(working_directory)
|
||||
.arg("show")
|
||||
.arg("-s")
|
||||
.arg(format!("--format=%B{}", MARKER))
|
||||
.args(shas.iter().map(ToString::to_string))
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
|
||||
|
||||
anyhow::ensure!(
|
||||
|
||||
@@ -50,11 +50,14 @@ actions!(
        Fetch,
        Commit,
        ExpandCommitEditor,
        GenerateCommitMessage
        GenerateCommitMessage,
        Init,
    ]
);

action_with_deprecated_aliases!(git, RestoreFile, ["editor::RevertFile"]);
action_with_deprecated_aliases!(git, Restore, ["editor::RevertSelectedHunks"]);
action_with_deprecated_aliases!(git, Blame, ["editor::ToggleGitBlame"]);

/// The length of a Git short SHA.
pub const SHORT_SHA_LENGTH: usize = 7;
|
||||
|
||||
File diff suppressed because it is too large
@@ -2,9 +2,12 @@ mod providers;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use git::repository::GitRepository;
|
||||
use git::GitHostingProviderRegistry;
|
||||
use gpui::App;
|
||||
use url::Url;
|
||||
use util::maybe;
|
||||
|
||||
pub use crate::providers::*;
|
||||
|
||||
@@ -15,7 +18,7 @@ pub fn init(cx: &App) {
|
||||
provider_registry.register_hosting_provider(Arc::new(Chromium));
|
||||
provider_registry.register_hosting_provider(Arc::new(Codeberg));
|
||||
provider_registry.register_hosting_provider(Arc::new(Gitee));
|
||||
provider_registry.register_hosting_provider(Arc::new(Github));
|
||||
provider_registry.register_hosting_provider(Arc::new(Github::new()));
|
||||
provider_registry.register_hosting_provider(Arc::new(Gitlab::new()));
|
||||
provider_registry.register_hosting_provider(Arc::new(Sourcehut));
|
||||
}
|
||||
@@ -34,5 +37,51 @@ pub fn register_additional_providers(
|
||||
|
||||
if let Ok(gitlab_self_hosted) = Gitlab::from_remote_url(&origin_url) {
|
||||
provider_registry.register_hosting_provider(Arc::new(gitlab_self_hosted));
|
||||
} else if let Ok(github_self_hosted) = Github::from_remote_url(&origin_url) {
|
||||
provider_registry.register_hosting_provider(Arc::new(github_self_hosted));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_host_from_git_remote_url(remote_url: &str) -> Result<String> {
    maybe!({
        if let Some(remote_url) = remote_url.strip_prefix("git@") {
            if let Some((host, _)) = remote_url.trim_start_matches("git@").split_once(':') {
                return Some(host.to_string());
            }
        }

        Url::parse(&remote_url)
            .ok()
            .and_then(|remote_url| remote_url.host_str().map(|host| host.to_string()))
    })
    .ok_or_else(|| anyhow!("URL has no host"))
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::get_host_from_git_remote_url;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn test_get_host_from_git_remote_url() {
|
||||
let tests = [
|
||||
(
|
||||
"https://jlannister@github.com/some-org/some-repo.git",
|
||||
Some("github.com".to_string()),
|
||||
),
|
||||
(
|
||||
"git@github.com:zed-industries/zed.git",
|
||||
Some("github.com".to_string()),
|
||||
),
|
||||
(
|
||||
"git@my.super.long.subdomain.com:zed-industries/zed.git",
|
||||
Some("my.super.long.subdomain.com".to_string()),
|
||||
),
|
||||
];
|
||||
|
||||
for (remote_url, expected_host) in tests {
|
||||
let host = get_host_from_git_remote_url(remote_url).ok();
|
||||
assert_eq!(host, expected_host);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,8 @@ use git::{
|
||||
PullRequest, RemoteUrl,
|
||||
};
|
||||
|
||||
use crate::get_host_from_git_remote_url;
|
||||
|
||||
fn pull_request_number_regex() -> &'static Regex {
|
||||
static PULL_REQUEST_NUMBER_REGEX: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"\(#(\d+)\)$").unwrap());
|
||||
@@ -43,9 +45,38 @@ struct User {
|
||||
pub avatar_url: String,
|
||||
}
|
||||
|
||||
pub struct Github;
|
||||
pub struct Github {
|
||||
name: String,
|
||||
base_url: Url,
|
||||
}
|
||||
|
||||
impl Github {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
name: "GitHub".to_string(),
|
||||
base_url: Url::parse("https://github.com").unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_remote_url(remote_url: &str) -> Result<Self> {
|
||||
let host = get_host_from_git_remote_url(remote_url)?;
|
||||
if host == "github.com" {
|
||||
bail!("the GitHub instance is not self-hosted");
|
||||
}
|
||||
|
||||
// TODO: detecting self hosted instances by checking whether "github" is in the url or not
|
||||
// is not very reliable. See https://github.com/zed-industries/zed/issues/26393 for more
|
||||
// information.
|
||||
if !host.contains("github") {
|
||||
bail!("not a GitHub URL");
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
name: "GitHub Self-Hosted".to_string(),
|
||||
base_url: Url::parse(&format!("https://{}", host))?,
|
||||
})
|
||||
}
|
||||
|
||||
async fn fetch_github_commit_author(
|
||||
&self,
|
||||
repo_owner: &str,
|
||||
@@ -53,7 +84,10 @@ impl Github {
|
||||
commit: &str,
|
||||
client: &Arc<dyn HttpClient>,
|
||||
) -> Result<Option<User>> {
|
||||
let url = format!("https://api.github.com/repos/{repo_owner}/{repo}/commits/{commit}");
|
||||
let Some(host) = self.base_url.host_str() else {
|
||||
bail!("failed to get host from github base url");
|
||||
};
|
||||
let url = format!("https://api.{host}/repos/{repo_owner}/{repo}/commits/{commit}");
|
||||
|
||||
let mut request = Request::get(&url)
|
||||
.header("Content-Type", "application/json")
|
||||
@@ -90,15 +124,17 @@ impl Github {
|
||||
#[async_trait]
|
||||
impl GitHostingProvider for Github {
|
||||
fn name(&self) -> String {
|
||||
"GitHub".to_string()
|
||||
self.name.clone()
|
||||
}
|
||||
|
||||
fn base_url(&self) -> Url {
|
||||
Url::parse("https://github.com").unwrap()
|
||||
self.base_url.clone()
|
||||
}
|
||||
|
||||
    fn supports_avatars(&self) -> bool {
        true
        // Avatars are not supported for self-hosted GitHub instances
        // See tracking issue: https://github.com/zed-industries/zed/issues/11043
        &self.name == "GitHub"
    }
|
||||
|
||||
fn format_line_number(&self, line: u32) -> String {
|
||||
@@ -113,7 +149,7 @@ impl GitHostingProvider for Github {
|
||||
let url = RemoteUrl::from_str(url).ok()?;
|
||||
|
||||
let host = url.host_str()?;
|
||||
if host != "github.com" {
|
||||
if host != self.base_url.host_str()? {
|
||||
return None;
|
||||
}
|
||||
|
||||
@@ -203,9 +239,76 @@ mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_invalid_self_hosted_remote_url() {
|
||||
let remote_url = "git@github.com:zed-industries/zed.git";
|
||||
let github = Github::from_remote_url(remote_url);
|
||||
assert!(github.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_from_remote_url_ssh() {
|
||||
let remote_url = "git@github.my-enterprise.com:zed-industries/zed.git";
|
||||
let github = Github::from_remote_url(remote_url).unwrap();
|
||||
|
||||
assert!(!github.supports_avatars());
|
||||
assert_eq!(github.name, "GitHub Self-Hosted".to_string());
|
||||
assert_eq!(
|
||||
github.base_url,
|
||||
Url::parse("https://github.my-enterprise.com").unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_from_remote_url_https() {
|
||||
let remote_url = "https://github.my-enterprise.com/zed-industries/zed.git";
|
||||
let github = Github::from_remote_url(remote_url).unwrap();
|
||||
|
||||
assert!(!github.supports_avatars());
|
||||
assert_eq!(github.name, "GitHub Self-Hosted".to_string());
|
||||
assert_eq!(
|
||||
github.base_url,
|
||||
Url::parse("https://github.my-enterprise.com").unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_self_hosted_ssh_url() {
|
||||
let remote_url = "git@github.my-enterprise.com:zed-industries/zed.git";
|
||||
let parsed_remote = Github::from_remote_url(remote_url)
|
||||
.unwrap()
|
||||
.parse_remote_url(remote_url)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_self_hosted_https_url_with_subgroup() {
|
||||
let remote_url = "https://github.my-enterprise.com/zed-industries/zed.git";
|
||||
let parsed_remote = Github::from_remote_url(remote_url)
|
||||
.unwrap()
|
||||
.parse_remote_url(remote_url)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
parsed_remote,
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_ssh_url() {
|
||||
let parsed_remote = Github
|
||||
let parsed_remote = Github::new()
|
||||
.parse_remote_url("git@github.com:zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
@@ -220,7 +323,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_https_url() {
|
||||
let parsed_remote = Github
|
||||
let parsed_remote = Github::new()
|
||||
.parse_remote_url("https://github.com/zed-industries/zed.git")
|
||||
.unwrap();
|
||||
|
||||
@@ -235,7 +338,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_https_url_with_username() {
|
||||
let parsed_remote = Github
|
||||
let parsed_remote = Github::new()
|
||||
.parse_remote_url("https://jlannister@github.com/some-org/some-repo.git")
|
||||
.unwrap();
|
||||
|
||||
@@ -254,7 +357,7 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
};
|
||||
let permalink = Github.build_permalink(
|
||||
let permalink = Github::new().build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
@@ -269,7 +372,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_build_github_permalink() {
|
||||
let permalink = Github.build_permalink(
|
||||
let permalink = Github::new().build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
@@ -287,7 +390,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_build_github_permalink_with_single_line_selection() {
|
||||
let permalink = Github.build_permalink(
|
||||
let permalink = Github::new().build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
@@ -305,7 +408,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_build_github_permalink_with_multi_line_selection() {
|
||||
let permalink = Github.build_permalink(
|
||||
let permalink = Github::new().build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
@@ -328,8 +431,9 @@ mod tests {
|
||||
repo: "zed".into(),
|
||||
};
|
||||
|
||||
let github = Github::new();
|
||||
let message = "This does not contain a pull request";
|
||||
assert!(Github.extract_pull_request(&remote, message).is_none());
|
||||
assert!(github.extract_pull_request(&remote, message).is_none());
|
||||
|
||||
// Pull request number at end of first line
|
||||
let message = indoc! {r#"
|
||||
@@ -344,7 +448,7 @@ mod tests {
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
Github
|
||||
github
|
||||
.extract_pull_request(&remote, &message)
|
||||
.unwrap()
|
||||
.url
|
||||
@@ -359,6 +463,6 @@ mod tests {
|
||||
See the original PR, this is a fix.
|
||||
"#
|
||||
};
|
||||
assert_eq!(Github.extract_pull_request(&remote, &message), None);
|
||||
assert_eq!(github.extract_pull_request(&remote, &message), None);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::{anyhow, bail, Result};
|
||||
use anyhow::{bail, Result};
|
||||
use url::Url;
|
||||
use util::maybe;
|
||||
|
||||
use git::{
|
||||
BuildCommitPermalinkParams, BuildPermalinkParams, GitHostingProvider, ParsedGitRemote,
|
||||
RemoteUrl,
|
||||
};
|
||||
|
||||
use crate::get_host_from_git_remote_url;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Gitlab {
|
||||
name: String,
|
||||
@@ -24,19 +25,14 @@ impl Gitlab {
|
||||
}
|
||||
|
||||
pub fn from_remote_url(remote_url: &str) -> Result<Self> {
|
||||
let host = maybe!({
|
||||
if let Some(remote_url) = remote_url.strip_prefix("git@") {
|
||||
if let Some((host, _)) = remote_url.trim_start_matches("git@").split_once(':') {
|
||||
return Some(host.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Url::parse(&remote_url)
|
||||
.ok()
|
||||
.and_then(|remote_url| remote_url.host_str().map(|host| host.to_string()))
|
||||
})
|
||||
.ok_or_else(|| anyhow!("URL has no host"))?;
|
||||
let host = get_host_from_git_remote_url(remote_url)?;
|
||||
if host == "gitlab.com" {
|
||||
bail!("the GitLab instance is not self-hosted");
|
||||
}
|
||||
|
||||
// TODO: detecting self hosted instances by checking whether "gitlab" is in the url or not
|
||||
// is not very reliable. See https://github.com/zed-industries/zed/issues/26393 for more
|
||||
// information.
|
||||
if !host.contains("gitlab") {
|
||||
bail!("not a GitLab URL");
|
||||
}
|
||||
@@ -130,6 +126,13 @@ mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_invalid_self_hosted_remote_url() {
|
||||
let remote_url = "https://gitlab.com/zed-industries/zed.git";
|
||||
let github = Gitlab::from_remote_url(remote_url);
|
||||
assert!(github.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_remote_url_given_ssh_url() {
|
||||
let parsed_remote = Gitlab::new()
|
||||
|
||||
@@ -18,13 +18,15 @@ test-support = ["multi_buffer/test-support"]
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
askpass.workspace= true
|
||||
askpass.workspace = true
|
||||
assistant_settings.workspace = true
|
||||
buffer_diff.workspace = true
|
||||
chrono.workspace = true
|
||||
collections.workspace = true
|
||||
command_palette_hooks.workspace = true
|
||||
component.workspace = true
|
||||
db.workspace = true
|
||||
editor.workspace = true
|
||||
feature_flags.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
git.workspace = true
|
||||
@@ -37,6 +39,7 @@ linkme.workspace = true
|
||||
log.workspace = true
|
||||
menu.workspace = true
|
||||
multi_buffer.workspace = true
|
||||
notifications.workspace = true
|
||||
panel.workspace = true
|
||||
picker.workspace = true
|
||||
postage.workspace = true
|
||||
@@ -51,6 +54,7 @@ strum.workspace = true
|
||||
telemetry.workspace = true
|
||||
theme.workspace = true
|
||||
time.workspace = true
|
||||
time_format.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
workspace.workspace = true
|
||||
|
||||
@@ -1,16 +1,18 @@
|
||||
use anyhow::{anyhow, Context as _};
|
||||
use fuzzy::{StringMatch, StringMatchCandidate};
|
||||
use fuzzy::StringMatchCandidate;
|
||||
|
||||
use git::repository::Branch;
|
||||
use gpui::{
|
||||
rems, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
|
||||
InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription,
|
||||
Task, Window,
|
||||
InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement, Render,
|
||||
SharedString, Styled, Subscription, Task, Window,
|
||||
};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use project::git::Repository;
|
||||
use std::sync::Arc;
|
||||
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing, PopoverMenuHandle};
|
||||
use time::OffsetDateTime;
|
||||
use time_format::format_local_timestamp;
|
||||
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
|
||||
use util::ResultExt;
|
||||
use workspace::notifications::DetachAndPromptErr;
|
||||
use workspace::{ModalView, Workspace};
|
||||
@@ -51,7 +53,7 @@ pub fn open(
|
||||
let repository = workspace.project().read(cx).active_repository(cx).clone();
|
||||
let style = BranchListStyle::Modal;
|
||||
workspace.toggle_modal(window, cx, |window, cx| {
|
||||
BranchList::new(repository, style, 34., window, cx)
|
||||
BranchList::new(repository, style, rems(34.), window, cx)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -61,7 +63,7 @@ pub fn popover(
|
||||
cx: &mut App,
|
||||
) -> Entity<BranchList> {
|
||||
cx.new(|cx| {
|
||||
let list = BranchList::new(repository, BranchListStyle::Popover, 15., window, cx);
|
||||
let list = BranchList::new(repository, BranchListStyle::Popover, rems(20.), window, cx);
|
||||
list.focus_handle(cx).focus(window);
|
||||
list
|
||||
})
|
||||
@@ -74,8 +76,7 @@ enum BranchListStyle {
|
||||
}
|
||||
|
||||
pub struct BranchList {
|
||||
rem_width: f32,
|
||||
pub popover_handle: PopoverMenuHandle<Self>,
|
||||
width: Rems,
|
||||
pub picker: Entity<Picker<BranchListDelegate>>,
|
||||
_subscription: Subscription,
|
||||
}
|
||||
@@ -84,20 +85,26 @@ impl BranchList {
|
||||
fn new(
|
||||
repository: Option<Entity<Repository>>,
|
||||
style: BranchListStyle,
|
||||
rem_width: f32,
|
||||
width: Rems,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let popover_handle = PopoverMenuHandle::default();
|
||||
let all_branches_request = repository
|
||||
.clone()
|
||||
.map(|repository| repository.read(cx).branches());
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
let all_branches = all_branches_request
|
||||
let mut all_branches = all_branches_request
|
||||
.context("No active repository")?
|
||||
.await??;
|
||||
|
||||
all_branches.sort_by_key(|branch| {
|
||||
branch
|
||||
.most_recent_commit
|
||||
.as_ref()
|
||||
.map(|commit| 0 - commit.commit_timestamp)
|
||||
});
|
||||
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.picker.update(cx, |picker, cx| {
|
||||
picker.delegate.all_branches = Some(all_branches);
|
||||
@@ -109,7 +116,7 @@ impl BranchList {
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
let delegate = BranchListDelegate::new(repository.clone(), style, 20);
|
||||
let delegate = BranchListDelegate::new(repository.clone(), style);
|
||||
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));
|
||||
|
||||
let _subscription = cx.subscribe(&picker, |_, _, _, cx| {
|
||||
@@ -118,11 +125,20 @@ impl BranchList {
|
||||
|
||||
Self {
|
||||
picker,
|
||||
rem_width,
|
||||
popover_handle,
|
||||
width,
|
||||
_subscription,
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_modifiers_changed(
|
||||
&mut self,
|
||||
ev: &ModifiersChangedEvent,
|
||||
_: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.picker
|
||||
.update(cx, |picker, _| picker.delegate.modifiers = ev.modifiers)
|
||||
}
|
||||
}
|
||||
impl ModalView for BranchList {}
|
||||
impl EventEmitter<DismissEvent> for BranchList {}
|
||||
@@ -136,7 +152,8 @@ impl Focusable for BranchList {
|
||||
impl Render for BranchList {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
v_flex()
|
||||
.w(rems(self.rem_width))
|
||||
.w(self.width)
|
||||
.on_modifiers_changed(cx.listener(Self::handle_modifiers_changed))
|
||||
.child(self.picker.clone())
|
||||
.on_mouse_down_out({
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
@@ -149,20 +166,10 @@ impl Render for BranchList {
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum BranchEntry {
|
||||
Branch(StringMatch),
|
||||
History(String),
|
||||
NewBranch { name: String },
|
||||
}
|
||||
|
||||
impl BranchEntry {
|
||||
fn name(&self) -> &str {
|
||||
match self {
|
||||
Self::Branch(branch) => &branch.string,
|
||||
Self::History(branch) => &branch,
|
||||
Self::NewBranch { name } => &name,
|
||||
}
|
||||
}
|
||||
struct BranchEntry {
|
||||
branch: Branch,
|
||||
positions: Vec<usize>,
|
||||
is_new: bool,
|
||||
}
|
||||
|
||||
pub struct BranchListDelegate {
|
||||
@@ -172,16 +179,11 @@ pub struct BranchListDelegate {
|
||||
style: BranchListStyle,
|
||||
selected_index: usize,
|
||||
last_query: String,
|
||||
/// Max length of branch name before we truncate it and add a trailing `...`.
|
||||
branch_name_trailoff_after: usize,
|
||||
modifiers: Modifiers,
|
||||
}
|
||||
|
||||
impl BranchListDelegate {
|
||||
fn new(
|
||||
repo: Option<Entity<Repository>>,
|
||||
style: BranchListStyle,
|
||||
branch_name_trailoff_after: usize,
|
||||
) -> Self {
|
||||
fn new(repo: Option<Entity<Repository>>, style: BranchListStyle) -> Self {
|
||||
Self {
|
||||
matches: vec![],
|
||||
repo,
|
||||
@@ -189,15 +191,30 @@ impl BranchListDelegate {
|
||||
all_branches: None,
|
||||
selected_index: 0,
|
||||
last_query: Default::default(),
|
||||
branch_name_trailoff_after,
|
||||
modifiers: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn branch_count(&self) -> usize {
|
||||
self.matches
|
||||
.iter()
|
||||
.filter(|item| matches!(item, BranchEntry::Branch(_)))
|
||||
.count()
|
||||
fn create_branch(
|
||||
&self,
|
||||
new_branch_name: SharedString,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) {
|
||||
let Some(repo) = self.repo.clone() else {
|
||||
return;
|
||||
};
|
||||
cx.spawn(|_, cx| async move {
|
||||
cx.update(|cx| repo.read(cx).create_branch(new_branch_name.to_string()))?
|
||||
.await??;
|
||||
cx.update(|cx| repo.read(cx).change_branch(new_branch_name.to_string()))?
|
||||
.await??;
|
||||
Ok(())
|
||||
})
|
||||
.detach_and_prompt_err("Failed to create branch", window, cx, |e, _, _| {
|
||||
Some(e.to_string())
|
||||
});
|
||||
cx.emit(DismissEvent);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -231,37 +248,28 @@ impl PickerDelegate for BranchListDelegate {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let Some(mut all_branches) = self.all_branches.clone() else {
|
||||
let Some(all_branches) = self.all_branches.clone() else {
|
||||
return Task::ready(());
|
||||
};
|
||||
|
||||
const RECENT_BRANCHES_COUNT: usize = 10;
|
||||
cx.spawn_in(window, move |picker, mut cx| async move {
|
||||
const RECENT_BRANCHES_COUNT: usize = 10;
|
||||
if query.is_empty() {
|
||||
if all_branches.len() > RECENT_BRANCHES_COUNT {
|
||||
// Truncate list of recent branches
|
||||
// Do a partial sort to show recent-ish branches first.
|
||||
all_branches.select_nth_unstable_by(RECENT_BRANCHES_COUNT - 1, |lhs, rhs| {
|
||||
rhs.priority_key().cmp(&lhs.priority_key())
|
||||
});
|
||||
all_branches.truncate(RECENT_BRANCHES_COUNT);
|
||||
}
|
||||
all_branches.sort_unstable_by(|lhs, rhs| {
|
||||
rhs.is_head.cmp(&lhs.is_head).then(lhs.name.cmp(&rhs.name))
|
||||
});
|
||||
}
|
||||
|
||||
let candidates = all_branches
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(ix, command)| StringMatchCandidate::new(ix, &command.name))
|
||||
.collect::<Vec<StringMatchCandidate>>();
|
||||
let matches: Vec<BranchEntry> = if query.is_empty() {
|
||||
candidates
|
||||
let mut matches: Vec<BranchEntry> = if query.is_empty() {
|
||||
all_branches
|
||||
.into_iter()
|
||||
.map(|candidate| BranchEntry::History(candidate.string))
|
||||
.take(RECENT_BRANCHES_COUNT)
|
||||
.map(|branch| BranchEntry {
|
||||
branch,
|
||||
positions: Vec::new(),
|
||||
is_new: false,
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
let candidates = all_branches
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(ix, command)| StringMatchCandidate::new(ix, &command.name.clone()))
|
||||
.collect::<Vec<StringMatchCandidate>>();
|
||||
fuzzy::match_strings(
|
||||
&candidates,
|
||||
&query,
|
||||
@@ -273,20 +281,35 @@ impl PickerDelegate for BranchListDelegate {
|
||||
.await
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(BranchEntry::Branch)
|
||||
.map(|candidate| BranchEntry {
|
||||
branch: all_branches[candidate.candidate_id].clone(),
|
||||
positions: candidate.positions,
|
||||
is_new: false,
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
picker
|
||||
.update(&mut cx, |picker, _| {
|
||||
#[allow(clippy::nonminimal_bool)]
|
||||
if !query.is_empty()
|
||||
&& !matches
|
||||
.first()
|
||||
.is_some_and(|entry| entry.branch.name == query)
|
||||
{
|
||||
matches.push(BranchEntry {
|
||||
branch: Branch {
|
||||
name: query.clone().into(),
|
||||
is_head: false,
|
||||
upstream: None,
|
||||
most_recent_commit: None,
|
||||
},
|
||||
positions: Vec::new(),
|
||||
is_new: true,
|
||||
})
|
||||
}
|
||||
let delegate = &mut picker.delegate;
|
||||
delegate.matches = matches;
|
||||
if delegate.matches.is_empty() {
|
||||
if !query.is_empty() {
|
||||
delegate.matches.push(BranchEntry::NewBranch {
|
||||
name: query.trim().replace(' ', "-"),
|
||||
});
|
||||
}
|
||||
|
||||
delegate.selected_index = 0;
|
||||
} else {
|
||||
delegate.selected_index =
|
||||
@@ -298,10 +321,14 @@ impl PickerDelegate for BranchListDelegate {
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
let Some(branch) = self.matches.get(self.selected_index()) else {
|
||||
fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
let Some(entry) = self.matches.get(self.selected_index()) else {
|
||||
return;
|
||||
};
|
||||
if entry.is_new {
|
||||
self.create_branch(entry.branch.name.clone(), window, cx);
|
||||
return;
|
||||
}
|
||||
|
||||
let current_branch = self.repo.as_ref().map(|repo| {
|
||||
repo.update(cx, |repo, _| {
|
||||
@@ -311,14 +338,14 @@ impl PickerDelegate for BranchListDelegate {
|
||||
|
||||
if current_branch
|
||||
.flatten()
|
||||
.is_some_and(|current_branch| current_branch == branch.name())
|
||||
.is_some_and(|current_branch| current_branch == entry.branch.name)
|
||||
{
|
||||
cx.emit(DismissEvent);
|
||||
return;
|
||||
}
|
||||
|
||||
cx.spawn_in(window, {
|
||||
let branch = branch.clone();
|
||||
let branch = entry.branch.clone();
|
||||
|picker, mut cx| async move {
|
||||
let branch_change_task = picker.update(&mut cx, |this, cx| {
|
||||
let repo = this
|
||||
@@ -331,22 +358,8 @@ impl PickerDelegate for BranchListDelegate {
|
||||
let cx = cx.to_async();
|
||||
|
||||
anyhow::Ok(async move {
|
||||
match branch {
|
||||
BranchEntry::Branch(StringMatch {
|
||||
string: branch_name,
|
||||
..
|
||||
})
|
||||
| BranchEntry::History(branch_name) => {
|
||||
cx.update(|cx| repo.read(cx).change_branch(branch_name))?
|
||||
.await?
|
||||
}
|
||||
BranchEntry::NewBranch { name: branch_name } => {
|
||||
cx.update(|cx| repo.read(cx).create_branch(branch_name.clone()))?
|
||||
.await??;
|
||||
cx.update(|cx| repo.read(cx).change_branch(branch_name))?
|
||||
.await?
|
||||
}
|
||||
}
|
||||
cx.update(|cx| repo.read(cx).change_branch(branch.name.to_string()))?
|
||||
.await?
|
||||
})
|
||||
})??;
|
||||
|
||||
@@ -366,16 +379,35 @@ impl PickerDelegate for BranchListDelegate {
|
||||
cx.emit(DismissEvent);
|
||||
}
|
||||
|
||||
fn render_header(&self, _: &mut Window, _cx: &mut Context<Picker<Self>>) -> Option<AnyElement> {
|
||||
None
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
selected: bool,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Picker<Self>>,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let hit = &self.matches[ix];
|
||||
let shortened_branch_name =
|
||||
util::truncate_and_trailoff(&hit.name(), self.branch_name_trailoff_after);
|
||||
let entry = &self.matches[ix];
|
||||
|
||||
let (commit_time, subject) = entry
|
||||
.branch
|
||||
.most_recent_commit
|
||||
.as_ref()
|
||||
.map(|commit| {
|
||||
let subject = commit.subject.clone();
|
||||
let commit_time = OffsetDateTime::from_unix_timestamp(commit.commit_timestamp)
|
||||
.unwrap_or_else(|_| OffsetDateTime::now_utc());
|
||||
let formatted_time = format_local_timestamp(
|
||||
commit_time,
|
||||
OffsetDateTime::now_utc(),
|
||||
time_format::TimestampFormat::Relative,
|
||||
);
|
||||
(Some(formatted_time), Some(subject))
|
||||
})
|
||||
.unwrap_or_else(|| (None, None));
|
||||
|
||||
Some(
|
||||
ListItem::new(SharedString::from(format!("vcs-menu-{ix}")))
|
||||
@@ -386,29 +418,68 @@ impl PickerDelegate for BranchListDelegate {
|
||||
})
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.toggle_state(selected)
|
||||
.when(matches!(hit, BranchEntry::History(_)), |el| {
|
||||
el.end_slot(
|
||||
Icon::new(IconName::HistoryRerun)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
})
|
||||
.map(|el| match hit {
|
||||
BranchEntry::Branch(branch) => {
|
||||
let highlights: Vec<_> = branch
|
||||
.positions
|
||||
.iter()
|
||||
.filter(|index| index < &&self.branch_name_trailoff_after)
|
||||
.copied()
|
||||
.collect();
|
||||
|
||||
el.child(HighlightedLabel::new(shortened_branch_name, highlights))
|
||||
}
|
||||
BranchEntry::History(_) => el.child(Label::new(shortened_branch_name)),
|
||||
BranchEntry::NewBranch { name } => {
|
||||
el.child(Label::new(format!("Create branch '{name}'")))
|
||||
}
|
||||
}),
|
||||
.child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.flex_shrink()
|
||||
.overflow_x_hidden()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.child(div().flex_shrink().overflow_x_hidden().child(
|
||||
if entry.is_new {
|
||||
Label::new(format!(
|
||||
"Create branch \"{}\"…",
|
||||
entry.branch.name
|
||||
))
|
||||
.single_line()
|
||||
.into_any_element()
|
||||
} else {
|
||||
HighlightedLabel::new(
|
||||
entry.branch.name.clone(),
|
||||
entry.positions.clone(),
|
||||
)
|
||||
.truncate()
|
||||
.into_any_element()
|
||||
},
|
||||
))
|
||||
.when_some(commit_time, |el, commit_time| {
|
||||
el.child(
|
||||
Label::new(commit_time)
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.into_element(),
|
||||
)
|
||||
}),
|
||||
)
|
||||
.when(self.style == BranchListStyle::Modal, |el| {
|
||||
el.child(div().max_w_96().child({
|
||||
let message = if entry.is_new {
|
||||
if let Some(current_branch) =
|
||||
self.repo.as_ref().and_then(|repo| {
|
||||
repo.read(cx).current_branch().map(|b| b.name.clone())
|
||||
})
|
||||
{
|
||||
format!("based off {}", current_branch)
|
||||
} else {
|
||||
"based off the current branch".to_string()
|
||||
}
|
||||
} else {
|
||||
subject.unwrap_or("no commits found".into()).to_string()
|
||||
};
|
||||
Label::new(message)
|
||||
.size(LabelSize::Small)
|
||||
.truncate()
|
||||
.color(Color::Muted)
|
||||
}))
|
||||
}),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
    fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
        None
    }
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff