Compare commits

...

132 Commits

Author SHA1 Message Date
Richard Feldman
1ff8521612 wip 2025-03-20 09:36:42 -04:00
Richard Feldman
ca22d5d4a3 Add shell_parser crate 2025-03-19 22:19:16 -04:00
renovate[bot]
1cf252f8eb Update Rust crate semver to v1.0.26 (#27143)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [semver](https://redirect.github.com/dtolnay/semver) |
workspace.dependencies | patch | `1.0.25` -> `1.0.26` |

---

### Release Notes

<details>
<summary>dtolnay/semver (semver)</summary>

###
[`v1.0.26`](https://redirect.github.com/dtolnay/semver/releases/tag/1.0.26)

[Compare
Source](https://redirect.github.com/dtolnay/semver/compare/1.0.25...1.0.26)

-   Documentation improvements

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

<!--renovate-debug:eyJjcmVhdGVkSW5WZXIiOiIzOS4yMDcuMSIsInVwZGF0ZWRJblZlciI6IjM5LjIwNy4xIiwidGFyZ2V0QnJhbmNoIjoibWFpbiIsImxhYmVscyI6W119-->

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-20 01:20:45 +00:00
Julia Ryan
e46c72f4a8 nix: Add nightly build job with cachix (#27014)
I'll be using this to `nix run github:zed-industries/zed/nightly` and
get an up-to-date and cached nightly build.

It'll also serve as a way to warn me when the nix build is broken,
rather than having to wait for users to report it.

Eventually and depending on the build time of the nix builds, we may
want to consider putting a nix build in CI (#17458) to prevent
breakages, but for now a best-effort nightly build that doesn't block
the job if it fails is a good start.

Resolve #19937

Release Notes:

- N/A
2025-03-20 00:16:06 +00:00
renovate[bot]
63f656faae Update Rust crate async-compression to v0.4.21 (#27122)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
|
[async-compression](https://redirect.github.com/Nullus157/async-compression)
| workspace.dependencies | patch | `0.4.20` -> `0.4.21` |

---

### Release Notes

<details>
<summary>Nullus157/async-compression (async-compression)</summary>

###
[`v0.4.21`](https://redirect.github.com/Nullus157/async-compression/blob/HEAD/CHANGELOG.md#0421---2025-03-15)

[Compare
Source](https://redirect.github.com/Nullus157/async-compression/compare/v0.4.20...v0.4.21)

##### Fixed

- When flate encoding, do not mark internal state as flushed if it ran
out of buffer space.
- Add debug assertion in `produce` method to check buffer capacity in
implementations for `BufWriter`.

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

<!--renovate-debug:eyJjcmVhdGVkSW5WZXIiOiIzOS4yMDcuMSIsInVwZGF0ZWRJblZlciI6IjM5LjIwNy4xIiwidGFyZ2V0QnJhbmNoIjoibWFpbiIsImxhYmVscyI6W119-->

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 18:36:27 -04:00
renovate[bot]
31b8c36479 Update Rust crate async-std to v1.13.1 (#27127)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [async-std](https://async.rs)
([source](https://redirect.github.com/async-rs/async-std)) |
dependencies | patch | `1.13.0` -> `1.13.1` |

---

### Release Notes

<details>
<summary>async-rs/async-std (async-std)</summary>

###
[`v1.13.1`](https://redirect.github.com/async-rs/async-std/blob/HEAD/CHANGELOG.md#1131---2025-02-21)

[Compare
Source](https://redirect.github.com/async-rs/async-std/compare/v1.13.0...v1.13.1)

`async-std` has officially been discontinued. We recommend that all
users and
libraries migrate to the excellent
[`smol`](https://redirect.github.com/smol-rs/smol/)
project.

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

<!--renovate-debug:eyJjcmVhdGVkSW5WZXIiOiIzOS4yMDcuMSIsInVwZGF0ZWRJblZlciI6IjM5LjIwNy4xIiwidGFyZ2V0QnJhbmNoIjoibWFpbiIsImxhYmVscyI6W119-->

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 18:35:56 -04:00
Agus Zubiaga
dfdca540ec assistant2: Handle empty tool results by providing placeholder text (#27130)
This is surprising, but the Anthropic API returns a 400 if a tool output
is an empty string because it thinks we're attaching a `tool use`
without a corresponding `tool result`, but we are not, it's just empty
(which seems totally reasonable) 🙃

Release Notes:

- N/A
2025-03-19 22:30:49 +00:00
renovate[bot]
14c036931d Update Rust crate async-trait to v0.1.88 (#27128)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [async-trait](https://redirect.github.com/dtolnay/async-trait) |
workspace.dependencies | patch | `0.1.87` -> `0.1.88` |

---

### Release Notes

<details>
<summary>dtolnay/async-trait (async-trait)</summary>

###
[`v0.1.88`](https://redirect.github.com/dtolnay/async-trait/releases/tag/0.1.88)

[Compare
Source](https://redirect.github.com/dtolnay/async-trait/compare/0.1.87...0.1.88)

- Fix lifetime bounding on generic parameters that have cfg
([#&#8203;289](https://redirect.github.com/dtolnay/async-trait/issues/289))

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

<!--renovate-debug:eyJjcmVhdGVkSW5WZXIiOiIzOS4yMDcuMSIsInVwZGF0ZWRJblZlciI6IjM5LjIwNy4xIiwidGFyZ2V0QnJhbmNoIjoibWFpbiIsImxhYmVscyI6W119-->

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 22:17:00 +00:00
Mikayla Maki
5387ae9ed8 Add documentation for secondary modifier (#27129)
Follow up to: https://github.com/zed-industries/zed/pull/26390

Release Notes:

- N/A
2025-03-19 22:05:33 +00:00
Angelo Verlain Shema
c30fb5f1ec Use shell script language for APKBUILD files (#27099)
`APKBUILD` files are similar to `PKGBUILD` used by arch linux, but are
used to build alpine linux packages:
https://wiki.alpinelinux.org/wiki/APKBUILD_Reference

Release Notes:

- Added recognition for `APKBUILD` files as "Shell Script".
2025-03-19 22:00:44 +00:00
renovate[bot]
f7e2b7b679 Update actions/upload-artifact digest to ea165f8 (#27115)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
|
[actions/upload-artifact](https://redirect.github.com/actions/upload-artifact)
| action | digest | `4cec3d8` -> `ea165f8` |

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

<!--renovate-debug:eyJjcmVhdGVkSW5WZXIiOiIzOS4yMDcuMSIsInVwZGF0ZWRJblZlciI6IjM5LjIwNy4xIiwidGFyZ2V0QnJhbmNoIjoibWFpbiIsImxhYmVscyI6W119-->

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 17:55:06 -04:00
renovate[bot]
b3bf3e2d53 Update cloudflare/wrangler-action digest to da0e0df (#27116)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
|
[cloudflare/wrangler-action](https://redirect.github.com/cloudflare/wrangler-action)
| action | digest | `392082e` -> `da0e0df` |

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

<!--renovate-debug:eyJjcmVhdGVkSW5WZXIiOiIzOS4yMDcuMSIsInVwZGF0ZWRJblZlciI6IjM5LjIwNy4xIiwidGFyZ2V0QnJhbmNoIjoibWFpbiIsImxhYmVscyI6W119-->

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 17:54:25 -04:00
renovate[bot]
1cc59b317c Update actions/setup-node digest to cdca736 (#27108)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [actions/setup-node](https://redirect.github.com/actions/setup-node) |
action | digest | `1d0ff46` -> `cdca736` |

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

<!--renovate-debug:eyJjcmVhdGVkSW5WZXIiOiIzOS4yMDcuMSIsInVwZGF0ZWRJblZlciI6IjM5LjIwNy4xIiwidGFyZ2V0QnJhbmNoIjoibWFpbiIsImxhYmVscyI6W119-->

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 17:52:19 -04:00
Marshall Bowers
efd3f8a8f1 assistant2: Add initial concept of profiles (#27123)
This PR adds the initial concept of agent profiles to Assistant 2.

Right now these are just collections of tools that can quickly be
enabled together:


https://github.com/user-attachments/assets/7c7f9cc8-a5e5-492f-96f7-79697bbf3d72

There are currently two profiles:

- `Read-only` - Consists only of tools that do not perform writes.
- `Code Writer` - Consists of all tools for writing code, with the
exception of the `lua-interpreter`.

Release Notes:

- N/A
2025-03-19 21:48:14 +00:00
Marshall Bowers
930dba4a7f Upgrade thiserror to v2.0 (#27117)
This PR upgrades `thiserror` to v2.0.

We were still on v1.0, but a number of our dependencies have already
moved to v2.0.

Release Notes:

- N/A
2025-03-19 20:47:38 +00:00
renovate[bot]
7cfd919523 Pin actions/checkout action to 11bd719 (#27107)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [actions/checkout](https://redirect.github.com/actions/checkout) |
action | pinDigest | -> `11bd719` |

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

<!--renovate-debug:eyJjcmVhdGVkSW5WZXIiOiIzOS4yMDcuMSIsInVwZGF0ZWRJblZlciI6IjM5LjIwNy4xIiwidGFyZ2V0QnJhbmNoIjoibWFpbiIsImxhYmVscyI6W119-->

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 16:46:51 -04:00
Peter Tripp
edd1b48e7c ci: Send emails for weekly release (#27102)
Release Notes:

- N/A
2025-03-19 16:16:34 -04:00
Michael Sloan
3ec69a5bc0 Make getting keybinding for display more efficient (#27046)
No longer iterates all the matching bindings, and no longer clones the
result.

Release Notes:

- N/A
2025-03-19 20:15:33 +00:00
Antonio Scandurra
33faa66e35 Start on a Git-based review flow (#27103)
Release Notes:

- N/A

---------

Co-authored-by: Nathan Sobo <nathan@zed.dev>
2025-03-19 19:00:21 +00:00
Marshall Bowers
68262fe7e4 theme: Add fallback colors for version_control.<variant> properties (#27104)
This PR adds fallback colors for the `version_control.<variant>` theme
properties.

This fixes the colors when themes do not provide the properties.

Related to  https://github.com/zed-industries/zed/pull/26951.

Release Notes:

- Added fallback colors for the `version_control.<variant>` theme
properties.
2025-03-19 18:48:35 +00:00
Joseph T. Lyons
2491426be7 Fix release notes API call with heredoc syntax (#27096)
Release Notes:

- N/A
2025-03-19 13:50:46 -04:00
Marshall Bowers
4487dc1064 assistant2: Add a button to open the extensions view to install more context servers (#27095)
This PR adds a new button in the Assistant configuration view to open
the extensions view pre-filtered to extensions that provide context
servers.


https://github.com/user-attachments/assets/3bc77507-c8b8-4bc6-8a17-ab5d8b3b7c8a

Release Notes:

- N/A
2025-03-19 17:48:42 +00:00
Piotr Osiewicz
e03edc2a76 debugger: Do not allow setting breakpoints in buffers without file storage (#27094)
Closes #ISSUE

Release Notes:

- N/A
2025-03-19 18:40:31 +01:00
Marshall Bowers
d722067000 extensions_ui: Add ability to open the extensions view with a pre-selected filter (#27093)
This PR adds the ability to open the extensions view via the `zed:
extensions` action with a pre-selected filter.

The "Install Themes" and "Install Icon Themes" buttons in their
respective selectors take advantage of this to set the filter when
opening the view:


https://github.com/user-attachments/assets/2e345c0f-418a-47b6-811e-cabae6c616d1

Release Notes:

- N/A
2025-03-19 17:26:46 +00:00
Kirill Bulatov
d51cd15e4d Remove an unused field in Diagnostic from zed.proto (#27091)
Release Notes:

- N/A
2025-03-19 17:15:43 +00:00
loczek
ef14bc8e76 docs: Add better snippets documentation (#26853)
Improved snippets docs

Release Notes:

- N/A
2025-03-19 18:05:05 +01:00
Marshall Bowers
9fe243efa5 gpui: Update doc comment for App::new (#27089)
This PR updates the doc comment for the `App::new` method.

Release Notes:

- N/A
2025-03-19 16:51:19 +00:00
Max Brunsfeld
74a39c7263 Make FakeGitRepository behave more like a real git repository (#26961)
This PR reworks the `FakeGitRepository` type that we use for testing git
interactions, to make it more realistic. In particular, the `status`
method now derives the Git status from the differences between HEAD, the
index, and the working copy. This way, if you modify a file in the
`FakeFs`, the Git repository's `status` method will reflect that
modification.

Release Notes:

- N/A

---------

Co-authored-by: Junkui Zhang <364772080@qq.com>
2025-03-19 16:04:27 +00:00
Agus Zubiaga
5f398071b2 assistant2: Skip tool uses without a matching tool result (#27082)
Anthropic API doesn't allow `tool_use` messages without a corresponding
`tool_result`, so we'll skip those when building a request. I'll
separately investigate why we are sending request before the tool result
as that might lead to separate issues, but that might take a while and
this is currently very frustrating.

Release Notes:

- N/A
2025-03-19 15:54:57 +00:00
Marshall Bowers
410a942d57 assistant2: Add ability to start and stop context servers (#27080)
This PR adds the ability to start and stop context servers from within
the configuration view in the Assistant panel:


https://github.com/user-attachments/assets/93c3a7cb-d799-4286-88ba-c13cc26e959a

Release Notes:

- N/A
2025-03-19 15:37:48 +00:00
Joseph T. Lyons
06ffdc6791 Bump Zed to v0.180 (#27083)
Release Notes:

- N/A
2025-03-19 11:33:30 -04:00
Marshall Bowers
394215599a assistant2: Fix broken merge (#27081)
This PR fixes a broken merge caused by
https://github.com/zed-industries/zed/pull/26987 landing after
https://github.com/zed-industries/zed/pull/26758.

Release Notes:

- N/A
2025-03-19 15:26:19 +00:00
Richard Feldman
e8a40085de Allow tools to read unsaved buffers (#26987)
If the tool asks to read a path, we don't need to verify whether that
path exists on disk; an unsaved buffer with that path is fine.

Release Notes:

- N/A
2025-03-19 14:59:10 +00:00
Richard Feldman
6303751325 Record token usage telemetry (#26962)
<img width="1103" alt="Screenshot 2025-03-17 at 9 47 32 PM"
src="https://github.com/user-attachments/assets/947cf33d-4464-4305-8ff0-3630529d2f81"
/>


Release Notes:

- N/A
2025-03-19 10:47:46 -04:00
Antonio Scandurra
3edf930007 Revert "Start tracking edits performed by the agent" (#27077)
Reverts zed-industries/zed#27064
2025-03-19 15:33:08 +01:00
Jakub Čermák
584a70ca5e Refactor Git panel styling & status colors for consistency (#26951)
Closes #26847

Release Notes:

- Updated Git panel background to use panel_background instead of
ElevationIndex::Surface.bg(cx) for consistency with other panels.
- Removed redundant GitStatusColors struct from status.rs and refactored
to use existing theme colors.
- Adjusted Color enum mappings in color.rs to reference
version_control_* colors instead of status() for better alignment with
the theme system.
- Cleaned up unused or redundant code.
2025-03-19 10:26:36 -04:00
Smit Barmase
2230f3b09d editor: Preserve expand excerpt down button position (#27058)
When you press the "Expand Excerpt Down" button, the editor will scroll
up by the same amount to keep the button in the same place. This allows you
to expand the excerpt rapidly without moving your mouse.

Before:


https://github.com/user-attachments/assets/376350ac-6f21-4ce0-a383-b2c9ca4f45bb

After:


https://github.com/user-attachments/assets/4fba4173-5f01-4220-990a-65820ac40cf5

Release Notes:

- Improved "Expand Excerpt Down" so the button stays in place, allowing
rapid expansion without moving the mouse.
2025-03-19 19:54:52 +05:30
5brian
84a8d48178 vim: Fix space not handling non-ascii characters (#27053)
Closes #26806

Changes: Clips the new point with `Bias::Right` like in
`saturating_right`

Release Notes:

- vim: Fixed `space` not handling non-ascii characters
2025-03-19 07:28:50 -06:00
Antonio Scandurra
ac5dafc6b2 Start tracking edits performed by the agent (#27064)
Release Notes:

- N/A

---------

Co-authored-by: Danilo Leal <daniloleal09@gmail.com>
Co-authored-by: Agus Zubiaga <hi@aguz.me>
2025-03-19 13:07:25 +00:00
Piotr Osiewicz
23686aa394 debugger: Do not use Disclosure for attach button (#27068)
Closes #ISSUE

Release Notes:

- N/A
2025-03-19 14:01:33 +01:00
Danilo Leal
3874d315ec assistant2: Adjust text and padding alignment between messages (#27067)
Ensuring that text between the "you" messages align with text in the
assistant response. This also creates a nice subtle hierarchy effect
where the "you" message card is wider than the message, making it
slightly easier to tell them apart.

<img
src="https://github.com/user-attachments/assets/616c1776-ca51-454e-9d52-e480bf26c843"
width="600px" />

Release Notes:

- N/A
2025-03-19 09:43:33 -03:00
Agus Zubiaga
1d33bfde37 assistant edit tool: Replace with flexible indentation (#27039)
Sometimes the model produces SEARCH queries that don't match the
indentation of the source file exactly.

When we can't find an exact match, we'll now attempt to match the lines
while being more flexible about the leading whitespace as long as all
lines are consistently offset from the source, and extend the leading
whitespace in the REPLACE string accordingly.

Release Notes:

- N/A
2025-03-19 09:39:00 -03:00
Piotr Osiewicz
9377ef9817 feature_flags: Do not enable feature flags by default in dev builds (#27065)
Closes #ISSUE

Release Notes:

- N/A
2025-03-19 12:20:26 +00:00
Piotr Osiewicz
c3b5046347 editor: Do not use breakpoint color for run indicators (#27063)
Closes #ISSUE

Release Notes:

- N/A
2025-03-19 11:54:14 +00:00
Piotr Osiewicz
44fff08ed6 util: Include path to asset in panic message from asset_str (#27059)
Somebody on Discord ran into issues with running the debugger which goes
down to an unwrap in asset_str. Let's print a path that was accessed.

Release Notes:

- N/A
2025-03-19 11:09:51 +00:00
Anthony Eid
d4daa0a3a2 Show debug console evaluation response (#27050)
We weren't incrementing the output token when getting responses from the
debug evaluation request which caused some output to not be displayed.
(Usually the evaluation response, but that could cascade into other
output events not showing)


Release Notes:
- N/A

Co-authored-by: Remco Smits <djsmits12@gmail.com>
Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
2025-03-19 05:37:32 +00:00
Conrad Irwin
81582cd7f3 Don't render breakpoint indicators on top of expand arrows (#27048)
Closes #ISSUE

cc @Anthony-Eid. One thing I noticed while doing this is that we do an
invalid cast here from DisplayPoint.row to MultiBufferRow. These are not
the same if you have soft-wrap enabled (or anything else in the display
map that's not in the editor).

Release Notes:

- N/A
2025-03-19 05:00:41 +00:00
Ryan Hawkins
0f5a3afe94 Support built-in Zed prompts for all platforms (#26201)
This pull request does two things:

1. Adds a setting to force Zed to use the built-in prompts, instead of
the system provided ones. I've personally found the system prompts on
macOS often fail to respond to keyboard input, are slow to render
initially, and don't match Zed's style.
2. Makes the previously Linux-only Zed provided prompts available to
everybody using the above setting.

Release Notes:
- Added support for a built-in prompting system, regardless of platform.
Use the new `use_system_prompts` setting to control whether to use the
system provided prompts or Zed's built-in system. Note that on Linux,
this setting has no effect, as Linux doesn't have a system prompting
mechanism.
2025-03-18 22:27:09 -06:00
CharlesChen0823
382f9f6151 language_tools: Fix buffer search keeping focusing when pressing enter in vim mode (#26266)
Closes #25643 

Release Notes:

- Fixed buffer search keeping focus when pressing enter in vim mode

Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
2025-03-19 04:25:29 +00:00
5brian
15d2420031 workspace::Open: Fix trapped cursor/selection on update (#25402)
Closes #ISSUE

Issue: Selection index does not reset when the matches update, which can
lead to the selection getting trapped when that index does not exist in
the next matches.


https://github.com/user-attachments/assets/d3fab23f-750c-47fb-bd3b-a0c42f214c83

This is in workspace::Open with   "use_system_path_prompts": false

Release Notes:

- N/A

Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
2025-03-18 22:19:11 -06:00
CharlesChen0823
026c7274d9 workspace: Add function to save new file in directory nearest tab (#22563)
Closes #15685

Release Notes:

- Save new file in the directory nearest the tab

---------

Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
2025-03-19 03:41:04 +00:00
Mikayla Maki
1aefa5178b Move "async move" a few characters to the left in cx.spawn() (#26758)
This is the core change:
https://github.com/zed-industries/zed/pull/26758/files#diff-044302c0d57147af17e68a0009fee3e8dcdfb4f32c27a915e70cfa80e987f765R1052

TODO:
- [x] Use AsyncFn instead of Fn() -> Future in GPUI spawn methods
- [x] Implement it in the whole app
- [x] Implement it in the debugger 
- [x] Glance at the RPC crate, and see if those box future methods can
be switched over. Answer: It can't directly, as you can't make an
AsyncFn* into a trait object. There's ways around that, but they're all
more complex than just keeping the code as is.
- [ ] Fix platform specific code

Release Notes:

- N/A
2025-03-19 02:09:02 +00:00
João Marcos
7f2e3fb5bd Fix git stage race condition with delayed fs events (#27036)
This PR adds a failing test `test_staging_hunks_with_delayed_fs_event`
and makes it pass

Also skips a queued read for git diff states if another read was
requested (less work)

This still doesn't catch all race conditions, but the PR is getting long
so I'll yield this and start another branch

Release Notes:

- N/A
2025-03-18 22:44:36 -03:00
Agus Zubiaga
68a572873b assistant edit tool: Improve bad search output (#27012)
When we failed to match a search string, we were reporting the replace
string as not found; this confused the model and could make it go into a
doom loop. This PR fixes that and improves the error output in general to
help it recover faster.

Release Notes:

- N/A
2025-03-18 21:53:20 -03:00
Piotr Osiewicz
c042a02cf4 debugger: First slight pass at UI (#27034)
- Collapse Launch and Attach into a single split button
- Fix code actions indicator being colored red.

Release Notes:

- N/A
2025-03-19 00:15:48 +00:00
Julia Ryan
73ac3d9a99 nix: Fix LDFLAGS rpath (#26912)
By default stdenv strips all unused rpaths, but we use a few libraries
that are `dlopen`'d so we need to stop it from removing those. The
[`dontPatchELF`
flag](https://ryantm.github.io/nixpkgs/stdenv/stdenv/#var-stdenv-dontPatchELF)
disables that and makes the nix build work on wayland again.

Fix #26905
Close #26864

Release Notes:

- N/A
2025-03-18 17:04:27 -07:00
Peter Tripp
2269f996f7 Add more shortcuts for delete/restore in Git Panel (#27004)
Release Notes:

- N/A
2025-03-18 18:52:28 -04:00
Marshall Bowers
e9033a75ac assistant2: Remove unneeded debug logging (#27030)
This PR removes the debug logging added in
https://github.com/zed-industries/zed/pull/23722, as we no longer need
it.

Release Notes:

- N/A
2025-03-18 22:12:04 +00:00
Marshall Bowers
a2ae6a1c77 assistant2: Add tool lists for each context server (#27029)
This PR updates the list of context servers with the ability to view the
tools provided by the context server:

<img width="1394" alt="Screenshot 2025-03-18 at 5 53 05 PM"
src="https://github.com/user-attachments/assets/4ffe93dd-f9e9-44e7-877f-656ebf45a326"
/>

Release Notes:

- N/A
2025-03-18 22:04:47 +00:00
Jason Lee
985ac4e5f2 gpui: Reduce window.refresh to improve cache hit of the cached views (#25009)
Release Notes:

- Improved performance when using the scroll wheel and some other mouse
interactions.

Based on some cache details about GPUI `AnyView::cached` that I found in
the discussion of
https://github.com/zed-industries/zed/discussions/24260#discussioncomment-12135749,
and combined with the optimization points found in actual applications.

This change may have some scenarios that I have not considered, so I
just make a draft to put forward my ideas first for discussion.

From my analysis, `AnyView::cached` will always be invalidated by Div's mouse
events, because they call `window.refresh`. I understand that (for the mouse
move event) this is because interface changes related to hover and
mouse_move can affect styling, so `window.refresh` is required.
Since Div does not have the `entity_id` of a View, it is impossible to
know which View should be refreshed, so the entire window can only be
refreshed.

With this change, we can reduce a lot of `render` method calls on
ScrollWheel or Mouse Event.
2025-03-18 14:52:20 -07:00
Kirill Bulatov
89ae4ca9a3 Fix debugger docs a bit (#27026)
Tried adding a custom debugging task and discovered two more required
properties missing from the docs.

Release Notes:

- N/A
2025-03-18 21:46:11 +00:00
Marshall Bowers
1d4afe6daa assistant2: Add context server list to configuration view (#27028)
This PR adds a context server list to the configuration view in
Assistant2:

<img width="1394" alt="Screenshot 2025-03-18 at 5 26 23 PM"
src="https://github.com/user-attachments/assets/58bf3920-1e35-4cb8-a32a-5ae9f98ce387"
/>

Release Notes:

- N/A
2025-03-18 21:41:39 +00:00
Joseph T. Lyons
777c88bcea Clean up community_release_actions file (#27027)
Release Notes:

- N/A
2025-03-18 21:29:22 +00:00
Kirill Bulatov
959a024861 Omit json-language-server from the scope_opt_in_language_servers (#27023)
Follow-up of https://github.com/zed-industries/zed/pull/26574/files

After that PR, settings.json stopped giving completions when `"` was
typed as a key:

https://github.com/user-attachments/assets/5ff03863-024c-4c28-a7cd-8ef48a1695d8

This goes down to 


fb12863999/crates/language/src/language.rs (L1736-L1748)

which was empty before the PR, hence leading to the lower `true` branch.
Now, when typing `"`, there's no scope according to 


fb12863999/crates/project/src/lsp_store.rs (L4529-L4532)

return result.

Removing `json-language-server` from `scope_opt_in_language_servers`
seems to preserve the `:` fix and restore the completions behavior.


Release Notes:

- N/A
2025-03-18 21:08:43 +00:00
Joseph T. Lyons
ed510b5e93 Remove unused AssistantThreadFeedback event (#27021)
It looks like:

- https://github.com/zed-industries/zed/pull/26780

accidentally added a new event type, `AssistantThreadFeedback`, using
the old event system, that it didn't end up actually using, as the code
actually relies on using the newer (preferred) `telemetry::event!()`.

Release Notes:

- N/A
2025-03-18 20:39:54 +00:00
Peter Tripp
674c572a28 ci: Run stalebot checks multiple times to ensure completion (#27017)
Stalebot has a maximum operations-per-run which is set at 1000. As a
result it may require multiple runs to successfully complete.

This morning it took [three
runs](https://github.com/zed-industries/zed/actions/runs/13921563707/attempts/1)
so set it to run three times two hours apart to avoid hitting github API
limits.

Release Notes:

- N/A
2025-03-18 16:34:24 -04:00
Martin Fischer
4a39fc2644 gpui: Provide workaround for AMD Linux driver bug (#26890)
There apparently is some amdgpu/radv bug that rendering with
multisample anti-aliasing (MSAA) results in a crash when the bounds
of a triangle list exceed 1024px, which in Zed happens with the default
buffer font size when you select a line with more than 144 characters.

This crash has been reported as #26143.

This commit introduces a workaround: you can set the
ZED_PATH_SAMPLE_COUNT=0
environment variable to disable MSAA and the error message we print
when a GPU crash is encountered with radv now suggests trying this
environment
variable as a workaround and links the respective issue.

Sidenote: MSAA was introduced in
f08b1d78ec
so you didn't run into this driver bug with versions < 0.173.8.

Release Notes:

- Added a workaround for an AMD Linux driver bug that causes Zed to
crash when selecting long lines.
2025-03-18 20:11:09 +00:00
Agus Zubiaga
48fe134408 assistant edit tool: Create file when search/replace is empty (#27009)
We used to fail when this happened, but we saw the model use it as a way
to create empty files, which makes sense.

Release Notes:

- N/A
2025-03-18 18:35:11 +00:00
Cole Miller
22b8662275 Fix syntax highlighting of git commit messages (#26988)
- Load syntax colors into commit message editors
- Fix name mismatches that were preventing the git commit grammar and
language config from being matched up

Release Notes:

- Fixed git commit messages not being syntax-highlighted
2025-03-18 18:18:56 +00:00
Marshall Bowers
cc36cd9768 extensions_ui: Add ability to filter extensions by category (#27005)
This PR adds the ability to filter the list of extensions by category:


https://github.com/user-attachments/assets/ea7b518e-4769-4e2e-8bbe-e75f9f01edf9

Release Notes:

- Added the ability to filter the list of extensions by category.
2025-03-18 17:59:58 +00:00
KyleBarton
628a61d929 docs: Specify the command for activating prompt library from the command palette (#27007)
Quickfix of the docs as I read through and get familiar with the
assistant interface.
`prompt-library: toggle` does not appear to be a live command in
`cmd-shift-p` - instead I see `assistant: deploy prompt library`. This
change to the docs reflects that. It also notes that this command can
only be activated from within the assistant panel (the command is not
accessible from a standard editor panel).

Release Notes:

- N/A
2025-03-18 13:56:22 -04:00
Cole Miller
7f23875c5e Fold git merge messages into commit editor placeholder text (#26992)
This PR changes the git commit message editors to surface git's
suggested merge message, if any, as placeholder text, as opposed to
"real" buffer text as was previously the case.

Release Notes:

- Changed git commit message editors to use placeholder text for git's
suggested merge messages
2025-03-18 17:21:20 +00:00
Cole Miller
e7bba1c252 Improvements to interactive hard wrap behavior (#26953)
Release Notes:

- Fixed involuntary joining of lines when typing in the commit message
editor
- Fixed being unable to type whitespace after a comment character at the
start of a line in the commit message editor
2025-03-18 17:05:08 +00:00
Remco Smits
41a60ffecf Debugger implementation (#13433)
###  DISCLAIMER

> As of 6th March 2025, debugger is still in development. We plan to
merge it behind a staff-only feature flag for staff use only, followed
by non-public release and then finally a public one (akin to how Git
panel release was handled). This is done to ensure the best experience
when it gets released.

### END OF DISCLAIMER 

**The current state of the debugger implementation:**


https://github.com/user-attachments/assets/c4deff07-80dd-4dc6-ad2e-0c252a478fe9


https://github.com/user-attachments/assets/e1ed2345-b750-4bb6-9c97-50961b76904f

----

All the todo's are in the following channel, so it's easier to work on
this together:
https://zed.dev/channel/zed-debugger-11370

If you are on Linux, you can use the following command to join the
channel:
```cli
zed https://zed.dev/channel/zed-debugger-11370 
```

## Current Features

- Collab
  - Breakpoints
    - Sync when you (re)join a project
    - Sync when you add/remove a breakpoint
  - Sync active debug line
  - Stack frames
    - Click on stack frame
      - View variables that belong to the stack frame
      - Visit the source file
    - Restart stack frame (if adapter supports this)
  - Variables
  - Loaded sources
  - Modules
  - Controls
    - Continue
    - Step back
      - Stepping granularity (configurable)
    - Step into
      - Stepping granularity (configurable)
    - Step over
      - Stepping granularity (configurable)
    - Step out
      - Stepping granularity (configurable)
  - Debug console
- Breakpoints
  - Log breakpoints
  - line breakpoints
  - Persistent between zed sessions (configurable)
  - Multi buffer support
  - Toggle disable/enable all breakpoints
- Stack frames
  - Click on stack frame
    - View variables that belong to the stack frame
    - Visit the source file
    - Show collapsed stack frames
  - Restart stack frame (if adapter supports this)
- Loaded sources
  - View all used loaded sources if supported by adapter.
- Modules
  - View all used modules (if adapter supports this)
- Variables
  - Copy value
  - Copy name
  - Copy memory reference
  - Set value (if adapter supports this)
  - keyboard navigation
- Debug Console
  - See logs
  - View output that was sent from debug adapter
    - Output grouping
  - Evaluate code
    - Updates the variable list
    - Auto completion
- If not supported by adapter, we will show auto-completion for existing
variables
- Debug Terminal
- Run custom commands and change env values right inside your Zed
terminal
- Attach to process (if adapter supports this)
  - Process picker
- Controls
  - Continue
  - Step back
    - Stepping granularity (configurable)
  - Step into
    - Stepping granularity (configurable)
  - Step over
    - Stepping granularity (configurable)
  - Step out
    - Stepping granularity (configurable)
  - Disconnect
  - Restart
  - Stop
- Warning when a debug session exited without hitting any breakpoint
- Debug view to see Adapter/RPC log messages
- Testing
  - Fake debug adapter
    - Fake requests & events

---

Release Notes:

- N/A

---------

Co-authored-by: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com>
Co-authored-by: Anthony Eid <hello@anthonyeid.me>
Co-authored-by: Anthony <anthony@zed.dev>
Co-authored-by: Piotr Osiewicz <peterosiewicz@gmail.com>
Co-authored-by: Piotr <piotr@zed.dev>
2025-03-18 12:55:25 -04:00
Marshall Bowers
ed4e654fdf assistant_tools: Add fetch tool (#26999)
This PR adds a new `fetch` tool to the set of tools the Assistant has
available.

This tool accepts a URL and fetches the content as Markdown.

<img width="1394" alt="Screenshot 2025-03-18 at 11 52 21 AM"
src="https://github.com/user-attachments/assets/e5bcde14-a0dd-4835-9d42-8f45def68f4d"
/>

<img width="1394" alt="Screenshot 2025-03-18 at 11 52 37 AM"
src="https://github.com/user-attachments/assets/3bcce4f5-f61b-40d7-8b30-2c673ce3c06a"
/>

Release Notes:

- N/A
2025-03-18 16:25:51 +00:00
Cole Miller
baaafddbeb worktree: Fix tracking of git status scans and re-enable tests (#26926)
Closes #ISSUE

Release Notes:

- N/A
2025-03-18 12:23:46 -04:00
Marshall Bowers
b70f21c08d assistant_tools: Rename RegexSearchTool module to match the others (#27001)
This PR renames the `RegexSearchTool` module to `regex_search_tool.rs`
to match the other tools.

Release Notes:

- N/A
2025-03-18 16:20:15 +00:00
Agus Zubiaga
5615be51cc assistant edit tool: Revert fuzzy matching (#26996)
#26935 is leading to bad edits, so let's revert it for now. I'll bring
back a version of this, but it'll likely just focus on indentation
instead of making the whole search fuzzy.

Release Notes: 

- N/A
2025-03-18 13:08:09 -03:00
Richard Feldman
06e9f0e309 Paginate regex and path search tools (#26997)
<img width="630" alt="Screenshot 2025-03-18 at 10 50 17 AM"
src="https://github.com/user-attachments/assets/0aee5367-402a-405a-8676-f2f8af425b1e"
/>

Release Notes:

- N/A
2025-03-18 15:44:41 +00:00
Marshall Bowers
41a2be7e54 assistant2: Keep the tool selector open when toggling tools (#26994)
This PR makes it so the tool selector will stay open when toggling tools
instead of closing after each selection:


https://github.com/user-attachments/assets/eb987785-cfb5-4b07-8d63-510fbd9d9bf1

This involved making a change to `ContextMenu` to allow it to rebuild
its menu items after each confirmation in order for them to reflect
their selected/unselected status. I intend to clean up the `ContextMenu`
API a bit at a later point, but that is out of scope for this PR.

Release Notes:

- N/A
2025-03-18 15:30:05 +00:00
Cole Miller
e38ae423f1 Add missing commit event reporting (#26990)
cc @morgankrey 

Release Notes:

- N/A
2025-03-18 14:52:32 +00:00
Peter Tripp
68bb3bd5eb Add more shortcuts for editor::OrganizeImports (#26932)
Follow-up to:
- https://github.com/zed-industries/zed/pull/25793

Release Notes:

- N/A
2025-03-18 10:51:12 -04:00
Richard Feldman
122e73f152 Allow read-file tool to read a subset of a file (#26966)
Release Notes:

- N/A
2025-03-18 10:03:15 -04:00
Smit Barmase
4b775505f5 migrator: Fix case where users see migration banner despite no diff changes (#26982)
Fixes an edge case where, after carrying out all migrations, if the final text is
the same as the existing text, we don't need to ask the user to do anything, even
though migration edits were applied internally. E.g. A -> B -> C -> A

Release Notes:

- N/A
2025-03-18 19:24:24 +05:30
Marshall Bowers
a9f7c0549c docs: Use correct name for Intelephense license file (#26986)
This PR updates the Intelephense section of the PHP docs to use the
correct name for the license file.

Intelephense uses British English:

<img width="1185" alt="Screenshot 2025-03-18 at 8 30 20 AM"
src="https://github.com/user-attachments/assets/a675e854-bedf-4f70-bf8f-90488d196242"
/>

Release Notes:

- N/A
2025-03-18 12:31:57 +00:00
Kirill Bulatov
ac617e278e Keep and filter word completions on input, if the menu is open (#26979)
Follow-up of https://github.com/zed-industries/zed/pull/26410

Release Notes:

- N/A
2025-03-18 13:19:32 +02:00
Bennet Bo Fenner
26f4b2a491 assistant2: Combine file & directory picker (#26975)
In the process of adding `@mentions` we realized that we do not want to
make a distinction between Files & Directories in the UI, therefore this
PR combines the File & Directory pickers into a unified version



https://github.com/user-attachments/assets/f3bf189c-8b69-4f5f-90ce-0b83b12dbca3

(Ignore the `@mentions`, they are broken also on main)

Release Notes:

- N/A
2025-03-18 09:49:25 +00:00
Sheik Althaf
fdcacb3849 typescript: Add highlighting for Angular inline components (#26553)
Closes #ISSUE

Release Notes:

- N/A *or* Added/Fixed/Improved ...

Before
<img width="1004" alt="image"
src="https://github.com/user-attachments/assets/9a611e8d-e00e-4dc7-b4c9-bd76fec95525"
/>

After
<img width="936" alt="Image"
src="https://github.com/user-attachments/assets/b83d3309-1aab-492c-a2f1-c45cd19e6bcc"
/>
2025-03-18 09:27:48 +00:00
Michael Sloan
f61d3d28e0 Use futures::future::join_all instead of futures::stream in assistant_eval (#26974)
Release Notes:

- N/A
2025-03-18 08:22:18 +00:00
tidely
a5621662b2 Update to git2 0.20.1 (#26972)
Switch back to git2 releases after
https://github.com/rust-lang/git2-rs/pull/1120 got merged in 0.20.1

Release Notes:

- N/A
2025-03-18 10:05:15 +02:00
Michael Sloan
b6198ad516 Add Ord and PartialOrd impls for gpui entity types (#26968)
Motivation is to be able to use entities as TreeMap keys.

Release Notes:

- N/A

Co-authored-by: Nathan <nathan@zed.dev>
2025-03-18 06:20:21 +00:00
Conrad Irwin
5210d9e8b4 Tidier multibuffer (#26954)
Makes multibuffer headers less close to the top of the file.

Moves multibuffer line numbers one em to the right to make space for the
expand excerpt button on large line numbers.

Release Notes:

- N/A

---------

Co-authored-by: Danilo Leal <daniloleal09@gmail.com>
2025-03-17 22:26:27 -06:00
Michael Sloan
1139904ef5 Remove unnecessary conditional definition of FS_WATCH_LATENCY (#26967)
This was added in #8343 to make it only visible for tests. #9189 then
made it visible regardless of `test-support`, so the definitions became
identical.

Release Notes:

- N/A
2025-03-17 22:10:16 -06:00
Joseph T. Lyons
b4ef3791bb Send stable release notes email (#26964)
Release Notes:

- N/A
2025-03-17 23:25:41 -04:00
Cole Miller
88907eeb38 git: Always zero panel's entry counts when clearing entries (#26924)
Keep the panel's state consistent even when we transition to having no
active repository.

Release Notes:

- N/A
2025-03-17 22:54:21 -04:00
Marshall Bowers
cd5d7e82d0 collab: Make account age-related fields required in LlmTokenClaims (#26959)
This PR makes the account age-related fields required in
`LlmTokenClaims`.

We've also removed the account age check from the LLM token issuance
endpoint, instead having it solely be enforced in the `POST /completion`
endpoint.

This change will be safe to deploy at ~8:01PM EDT.

Release Notes:

- N/A
2025-03-17 19:54:44 -04:00
Marshall Bowers
0851842d2c collab: Defer account age check to POST /completion endpoint (#26956)
This PR defers the account age check to the `POST /completion` endpoint
instead of doing it when an LLM token is generated.

This will allow us to lift the account age restriction for using Edit
Prediction.

Note: We're still temporarily performing the account age check when
issuing the LLM token until this change is deployed and the LLM tokens
have had a chance to cycle.

Release Notes:

- N/A
2025-03-17 22:42:29 +00:00
Marshall Bowers
1397e01735 collab: Clean up LLM token creation (#26955)
This PR cleans up the LLM token creation a bit.

We now pass in the entire list of feature flags to the
`LlmTokenClaims::create` method to prevent having a bunch of confusable
`bool` parameters.

Release Notes:

- N/A
2025-03-17 22:25:43 +00:00
Max Brunsfeld
2b2b9c1624 Make repo and branch popovers extend up from their trigger buttons (#26950)
Previously, when clicking on the branch, the popover would obscure the
button you just clicked, which was awkward.

Release Notes:

- Improved the placement of the repo and branch picker popovers in the
git panel.
- Added a 'SelectRepo' action that opens the repository selector in a
modal.
2025-03-17 15:05:17 -07:00
Agus Zubiaga
a05066cd83 assistant edit tool: Track read buffers and notify model of user edits (#26952)
When the model reads a file, we'll track the version it read, and let it
know if the user makes edits to the buffer. This helps prevent edit
failures because it'll know to re-read the file before editing.

Release Notes:

- N/A
2025-03-17 21:50:16 +00:00
Smit Barmase
cb439e672d editor: Fix navigate back for locations opened via preview item (#26943)
Closes #25458

When navigating code from a preview tab with
`enable_preview_from_code_navigation` set to `true`, "Go Back" from a
newly opened tab could focus on the tab to the right instead of
returning to the original preview tab.

Before, we killed the existing preview tab before opening a new one,
which broke history because the new tab had no reference to the old one.
This caused navigation to shift to the next tab on the right.

Now, we first add the new tab at the preview index, and then kill the
existing preview tab. This preserves the history by linking new preview
tab to existing tab.

Release Notes:

- Fixed an issue where, when navigating code from a preview tab with
`enable_preview_from_code_navigation` set to `true`, using "Go Back" from a
newly opened tab could focus the tab to the right instead of
returning to the original preview tab.
2025-03-18 00:59:36 +05:30
Mostafa Mahmoud
6b0a282c9c docs: Fix wrong html-like tags shortcut for Vim (#26792)
Release Notes:

- N/A
2025-03-17 12:40:07 -06:00
Conrad Irwin
25772b8777 Fix sticky header in last buffer of a multibuffer (#26944)
This also simplifies our code to stop generating a last excerpt boundary
that we always ignore.

Closes #ISSUE

Release Notes:

- N/A
2025-03-17 18:39:57 +00:00
Agus Zubiaga
94b63808e0 assistant edit tool: Fuzzy match search block (#26935)
Release Notes:

- N/A

Co-authored-by: Antonio Scandurra <me@as-cii.com>
2025-03-17 18:33:20 +00:00
Cole Miller
798af67dc1 Disable the other flaky tests (#26942)
I thought it might be just `test_file_status` this time, but it seems to
be all four of the tests that we were previously seeing issues with.

Release Notes:

- N/A
2025-03-17 18:10:42 +00:00
Anthony Eid
db1d2defa5 Sync git button states between project diff & git panel (#26938)
Closes #ISSUE

Release Notes:

- Git action buttons are now synced between the project diff and git
panel

Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
Co-authored-by: Piotr Osiewicz <peterosiewicz@gmail.com>
2025-03-17 14:08:32 -04:00
Max Brunsfeld
430bd83e4d Don't open the commit editor when staging last hunk (#26939)
Closes #26880

Release Notes:

- Removed a behavior where staging the last hunk in the project diff
would open the commit modal.
2025-03-17 10:58:04 -07:00
Cole Miller
dbe5399fc4 Remove disabling effect on the stage and unstage toolbar buttons (#26936)
Closes #26883

Release Notes:

- N/A
2025-03-17 13:48:04 -04:00
João Marcos
aba242d576 Document gutter_debounce (#26940)
Release Notes:

- N/A
2025-03-17 14:47:37 -03:00
Mikayla Maki
ddc210abfc Add website docs for the hunk_style variants (#26937)
Follow up to https://github.com/zed-industries/zed/pull/26816

Release Notes:

- N/A
2025-03-17 10:36:31 -07:00
Jakub Charvat
65994c0576 Add git.hunk_style setting for gutter hollow hunk behavior (#26816)
This is a follow up to #26809, introducing `git.hunk_style` setting to
control whether staged or unstaged hunks are shown as hollow.

Reused `GitHunkStyleSetting` which was left over from #26504.

Release Notes:

- Added `git.hunk_style` setting to control whether staged or unstaged
hunks are hollow.
2025-03-17 10:24:49 -07:00
João Marcos
011f823f33 Move buffer diff storage from BufferStore to GitStore (#26795)
Release Notes:

- N/A

---------

Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
Co-authored-by: max <max@zed.dev>
2025-03-17 17:02:32 +00:00
Marco Roth
3d1ae68f83 docs: Remove duplicate indent_guides setting in Project panel options (#26927)
Just a small docs pull request to remove the duplicate `indent_guides`
setting in the Project panel options section of the docs. There is also
another `indent_guides` on
[L2585](https://github.com/zed-industries/zed/pull/26927/files#diff-c2decf822f3085926bd23bdf175700222dfd8193d48ea39908d4bb1d1a7c6240R2584).
~~Let me know if you prefer to remove the other instance, thank you!~~

Edit: I just realized it's the old setting, `indent_guides` now expects
a object.
2025-03-17 16:22:39 +00:00
Agus Zubiaga
1f62274a89 assistant edit tool: Return applied actions back to main model (#26810)
We'll now include the search/replace block that got applied as part of
the tool output. We think this will help the model have a better idea of
how the file changed and prevent later edit failures.

Release Notes:

- N/A
2025-03-17 13:21:35 -03:00
Cole Miller
c2f62d261b Disable flaky file status test again (#26925)
Failure on an unrelated commit:
https://github.com/zed-industries/zed/actions/runs/13903012863/job/38899239052

Release Notes:

- N/A
2025-03-17 15:44:55 +00:00
khayyam
7d433a30ec git_hosting_providers: Allow configuring additional hosting providers via settings (#26879)
Release Notes:

- Added a new `git_hosting_providers` setting for configuring custom Git
hosting providers.

---------

Co-authored-by: Marshall Bowers <git@maxdeviant.com>
2025-03-17 15:39:52 +00:00
Max Brunsfeld
52567f4b72 Eliminate unnecessary macros in languages crate (#26813)
I vibe coded this in Zed, dawg.

This avoids a number of cases where we created multiple copies of the
same lsp adapter. Now we clone the Arcs.

Release Notes:

- N/A
2025-03-17 08:13:32 -07:00
Richard Feldman
a0ee84d3ac Use the main thread less on search tool (#26732)
Release Notes:

- N/A
2025-03-17 16:02:22 +01:00
Danilo Leal
6cac0b33dc docs: Add marker to signal which languages are built into Zed (#26913)
I saw over the weekend some social media posts that indicated people
didn't know which languages are included in Zed by default. We do say
that on each language-specific page, but I figured having this
high-level view on the languages page wouldn't hurt.

Release Notes:

- N/A
2025-03-17 11:27:52 -03:00
Marshall Bowers
45606abfdb git_hosting_providers: Refactor constructors (#26919)
This PR refactors the constructors for the various Git hosting providers
to facilitate adding support for more self-hosted variants.

Release Notes:

- N/A
2025-03-17 13:46:58 +00:00
Marshall Bowers
8ba6ce43ac git_hosting_providers: Fix incorrect name for SourceHut (#26915)
This PR fixes an issue where the SourceHut Git hosting provider was
using the wrong name.

Release Notes:

- N/A
2025-03-17 13:32:23 +00:00
Agus Zubiaga
040d42fc24 assistant tools: Fix running tests locally (#26914)
Without this, we were running into the following error:

```
Running into this when running tests. Is this  
dyld[45041]: Library not loaded: @rpath/WebRTC.framework/WebRTC
  Referenced from: <B2EA63A5-994E-3FB0-A74B-C9C4F7E5C1EF> /Users/aguz/zed/zed/target/debug/deps/assistant_tools-522d7745dd439dfb
  Reason: no LC_RPATH's found
```

Thanks Piotr!

Release Notes:

- N/A
2025-03-17 13:32:19 +00:00
Julia Ryan
22d905dc03 nix: Allow auto-update in the devshell (#26911)
Our direnv integration was making zed refuse to auto-update when you had
the zed repo open with the devshell active. This was happening even when
you used a non-nix build of zed, which actually should be able to
auto-update.

I'm a bit unsure of why we check for the `ZED_UPDATE_EXPLANATION` env
var [both at build time _and_ at
runtime](2828dcb67b/crates/auto_update/src/auto_update.rs (L149)),
but I can see an argument for why people might want that so I'll just do
the less intrusive change for now and leave the var out of the devshell.

Release Notes:

- N/A
2025-03-17 04:49:56 -07:00
0x2CA
bf735da3f2 Support extended keys on Mac (F20-F35) (#26899)
Closes #4640

About the support limit of Fn:

Mac F1-F35
Win F1-F24
Linux F1-F35
Terminal F1-F20

Release Notes:

- Improved support for extended keyboards on Mac (F20-F35)
2025-03-17 12:56:46 +02:00
Antonio Scandurra
210d8d5530 Allow cancellation of tool uses (#26906)
Release Notes:

- N/A
2025-03-17 09:53:18 +00:00
Color Fuzzy
a0f995d2ae Support SSH usernames which contain @ symbols (#25314)
Closes #25246

Release Notes:

- SSH: Improved handling of multiple `@` in connection strings: e.g.
`ssh jim.lv@es2@10.220.67.57@11.239.1.231` improving support of jump
hosts running JumpServer.

---------

Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
2025-03-17 05:10:21 +00:00
Piotr Osiewicz
8f560daec2 chore: Extract http-client-tls crate (#26895)
http-client pulled in rustls which in turn meant that gpui depended on
rustls/aws-lc-sys. This commit extracts http-client-tls crate to
separate the http-client and tls dependencies.

Closes #ISSUE

Release Notes:

- N/A
2025-03-17 02:36:37 +00:00
Richard Feldman
d5bb12631a Delete tool uses paths instead of globs (#26715)
Also made `run` avoid doing work on the main thread.

Release Notes:

- N/A
2025-03-16 11:58:25 +01:00
Kirill Bulatov
8a31dcaeb0 Use textDocument/codeLens data in the actions menu when applicable #2 (#26848)
Re-applies what's been reverted in
https://github.com/zed-industries/zed/pull/26832 with an action-related
fix in
64b5d37d32

Before, actions were resolved only if `data` is present and either of
the possible fields is empty:

e842b4eade/crates/project/src/lsp_store.rs (L1632-L1633)

But Zed resolves completions and inlays once, unconditionally, and the
reverted PR applied the same strategy to actions.
That did not work despite the spec not forbidding `data`-less actions to
be resolved.

Soon, it starts to work due to
https://github.com/rust-lang/rust-analyzer/pull/19369 but it seems safer
to restore the original filtering code.

Code lens have no issues with `data`-less resolves:

220d913cbc/crates/rust-analyzer/src/handlers/request.rs (L1618-L1620)

so the same approach as completions and inlays is kept: resolve once.


Release Notes:

- N/A
2025-03-15 20:09:32 +00:00
Michael Sloan
ef91e7afae Minor optimization of line number length logic (#26845)
In `layout_excerpt_gutter`, compute max line number length once instead
of for every row

In `max_line_number_width`, use ilog10 instead of converting to floats
and back

Release Notes:

- N/A
2025-03-15 19:28:50 +00:00
Michael Sloan
c220fb387d Fix panic when providing 0 to ilog10 in line number length logic (#26844)
Introduced in #24428

Release Notes:

- N/A
2025-03-15 19:10:54 +00:00
Smit Barmase
adbde210fd terminal: Fix text selection on first line causing scroll up (#26842)
Closes #21626
 
Now scrolling will only happen when the cursor goes beyond the bounds of
the terminal.
 
 Before:
 


https://github.com/user-attachments/assets/9ac48e80-d0e0-44c9-87ad-14ed748de78d


 After:


https://github.com/user-attachments/assets/c697c1fc-a6d2-4b9a-aad4-5b0c79837c2a
 
Release Notes:

- Fixed an issue where selecting the first line in the terminal would
cause it to scroll.
2025-03-15 22:33:16 +05:30
494 changed files with 37335 additions and 7800 deletions

View File

@@ -10,7 +10,7 @@ runs:
cargo install cargo-nextest --locked cargo install cargo-nextest --locked
- name: Install Node - name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4 uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with: with:
node-version: "18" node-version: "18"

View File

@@ -16,7 +16,7 @@ runs:
run: cargo install cargo-nextest --locked run: cargo install cargo-nextest --locked
- name: Install Node - name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4 uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with: with:
node-version: "18" node-version: "18"

View File

@@ -482,7 +482,7 @@ jobs:
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps: steps:
- name: Install Node - name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4 uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with: with:
node-version: "18" node-version: "18"
@@ -526,14 +526,14 @@ jobs:
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label - name: Upload app bundle (aarch64) to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with: with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
- name: Upload app bundle (x86_64) to workflow run if main branch or specific label - name: Upload app bundle (x86_64) to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }} if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with: with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
@@ -586,7 +586,7 @@ jobs:
run: script/bundle-linux run: script/bundle-linux
- name: Upload Linux bundle to workflow run if main branch or specific label - name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: | if: |
github.ref == 'refs/heads/main' github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling') || contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -595,7 +595,7 @@ jobs:
path: target/release/zed-*.tar.gz path: target/release/zed-*.tar.gz
- name: Upload Linux remote server to workflow run if main branch or specific label - name: Upload Linux remote server to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: | if: |
github.ref == 'refs/heads/main' github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling') || contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -647,7 +647,7 @@ jobs:
run: script/bundle-linux run: script/bundle-linux
- name: Upload Linux bundle to workflow run if main branch or specific label - name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: | if: |
github.ref == 'refs/heads/main' github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling') || contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -656,7 +656,7 @@ jobs:
path: target/release/zed-*.tar.gz path: target/release/zed-*.tar.gz
- name: Upload Linux remote server to workflow run if main branch or specific label - name: Upload Linux remote server to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: | if: |
github.ref == 'refs/heads/main' github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling') || contains(github.event.pull_request.labels.*.name, 'run-bundling')

View File

@@ -1,7 +1,7 @@
name: "Close Stale Issues" name: "Close Stale Issues"
on: on:
schedule: schedule:
- cron: "0 11 * * 2" - cron: "0 7,9,11 * * 2"
workflow_dispatch: workflow_dispatch:
jobs: jobs:

View File

@@ -13,11 +13,12 @@ jobs:
id: get-release-url id: get-release-url
run: | run: |
if [ "${{ github.event.release.prerelease }}" == "true" ]; then if [ "${{ github.event.release.prerelease }}" == "true" ]; then
URL="https://zed.dev/releases/preview/latest" URL="https://zed.dev/releases/preview/latest"
else else
URL="https://zed.dev/releases/stable/latest" URL="https://zed.dev/releases/stable/latest"
fi fi
echo "::set-output name=URL::$URL"
echo "URL=$URL" >> $GITHUB_OUTPUT
- name: Get content - name: Get content
uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757 # v1.4.1 uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757 # v1.4.1
id: get-content id: get-content
@@ -33,3 +34,35 @@ jobs:
with: with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }} webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: ${{ steps.get-content.outputs.string }} content: ${{ steps.get-content.outputs.string }}
send_release_notes_email:
if: github.repository_owner == 'zed-industries' && !github.event.release.prerelease
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0
- name: Check if release was promoted from preview
id: check-promotion-from-preview
run: |
VERSION="${{ github.event.release.tag_name }}"
PREVIEW_TAG="${VERSION}-pre"
if git rev-parse "$PREVIEW_TAG" > /dev/null 2>&1; then
echo "was_promoted_from_preview=true" >> $GITHUB_OUTPUT
else
echo "was_promoted_from_preview=false" >> $GITHUB_OUTPUT
fi
- name: Send release notes email
if: steps.check-promotion-from-preview.outputs.was_promoted_from_preview == 'true'
run: |
TAG="${{ github.event.release.tag_name }}"
echo \"${{ toJSON(github.event.release.body) }}\" > release_body.txt
jq -n --arg tag "$TAG" --rawfile body release_body.txt '{version: $tag, markdown_body: $body}' \
> release_data.json
curl -X POST "https://zed.dev/api/send_release_notes_email" \
-H "Authorization: Bearer ${{ secrets.RELEASE_NOTES_API_TOKEN }}" \
-H "Content-Type: application/json" \
-d @release_data.json

View File

@@ -22,7 +22,7 @@ jobs:
version: 9 version: 9
- name: Setup Node - name: Setup Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4 uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with: with:
node-version: "20" node-version: "20"
cache: "pnpm" cache: "pnpm"

View File

@@ -37,35 +37,35 @@ jobs:
mdbook build ./docs --dest-dir=../target/deploy/docs/ mdbook build ./docs --dest-dir=../target/deploy/docs/
- name: Deploy Docs - name: Deploy Docs
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3 uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with: with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: pages deploy target/deploy --project-name=docs command: pages deploy target/deploy --project-name=docs
- name: Deploy Install - name: Deploy Install
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3 uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with: with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh
- name: Deploy Docs Workers - name: Deploy Docs Workers
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3 uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with: with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js command: deploy .cloudflare/docs-proxy/src/worker.js
- name: Deploy Install Workers - name: Deploy Install Workers
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3 uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with: with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js command: deploy .cloudflare/docs-proxy/src/worker.js
- name: Preserve Wrangler logs - name: Preserve Wrangler logs
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: always() if: always()
with: with:
name: wrangler_logs name: wrangler_logs

View File

@@ -18,7 +18,7 @@ jobs:
version: 9 version: 9
- name: Setup Node - name: Setup Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4 uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with: with:
node-version: "20" node-version: "20"
cache: "pnpm" cache: "pnpm"

View File

@@ -23,7 +23,7 @@ jobs:
- buildjet-16vcpu-ubuntu-2204 - buildjet-16vcpu-ubuntu-2204
steps: steps:
- name: Install Node - name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4 uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with: with:
node-version: "18" node-version: "18"

View File

@@ -71,7 +71,7 @@ jobs:
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps: steps:
- name: Install Node - name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4 uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with: with:
node-version: "18" node-version: "18"
@@ -170,6 +170,58 @@ jobs:
- name: Upload Zed Nightly - name: Upload Zed Nightly
run: script/upload-nightly linux-targz run: script/upload-nightly linux-targz
bundle-nix:
timeout-minutes: 60
name: (${{ matrix.system.os }}) Nix Build
continue-on-error: true
strategy:
fail-fast: false
matrix:
system:
- os: x86 Linux
runner: buildjet-16vcpu-ubuntu-2204
install_nix: true
- os: arm Mac
# TODO: once other macs are provisioned for nix, remove that constraint from the runner
runner: [macOS, ARM64, nix]
install_nix: false
- os: arm Linux
runner: buildjet-16vcpu-ubuntu-2204-arm
install_nix: true
if: github.repository_owner == 'zed-industries'
runs-on: ${{ matrix.system.runner }}
needs: tests
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: 1 # breaks the livekit rust sdk examples which we don't actually depend on
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# on our macs we manually install nix. for some reason the cachix action is running
# under a non-login /bin/bash shell which doesn't source the proper script to add the
# nix profile to PATH, so we manually add them here
- name: Set path
if: ${{ ! matrix.system.install_nix }}
run: |
echo "/nix/var/nix/profiles/default/bin" >> $GITHUB_PATH
echo "/Users/administrator/.nix-profile/bin" >> $GITHUB_PATH
- uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
if: ${{ matrix.system.install_nix }}
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
with:
name: zed-industries
authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
- run: nix build
- run: nix-collect-garbage -d
update-nightly-tag: update-nightly-tag:
name: Update nightly tag name: Update nightly tag
if: github.repository_owner == 'zed-industries' if: github.repository_owner == 'zed-industries'

19
.zed/debug.json Normal file
View File

@@ -0,0 +1,19 @@
[
{
"label": "Debug Zed with LLDB",
"adapter": "lldb",
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT"
},
{
"label": "Debug Zed with GDB",
"adapter": "gdb",
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT",
"initialize_args": {
"stopAtBeginningOfMainSubprogram": true
}
}
]

282
Cargo.lock generated
View File

@@ -13,7 +13,6 @@ dependencies = [
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
"language", "language",
"lsp",
"project", "project",
"smallvec", "smallvec",
"ui", "ui",
@@ -245,7 +244,7 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"strum", "strum",
"thiserror 1.0.69", "thiserror 2.0.12",
"util", "util",
] ]
@@ -468,6 +467,7 @@ dependencies = [
"futures 0.3.31", "futures 0.3.31",
"fuzzy", "fuzzy",
"git", "git",
"git_ui",
"gpui", "gpui",
"heed", "heed",
"html_to_markdown", "html_to_markdown",
@@ -692,6 +692,7 @@ name = "assistant_tool"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"clock",
"collections", "collections",
"derive_more", "derive_more",
"gpui", "gpui",
@@ -714,6 +715,9 @@ dependencies = [
"feature_flags", "feature_flags",
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
"html_to_markdown",
"http_client",
"itertools 0.14.0",
"language", "language",
"language_model", "language_model",
"project", "project",
@@ -725,8 +729,10 @@ dependencies = [
"settings", "settings",
"theme", "theme",
"ui", "ui",
"unindent",
"util", "util",
"workspace", "workspace",
"worktree",
] ]
[[package]] [[package]]
@@ -789,9 +795,9 @@ dependencies = [
[[package]] [[package]]
name = "async-compression" name = "async-compression"
version = "0.4.20" version = "0.4.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "310c9bcae737a48ef5cdee3174184e6d548b292739ede61a1f955ef76a738861" checksum = "c0cf008e5e1a9e9e22a7d3c9a4992e21a350290069e36d8fb72304ed17e8f2d2"
dependencies = [ dependencies = [
"deflate64", "deflate64",
"flate2", "flate2",
@@ -973,9 +979,9 @@ dependencies = [
[[package]] [[package]]
name = "async-std" name = "async-std"
version = "1.13.0" version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" checksum = "730294c1c08c2e0f85759590518f6333f0d5a0a766a27d519c1b244c3dfd8a24"
dependencies = [ dependencies = [
"async-attributes", "async-attributes",
"async-channel 1.9.0", "async-channel 1.9.0",
@@ -1075,9 +1081,9 @@ dependencies = [
[[package]] [[package]]
name = "async-trait" name = "async-trait"
version = "0.1.87" version = "0.1.88"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d556ec1359574147ec0c4fc5eb525f3f23263a592b1a9c07e0a75b427de55c97" checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -1837,7 +1843,7 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"strum", "strum",
"thiserror 1.0.69", "thiserror 2.0.12",
"tokio", "tokio",
] ]
@@ -2350,7 +2356,7 @@ dependencies = [
"cap-primitives", "cap-primitives",
"cap-std", "cap-std",
"io-lifetimes", "io-lifetimes",
"windows-sys 0.59.0", "windows-sys 0.52.0",
] ]
[[package]] [[package]]
@@ -2378,7 +2384,7 @@ dependencies = [
"ipnet", "ipnet",
"maybe-owned", "maybe-owned",
"rustix", "rustix",
"windows-sys 0.59.0", "windows-sys 0.52.0",
"winx", "winx",
] ]
@@ -2438,7 +2444,7 @@ dependencies = [
"semver", "semver",
"serde", "serde",
"serde_json", "serde_json",
"thiserror 2.0.6", "thiserror 2.0.12",
] ]
[[package]] [[package]]
@@ -2639,6 +2645,12 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "circular-buffer"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dacb91f972298e70fc507a2ffcaf1545807f1a36da586fb846646030adc542f"
[[package]] [[package]]
name = "clang-sys" name = "clang-sys"
version = "1.8.1" version = "1.8.1"
@@ -2739,6 +2751,7 @@ dependencies = [
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
"http_client", "http_client",
"http_client_tls",
"log", "log",
"parking_lot", "parking_lot",
"paths", "paths",
@@ -2755,7 +2768,7 @@ dependencies = [
"telemetry", "telemetry",
"telemetry_events", "telemetry_events",
"text", "text",
"thiserror 1.0.69", "thiserror 2.0.12",
"time", "time",
"tiny_http", "tiny_http",
"tokio-socks", "tokio-socks",
@@ -2887,9 +2900,12 @@ dependencies = [
"clock", "clock",
"collab_ui", "collab_ui",
"collections", "collections",
"command_palette_hooks",
"context_server", "context_server",
"ctor", "ctor",
"dap",
"dashmap 6.1.0", "dashmap 6.1.0",
"debugger_ui",
"derive_more", "derive_more",
"editor", "editor",
"env_logger 0.11.7", "env_logger 0.11.7",
@@ -2953,7 +2969,7 @@ dependencies = [
"telemetry_events", "telemetry_events",
"text", "text",
"theme", "theme",
"thiserror 1.0.69", "thiserror 2.0.12",
"time", "time",
"tokio", "tokio",
"toml 0.8.20", "toml 0.8.20",
@@ -3833,6 +3849,66 @@ dependencies = [
"syn 2.0.100", "syn 2.0.100",
] ]
[[package]]
name = "dap"
version = "0.1.0"
dependencies = [
"anyhow",
"async-compression",
"async-pipe",
"async-tar",
"async-trait",
"client",
"collections",
"dap-types",
"env_logger 0.11.7",
"fs",
"futures 0.3.31",
"gpui",
"http_client",
"language",
"log",
"node_runtime",
"parking_lot",
"paths",
"schemars",
"serde",
"serde_json",
"settings",
"smallvec",
"smol",
"task",
"util",
]
[[package]]
name = "dap-types"
version = "0.0.1"
source = "git+https://github.com/zed-industries/dap-types?rev=bfd4af0#bfd4af084bbaa5f344e6925370d7642e41d0b5b8"
dependencies = [
"schemars",
"serde",
"serde_json",
]
[[package]]
name = "dap_adapters"
version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
"dap",
"gpui",
"language",
"paths",
"regex",
"serde",
"serde_json",
"sysinfo",
"task",
"util",
]
[[package]] [[package]]
name = "dashmap" name = "dashmap"
version = "5.5.3" version = "5.5.3"
@@ -3906,6 +3982,58 @@ dependencies = [
"winapi", "winapi",
] ]
[[package]]
name = "debugger_tools"
version = "0.1.0"
dependencies = [
"anyhow",
"dap",
"editor",
"futures 0.3.31",
"gpui",
"project",
"serde_json",
"settings",
"smol",
"util",
"workspace",
]
[[package]]
name = "debugger_ui"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"collections",
"command_palette_hooks",
"dap",
"editor",
"env_logger 0.11.7",
"feature_flags",
"futures 0.3.31",
"fuzzy",
"gpui",
"language",
"log",
"menu",
"picker",
"pretty_assertions",
"project",
"rpc",
"serde",
"serde_json",
"settings",
"sysinfo",
"task",
"terminal_view",
"theme",
"ui",
"unindent",
"util",
"workspace",
]
[[package]] [[package]]
name = "deepseek" name = "deepseek"
version = "0.1.0" version = "0.1.0"
@@ -4206,6 +4334,7 @@ dependencies = [
"db", "db",
"emojis", "emojis",
"env_logger 0.11.7", "env_logger 0.11.7",
"feature_flags",
"file_icons", "file_icons",
"fs", "fs",
"futures 0.3.31", "futures 0.3.31",
@@ -4222,6 +4351,7 @@ dependencies = [
"log", "log",
"lsp", "lsp",
"markdown", "markdown",
"menu",
"multi_buffer", "multi_buffer",
"ordered-float 2.10.1", "ordered-float 2.10.1",
"parking_lot", "parking_lot",
@@ -4462,7 +4592,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys 0.59.0", "windows-sys 0.52.0",
] ]
[[package]] [[package]]
@@ -4693,7 +4823,6 @@ dependencies = [
"db", "db",
"editor", "editor",
"extension_host", "extension_host",
"feature_flags",
"fs", "fs",
"fuzzy", "fuzzy",
"gpui", "gpui",
@@ -4706,6 +4835,7 @@ dependencies = [
"serde", "serde",
"settings", "settings",
"smallvec", "smallvec",
"strum",
"telemetry", "telemetry",
"theme", "theme",
"ui", "ui",
@@ -5109,8 +5239,8 @@ dependencies = [
"fsevent", "fsevent",
"futures 0.3.31", "futures 0.3.31",
"git", "git",
"git2",
"gpui", "gpui",
"ignore",
"libc", "libc",
"log", "log",
"notify 6.1.1", "notify 6.1.1",
@@ -5137,7 +5267,7 @@ checksum = "5e2e6123af26f0f2c51cc66869137080199406754903cc926a7690401ce09cb4"
dependencies = [ dependencies = [
"io-lifetimes", "io-lifetimes",
"rustix", "rustix",
"windows-sys 0.59.0", "windows-sys 0.52.0",
] ]
[[package]] [[package]]
@@ -5472,17 +5602,20 @@ dependencies = [
"serde_json", "serde_json",
"smol", "smol",
"sum_tree", "sum_tree",
"tempfile",
"text", "text",
"time", "time",
"unindent", "unindent",
"url", "url",
"util", "util",
"uuid",
] ]
[[package]] [[package]]
name = "git2" name = "git2"
version = "0.20.0" version = "0.20.1"
source = "git+https://github.com/rust-lang/git2-rs?rev=a3b90cb3756c1bb63e2317bf9cfa57838178de5c#a3b90cb3756c1bb63e2317bf9cfa57838178de5c" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5220b8ba44c68a9a7f7a7659e864dd73692e417ef0211bea133c7b74e031eeb9"
dependencies = [ dependencies = [
"bitflags 2.8.0", "bitflags 2.8.0",
"libc", "libc",
@@ -5504,8 +5637,10 @@ dependencies = [
"indoc", "indoc",
"pretty_assertions", "pretty_assertions",
"regex", "regex",
"schemars",
"serde", "serde",
"serde_json", "serde_json",
"settings",
"url", "url",
"util", "util",
] ]
@@ -5549,7 +5684,6 @@ dependencies = [
"serde_derive", "serde_derive",
"serde_json", "serde_json",
"settings", "settings",
"smallvec",
"strum", "strum",
"telemetry", "telemetry",
"theme", "theme",
@@ -5747,7 +5881,7 @@ dependencies = [
"strum", "strum",
"sum_tree", "sum_tree",
"taffy", "taffy",
"thiserror 1.0.69", "thiserror 2.0.12",
"unicode-segmentation", "unicode-segmentation",
"usvg", "usvg",
"util", "util",
@@ -6201,13 +6335,19 @@ dependencies = [
"futures 0.3.31", "futures 0.3.31",
"http 1.2.0", "http 1.2.0",
"log", "log",
"rustls 0.23.23",
"rustls-platform-verifier",
"serde", "serde",
"serde_json", "serde_json",
"url", "url",
] ]
[[package]]
name = "http_client_tls"
version = "0.1.0"
dependencies = [
"rustls 0.23.23",
"rustls-platform-verifier",
]
[[package]] [[package]]
name = "httparse" name = "httparse"
version = "1.9.5" version = "1.9.5"
@@ -6786,7 +6926,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2285ddfe3054097ef4b2fe909ef8c3bcd1ea52a8f0d274416caebeef39f04a65" checksum = "2285ddfe3054097ef4b2fe909ef8c3bcd1ea52a8f0d274416caebeef39f04a65"
dependencies = [ dependencies = [
"io-lifetimes", "io-lifetimes",
"windows-sys 0.59.0", "windows-sys 0.52.0",
] ]
[[package]] [[package]]
@@ -7189,7 +7329,7 @@ dependencies = [
"smol", "smol",
"strum", "strum",
"telemetry_events", "telemetry_events",
"thiserror 1.0.69", "thiserror 2.0.12",
"ui", "ui",
"util", "util",
] ]
@@ -7415,8 +7555,9 @@ dependencies = [
[[package]] [[package]]
name = "libgit2-sys" name = "libgit2-sys"
version = "0.18.0+1.9.0" version = "0.18.1+1.9.0"
source = "git+https://github.com/rust-lang/git2-rs?rev=a3b90cb3756c1bb63e2317bf9cfa57838178de5c#a3b90cb3756c1bb63e2317bf9cfa57838178de5c" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1dcb20f84ffcdd825c7a311ae347cce604a6f084a767dec4a4929829645290e"
dependencies = [ dependencies = [
"cc", "cc",
"libc", "libc",
@@ -7431,7 +7572,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"windows-targets 0.52.6", "windows-targets 0.48.5",
] ]
[[package]] [[package]]
@@ -9550,7 +9691,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc"
dependencies = [ dependencies = [
"memchr", "memchr",
"thiserror 2.0.6", "thiserror 2.0.12",
"ucd-trie", "ucd-trie",
] ]
@@ -10406,9 +10547,12 @@ dependencies = [
"askpass", "askpass",
"async-trait", "async-trait",
"buffer_diff", "buffer_diff",
"circular-buffer",
"client", "client",
"clock", "clock",
"collections", "collections",
"dap",
"dap_adapters",
"env_logger 0.11.7", "env_logger 0.11.7",
"extension", "extension",
"fancy-regex 0.14.0", "fancy-regex 0.14.0",
@@ -10421,6 +10565,7 @@ dependencies = [
"gpui", "gpui",
"http_client", "http_client",
"image", "image",
"indexmap",
"itertools 0.14.0", "itertools 0.14.0",
"language", "language",
"log", "log",
@@ -10818,7 +10963,7 @@ dependencies = [
"rustc-hash 2.1.1", "rustc-hash 2.1.1",
"rustls 0.23.23", "rustls 0.23.23",
"socket2", "socket2",
"thiserror 2.0.6", "thiserror 2.0.12",
"tokio", "tokio",
"tracing", "tracing",
] ]
@@ -10837,7 +10982,7 @@ dependencies = [
"rustls 0.23.23", "rustls 0.23.23",
"rustls-pki-types", "rustls-pki-types",
"slab", "slab",
"thiserror 2.0.6", "thiserror 2.0.12",
"tinyvec", "tinyvec",
"tracing", "tracing",
"web-time", "web-time",
@@ -10854,7 +10999,7 @@ dependencies = [
"once_cell", "once_cell",
"socket2", "socket2",
"tracing", "tracing",
"windows-sys 0.59.0", "windows-sys 0.52.0",
] ]
[[package]] [[package]]
@@ -11084,6 +11229,7 @@ version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"auto_update", "auto_update",
"dap",
"editor", "editor",
"extension_host", "extension_host",
"file_finder", "file_finder",
@@ -11245,7 +11391,8 @@ dependencies = [
"shlex", "shlex",
"smol", "smol",
"tempfile", "tempfile",
"thiserror 1.0.69", "thiserror 2.0.12",
"urlencoding",
"util", "util",
] ]
@@ -11460,6 +11607,7 @@ dependencies = [
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
"http_client", "http_client",
"http_client_tls",
"log", "log",
"regex", "regex",
"reqwest 0.12.8", "reqwest 0.12.8",
@@ -11776,7 +11924,7 @@ dependencies = [
"libc", "libc",
"linux-raw-sys", "linux-raw-sys",
"once_cell", "once_cell",
"windows-sys 0.59.0", "windows-sys 0.52.0",
] ]
[[package]] [[package]]
@@ -12323,9 +12471,9 @@ dependencies = [
[[package]] [[package]]
name = "semver" name = "semver"
version = "1.0.25" version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03" checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
dependencies = [ dependencies = [
"serde", "serde",
] ]
@@ -12558,6 +12706,13 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
[[package]]
name = "shell_parser"
version = "0.1.0"
dependencies = [
"shlex",
]
[[package]] [[package]]
name = "shellexpand" name = "shellexpand"
version = "2.1.2" version = "2.1.2"
@@ -12951,7 +13106,7 @@ dependencies = [
"serde_json", "serde_json",
"sha2", "sha2",
"smallvec", "smallvec",
"thiserror 2.0.6", "thiserror 2.0.12",
"time", "time",
"tokio", "tokio",
"tokio-stream", "tokio-stream",
@@ -13040,7 +13195,7 @@ dependencies = [
"smallvec", "smallvec",
"sqlx-core", "sqlx-core",
"stringprep", "stringprep",
"thiserror 2.0.6", "thiserror 2.0.12",
"time", "time",
"tracing", "tracing",
"uuid", "uuid",
@@ -13083,7 +13238,7 @@ dependencies = [
"smallvec", "smallvec",
"sqlx-core", "sqlx-core",
"stringprep", "stringprep",
"thiserror 2.0.6", "thiserror 2.0.12",
"time", "time",
"tracing", "tracing",
"uuid", "uuid",
@@ -13568,7 +13723,7 @@ dependencies = [
"fd-lock", "fd-lock",
"io-lifetimes", "io-lifetimes",
"rustix", "rustix",
"windows-sys 0.59.0", "windows-sys 0.52.0",
"winx", "winx",
] ]
@@ -13639,12 +13794,14 @@ version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"collections", "collections",
"dap-types",
"futures 0.3.31", "futures 0.3.31",
"gpui", "gpui",
"hex", "hex",
"parking_lot", "parking_lot",
"schemars", "schemars",
"serde", "serde",
"serde_json",
"serde_json_lenient", "serde_json_lenient",
"sha2", "sha2",
"shellexpand 2.1.2", "shellexpand 2.1.2",
@@ -13657,7 +13814,9 @@ name = "tasks_ui"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"debugger_ui",
"editor", "editor",
"feature_flags",
"file_icons", "file_icons",
"fuzzy", "fuzzy",
"gpui", "gpui",
@@ -13708,7 +13867,7 @@ dependencies = [
"getrandom 0.3.1", "getrandom 0.3.1",
"once_cell", "once_cell",
"rustix", "rustix",
"windows-sys 0.59.0", "windows-sys 0.52.0",
] ]
[[package]] [[package]]
@@ -13753,7 +13912,7 @@ dependencies = [
"sysinfo", "sysinfo",
"task", "task",
"theme", "theme",
"thiserror 1.0.69", "thiserror 2.0.12",
"util", "util",
"windows 0.60.0", "windows 0.60.0",
] ]
@@ -13848,7 +14007,7 @@ dependencies = [
"serde_repr", "serde_repr",
"settings", "settings",
"strum", "strum",
"thiserror 1.0.69", "thiserror 2.0.12",
"util", "util",
"uuid", "uuid",
] ]
@@ -13914,11 +14073,11 @@ dependencies = [
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "2.0.6" version = "2.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47" checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
dependencies = [ dependencies = [
"thiserror-impl 2.0.6", "thiserror-impl 2.0.12",
] ]
[[package]] [[package]]
@@ -13934,9 +14093,9 @@ dependencies = [
[[package]] [[package]]
name = "thiserror-impl" name = "thiserror-impl"
version = "2.0.6" version = "2.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312" checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@@ -14874,6 +15033,19 @@ dependencies = [
"syn 1.0.109", "syn 1.0.109",
] ]
[[package]]
name = "ui_prompt"
version = "0.1.0"
dependencies = [
"gpui",
"markdown",
"menu",
"settings",
"theme",
"ui",
"workspace",
]
[[package]] [[package]]
name = "unicase" name = "unicase"
version = "2.8.1" version = "2.8.1"
@@ -16089,7 +16261,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.48.0",
] ]
[[package]] [[package]]
@@ -16649,7 +16821,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d" checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d"
dependencies = [ dependencies = [
"bitflags 2.8.0", "bitflags 2.8.0",
"windows-sys 0.59.0", "windows-sys 0.52.0",
] ]
[[package]] [[package]]
@@ -17170,7 +17342,7 @@ dependencies = [
[[package]] [[package]]
name = "zed" name = "zed"
version = "0.179.0" version = "0.180.0"
dependencies = [ dependencies = [
"activity_indicator", "activity_indicator",
"anyhow", "anyhow",
@@ -17200,6 +17372,8 @@ dependencies = [
"component_preview", "component_preview",
"copilot", "copilot",
"db", "db",
"debugger_tools",
"debugger_ui",
"diagnostics", "diagnostics",
"editor", "editor",
"env_logger 0.11.7", "env_logger 0.11.7",
@@ -17231,7 +17405,6 @@ dependencies = [
"languages", "languages",
"libc", "libc",
"log", "log",
"markdown",
"markdown_preview", "markdown_preview",
"menu", "menu",
"migrator", "migrator",
@@ -17283,6 +17456,7 @@ dependencies = [
"tree-sitter-md", "tree-sitter-md",
"tree-sitter-rust", "tree-sitter-rust",
"ui", "ui",
"ui_prompt",
"url", "url",
"urlencoding", "urlencoding",
"util", "util",
@@ -17569,7 +17743,7 @@ dependencies = [
"telemetry", "telemetry",
"telemetry_events", "telemetry_events",
"theme", "theme",
"thiserror 1.0.69", "thiserror 2.0.12",
"tree-sitter-go", "tree-sitter-go",
"tree-sitter-rust", "tree-sitter-rust",
"ui", "ui",

View File

@@ -37,6 +37,10 @@ members = [
"crates/context_server_settings", "crates/context_server_settings",
"crates/copilot", "crates/copilot",
"crates/credentials_provider", "crates/credentials_provider",
"crates/dap",
"crates/dap_adapters",
"crates/debugger_tools",
"crates/debugger_ui",
"crates/db", "crates/db",
"crates/deepseek", "crates/deepseek",
"crates/diagnostics", "crates/diagnostics",
@@ -65,6 +69,7 @@ members = [
"crates/gpui_tokio", "crates/gpui_tokio",
"crates/html_to_markdown", "crates/html_to_markdown",
"crates/http_client", "crates/http_client",
"crates/http_client_tls",
"crates/image_viewer", "crates/image_viewer",
"crates/indexed_docs", "crates/indexed_docs",
"crates/inline_completion", "crates/inline_completion",
@@ -126,6 +131,7 @@ members = [
"crates/session", "crates/session",
"crates/settings", "crates/settings",
"crates/settings_ui", "crates/settings_ui",
"crates/shell_parser",
"crates/snippet", "crates/snippet",
"crates/snippet_provider", "crates/snippet_provider",
"crates/snippets_ui", "crates/snippets_ui",
@@ -155,6 +161,7 @@ members = [
"crates/ui", "crates/ui",
"crates/ui_input", "crates/ui_input",
"crates/ui_macros", "crates/ui_macros",
"crates/ui_prompt",
"crates/util", "crates/util",
"crates/util_macros", "crates/util_macros",
"crates/vim", "crates/vim",
@@ -235,7 +242,11 @@ context_server = { path = "crates/context_server" }
context_server_settings = { path = "crates/context_server_settings" } context_server_settings = { path = "crates/context_server_settings" }
copilot = { path = "crates/copilot" } copilot = { path = "crates/copilot" }
credentials_provider = { path = "crates/credentials_provider" } credentials_provider = { path = "crates/credentials_provider" }
dap = { path = "crates/dap" }
dap_adapters = { path = "crates/dap_adapters" }
db = { path = "crates/db" } db = { path = "crates/db" }
debugger_ui = { path = "crates/debugger_ui" }
debugger_tools = { path = "crates/debugger_tools" }
deepseek = { path = "crates/deepseek" } deepseek = { path = "crates/deepseek" }
diagnostics = { path = "crates/diagnostics" } diagnostics = { path = "crates/diagnostics" }
buffer_diff = { path = "crates/buffer_diff" } buffer_diff = { path = "crates/buffer_diff" }
@@ -262,6 +273,7 @@ gpui_macros = { path = "crates/gpui_macros" }
gpui_tokio = { path = "crates/gpui_tokio" } gpui_tokio = { path = "crates/gpui_tokio" }
html_to_markdown = { path = "crates/html_to_markdown" } html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" } http_client = { path = "crates/http_client" }
http_client_tls = { path = "crates/http_client_tls" }
image_viewer = { path = "crates/image_viewer" } image_viewer = { path = "crates/image_viewer" }
indexed_docs = { path = "crates/indexed_docs" } indexed_docs = { path = "crates/indexed_docs" }
inline_completion = { path = "crates/inline_completion" } inline_completion = { path = "crates/inline_completion" }
@@ -352,6 +364,7 @@ toolchain_selector = { path = "crates/toolchain_selector" }
ui = { path = "crates/ui" } ui = { path = "crates/ui" }
ui_input = { path = "crates/ui_input" } ui_input = { path = "crates/ui_input" }
ui_macros = { path = "crates/ui_macros" } ui_macros = { path = "crates/ui_macros" }
ui_prompt = { path = "crates/ui_prompt" }
util = { path = "crates/util" } util = { path = "crates/util" }
util_macros = { path = "crates/util_macros" } util_macros = { path = "crates/util_macros" }
vim = { path = "crates/vim" } vim = { path = "crates/vim" }
@@ -400,6 +413,7 @@ bytes = "1.0"
cargo_metadata = "0.19" cargo_metadata = "0.19"
cargo_toml = "0.21" cargo_toml = "0.21"
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
circular-buffer = "1.0"
clap = { version = "4.4", features = ["derive"] } clap = { version = "4.4", features = ["derive"] }
cocoa = "0.26" cocoa = "0.26"
cocoa-foundation = "0.2.0" cocoa-foundation = "0.2.0"
@@ -408,6 +422,7 @@ core-foundation = "0.9.3"
core-foundation-sys = "0.8.6" core-foundation-sys = "0.8.6"
ctor = "0.4.0" ctor = "0.4.0"
dashmap = "6.0" dashmap = "6.0"
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "bfd4af0" }
derive_more = "0.99.17" derive_more = "0.99.17"
dirs = "4.0" dirs = "4.0"
ec4rs = "1.1" ec4rs = "1.1"
@@ -419,8 +434,7 @@ fork = "0.2.0"
futures = "0.3" futures = "0.3"
futures-batch = "0.6.1" futures-batch = "0.6.1"
futures-lite = "1.13" futures-lite = "1.13"
# TODO: get back to regular versions when https://github.com/rust-lang/git2-rs/pull/1120 is released git2 = { version = "0.20.1", default-features = false }
git2 = { git = "https://github.com/rust-lang/git2-rs", rev = "a3b90cb3756c1bb63e2317bf9cfa57838178de5c", default-features = false }
globset = "0.4" globset = "0.4"
handlebars = "4.3" handlebars = "4.3"
heed = { version = "0.21.0", features = ["read-txn-no-tls"] } heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
@@ -522,7 +536,7 @@ sys-locale = "0.3.1"
sysinfo = "0.31.0" sysinfo = "0.31.0"
take-until = "0.2.0" take-until = "0.2.0"
tempfile = "3.9.0" tempfile = "3.9.0"
thiserror = "1.0.29" thiserror = "2.0.12"
tiktoken-rs = "0.6.0" tiktoken-rs = "0.6.0"
time = { version = "0.3", features = [ time = { version = "0.3", features = [
"macros", "macros",
@@ -563,6 +577,7 @@ unindent = "0.2.0"
unicode-segmentation = "1.10" unicode-segmentation = "1.10"
unicode-script = "0.5.7" unicode-script = "0.5.7"
url = "2.2" url = "2.2"
urlencoding = "2.1.2"
uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] } uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] }
wasmparser = "0.221" wasmparser = "0.221"
wasm-encoder = "0.221" wasm-encoder = "0.221"

1
assets/icons/debug.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-bug"><path d="m8 2 1.88 1.88"/><path d="M14.12 3.88 16 2"/><path d="M9 7.13v-1a3.003 3.003 0 1 1 6 0v1"/><path d="M12 20c-3.3 0-6-2.7-6-6v-3a4 4 0 0 1 4-4h4a4 4 0 0 1 4 4v3c0 3.3-2.7 6-6 6"/><path d="M12 20v-9"/><path d="M6.53 9C4.6 8.8 3 7.1 3 5"/><path d="M6 13H2"/><path d="M3 21c0-2.1 1.7-3.9 3.8-4"/><path d="M20.97 5c0 2.1-1.6 3.8-3.5 4"/><path d="M22 13h-4"/><path d="M17.2 17c2.1.1 3.8 1.9 3.8 4"/></svg>

After

Width:  |  Height:  |  Size: 615 B

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="currentColor" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-circle"><circle cx="12" cy="12" r="10"/></svg>

After

Width:  |  Height:  |  Size: 257 B

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-step-forward"><line x1="6" x2="6" y1="4" y2="20"/><polygon points="10,4 20,12 10,20"/></svg>

After

Width:  |  Height:  |  Size: 295 B

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-unplug"><path d="m19 5 3-3"/><path d="m2 22 3-3"/><path d="M6.3 20.3a2.4 2.4 0 0 0 3.4 0L12 18l-6-6-2.3 2.3a2.4 2.4 0 0 0 0 3.4Z"/><path d="M7.5 13.5 10 11"/><path d="M10.5 16.5 13 14"/><path d="m12 6 6 6 2.3-2.3a2.4 2.4 0 0 0 0-3.4l-2.6-2.6a2.4 2.4 0 0 0-3.4 0Z"/></svg>

After

Width:  |  Height:  |  Size: 474 B

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-circle-off"><path d="m2 2 20 20"/><path d="M8.35 2.69A10 10 0 0 1 21.3 15.65"/><path d="M19.08 19.08A10 10 0 1 1 4.92 4.92"/></svg>

After

Width:  |  Height:  |  Size: 334 B

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="currentColor" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-message-circle"><path d="M7.9 20A9 9 0 1 0 4 16.1L2 22Z"/></svg>

After

Width:  |  Height:  |  Size: 275 B

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-pause"><rect x="14" y="4" width="4" height="16" rx="1"/><rect x="6" y="4" width="4" height="16" rx="1"/></svg>

After

Width:  |  Height:  |  Size: 313 B

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-rotate-ccw"><path d="M3 12a9 9 0 1 0 9-9 9.75 9.75 0 0 0-6.74 2.74L3 8"/><path d="M3 3v5h5"/></svg>

After

Width:  |  Height:  |  Size: 302 B

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-undo-dot"><path d="M21 17a9 9 0 0 0-15-6.7L3 13"/><path d="M3 7v6h6"/><circle cx="12" cy="17" r="1"/></svg>

After

Width:  |  Height:  |  Size: 310 B

View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-up-from-dot">
<path d="m5 15 7 7 7-7"/>
<path d="M12 8v14"/>
<circle cx="12" cy="3" r="1"/>
</svg>

After

Width:  |  Height:  |  Size: 313 B

View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-up-from-dot">
<path d="m3 10 9-8 9 8"/>
<path d="M12 17V2"/>
<circle cx="12" cy="21" r="1"/>
</svg>

After

Width:  |  Height:  |  Size: 314 B

View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-redo-dot">
<circle cx="12" cy="17" r="1"/>
<path d="M21 7v6h-6"/>
<path d="M3 17a9 9 0 0 1 9-9 9 9 0 0 1 6 2.3l3 2.7"/>
</svg>

After

Width:  |  Height:  |  Size: 335 B

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-square"><rect width="18" height="18" x="3" y="3" rx="2"/></svg>

After

Width:  |  Height:  |  Size: 266 B

View File

@@ -30,6 +30,13 @@
"ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }], "ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
"ctrl-,": "zed::OpenSettings", "ctrl-,": "zed::OpenSettings",
"ctrl-q": "zed::Quit", "ctrl-q": "zed::Quit",
"f4": "debugger::Start",
"f5": "debugger::Continue",
"shift-f5": "debugger::Stop",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
"cmd-f11": "debugger::StepInto",
"shift-f11": "debugger::StepOut",
"f11": "zed::ToggleFullScreen", "f11": "zed::ToggleFullScreen",
"ctrl-alt-z": "edit_prediction::RateCompletions", "ctrl-alt-z": "edit_prediction::RateCompletions",
"ctrl-shift-i": "edit_prediction::ToggleMenu" "ctrl-shift-i": "edit_prediction::ToggleMenu"
@@ -46,7 +53,9 @@
"context": "Prompt", "context": "Prompt",
"bindings": { "bindings": {
"left": "menu::SelectPrevious", "left": "menu::SelectPrevious",
"right": "menu::SelectNext" "right": "menu::SelectNext",
"h": "menu::SelectPrevious",
"l": "menu::SelectNext"
} }
}, },
{ {
@@ -107,6 +116,7 @@
"ctrl-a": "editor::SelectAll", "ctrl-a": "editor::SelectAll",
"ctrl-l": "editor::SelectLine", "ctrl-l": "editor::SelectLine",
"ctrl-shift-i": "editor::Format", "ctrl-shift-i": "editor::Format",
"alt-shift-o": "editor::OrganizeImports",
// "cmd-shift-left": ["editor::SelectToBeginningOfLine", {"stop_at_soft_wraps": true, "stop_at_indent": true }], // "cmd-shift-left": ["editor::SelectToBeginningOfLine", {"stop_at_soft_wraps": true, "stop_at_indent": true }],
// "ctrl-shift-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }], // "ctrl-shift-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
"shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }], "shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
@@ -123,7 +133,9 @@
"alt-g b": "editor::ToggleGitBlame", "alt-g b": "editor::ToggleGitBlame",
"menu": "editor::OpenContextMenu", "menu": "editor::OpenContextMenu",
"shift-f10": "editor::OpenContextMenu", "shift-f10": "editor::OpenContextMenu",
"ctrl-shift-e": "editor::ToggleEditPrediction" "ctrl-shift-e": "editor::ToggleEditPrediction",
"f9": "editor::ToggleBreakpoint",
"shift-f9": "editor::EditLogBreakpoint"
} }
}, },
{ {
@@ -742,6 +754,8 @@
"escape": "git_panel::ToggleFocus", "escape": "git_panel::ToggleFocus",
"ctrl-enter": "git::Commit", "ctrl-enter": "git::Commit",
"alt-enter": "menu::SecondaryConfirm", "alt-enter": "menu::SecondaryConfirm",
"delete": "git::RestoreFile",
"shift-delete": "git::RestoreFile",
"backspace": "git::RestoreFile" "backspace": "git::RestoreFile"
} }
}, },

View File

@@ -14,6 +14,13 @@
{ {
"use_key_equivalents": true, "use_key_equivalents": true,
"bindings": { "bindings": {
"f4": "debugger::Start",
"f5": "debugger::Continue",
"shift-f5": "debugger::Stop",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
"f11": "debugger::StepInto",
"shift-f11": "debugger::StepOut",
"home": "menu::SelectFirst", "home": "menu::SelectFirst",
"shift-pageup": "menu::SelectFirst", "shift-pageup": "menu::SelectFirst",
"pageup": "menu::SelectFirst", "pageup": "menu::SelectFirst",
@@ -148,6 +155,8 @@
"cmd-\"": "editor::ExpandAllDiffHunks", "cmd-\"": "editor::ExpandAllDiffHunks",
"cmd-alt-g b": "editor::ToggleGitBlame", "cmd-alt-g b": "editor::ToggleGitBlame",
"cmd-i": "editor::ShowSignatureHelp", "cmd-i": "editor::ShowSignatureHelp",
"f9": "editor::ToggleBreakpoint",
"shift-f9": "editor::EditLogBreakpoint",
"ctrl-f12": "editor::GoToDeclaration", "ctrl-f12": "editor::GoToDeclaration",
"alt-ctrl-f12": "editor::GoToDeclarationSplit", "alt-ctrl-f12": "editor::GoToDeclarationSplit",
"ctrl-cmd-e": "editor::ToggleEditPrediction" "ctrl-cmd-e": "editor::ToggleEditPrediction"
@@ -696,6 +705,16 @@
"ctrl-]": "assistant::CycleNextInlineAssist" "ctrl-]": "assistant::CycleNextInlineAssist"
} }
}, },
{
"context": "Prompt",
"use_key_equivalents": true,
"bindings": {
"left": "menu::SelectPrevious",
"right": "menu::SelectNext",
"h": "menu::SelectPrevious",
"l": "menu::SelectNext"
}
},
{ {
"context": "ProjectSearchBar && !in_replace", "context": "ProjectSearchBar && !in_replace",
"use_key_equivalents": true, "use_key_equivalents": true,
@@ -756,6 +775,14 @@
"space": "project_panel::Open" "space": "project_panel::Open"
} }
}, },
{
"context": "VariableList",
"use_key_equivalents": true,
"bindings": {
"left": "variable_list::CollapseSelectedEntry",
"right": "variable_list::ExpandSelectedEntry"
}
},
{ {
"context": "GitPanel && ChangesList", "context": "GitPanel && ChangesList",
"use_key_equivalents": true, "use_key_equivalents": true,
@@ -774,6 +801,8 @@
"shift-tab": "git_panel::FocusEditor", "shift-tab": "git_panel::FocusEditor",
"escape": "git_panel::ToggleFocus", "escape": "git_panel::ToggleFocus",
"cmd-enter": "git::Commit", "cmd-enter": "git::Commit",
"delete": "git::RestoreFile",
"cmd-backspace": "git::RestoreFile",
"backspace": "git::RestoreFile" "backspace": "git::RestoreFile"
} }
}, },

View File

@@ -3,7 +3,14 @@
"bindings": { "bindings": {
"ctrl-alt-s": "zed::OpenSettings", "ctrl-alt-s": "zed::OpenSettings",
"ctrl-{": "pane::ActivatePreviousItem", "ctrl-{": "pane::ActivatePreviousItem",
"ctrl-}": "pane::ActivateNextItem" "ctrl-}": "pane::ActivateNextItem",
"ctrl-f2": "debugger::Stop",
"f6": "debugger::Pause",
"f7": "debugger::StepInto",
"f8": "debugger::StepOver",
"shift-f8": "debugger::StepOut",
"f9": "debugger::Continue",
"alt-shift-f9": "debugger::Start"
} }
}, },
{ {
@@ -31,6 +38,7 @@
"shift-alt-up": "editor::MoveLineUp", "shift-alt-up": "editor::MoveLineUp",
"shift-alt-down": "editor::MoveLineDown", "shift-alt-down": "editor::MoveLineDown",
"ctrl-alt-l": "editor::Format", "ctrl-alt-l": "editor::Format",
"ctrl-alt-o": "editor::OrganizeImports",
"shift-f6": "editor::Rename", "shift-f6": "editor::Rename",
"ctrl-alt-left": "pane::GoBack", "ctrl-alt-left": "pane::GoBack",
"ctrl-alt-right": "pane::GoForward", "ctrl-alt-right": "pane::GoForward",
@@ -48,7 +56,9 @@
"ctrl-home": "editor::MoveToBeginning", "ctrl-home": "editor::MoveToBeginning",
"ctrl-end": "editor::MoveToEnd", "ctrl-end": "editor::MoveToEnd",
"ctrl-shift-home": "editor::SelectToBeginning", "ctrl-shift-home": "editor::SelectToBeginning",
"ctrl-shift-end": "editor::SelectToEnd" "ctrl-shift-end": "editor::SelectToEnd",
"ctrl-f8": "editor::ToggleBreakpoint",
"ctrl-shift-f8": "editor::EditLogBreakpoint"
} }
}, },
{ {

View File

@@ -2,7 +2,14 @@
{ {
"bindings": { "bindings": {
"cmd-{": "pane::ActivatePreviousItem", "cmd-{": "pane::ActivatePreviousItem",
"cmd-}": "pane::ActivateNextItem" "cmd-}": "pane::ActivateNextItem",
"ctrl-f2": "debugger::Stop",
"f6": "debugger::Pause",
"f7": "debugger::StepInto",
"f8": "debugger::StepOver",
"shift-f8": "debugger::StepOut",
"f9": "debugger::Continue",
"alt-shift-f9": "debugger::Start"
} }
}, },
{ {
@@ -29,6 +36,7 @@
"shift-alt-up": "editor::MoveLineUp", "shift-alt-up": "editor::MoveLineUp",
"shift-alt-down": "editor::MoveLineDown", "shift-alt-down": "editor::MoveLineDown",
"cmd-alt-l": "editor::Format", "cmd-alt-l": "editor::Format",
"ctrl-alt-o": "editor::OrganizeImports",
"shift-f6": "editor::Rename", "shift-f6": "editor::Rename",
"cmd-[": "pane::GoBack", "cmd-[": "pane::GoBack",
"cmd-]": "pane::GoForward", "cmd-]": "pane::GoForward",
@@ -45,7 +53,9 @@
"cmd-home": "editor::MoveToBeginning", "cmd-home": "editor::MoveToBeginning",
"cmd-end": "editor::MoveToEnd", "cmd-end": "editor::MoveToEnd",
"cmd-shift-home": "editor::SelectToBeginning", "cmd-shift-home": "editor::SelectToBeginning",
"cmd-shift-end": "editor::SelectToEnd" "cmd-shift-end": "editor::SelectToEnd",
"ctrl-f8": "editor::ToggleBreakpoint",
"ctrl-shift-f8": "editor::EditLogBreakpoint"
} }
}, },
{ {

View File

@@ -136,6 +136,11 @@
// Whether to use the system provided dialogs for Open and Save As. // Whether to use the system provided dialogs for Open and Save As.
// When set to false, Zed will use the built-in keyboard-first pickers. // When set to false, Zed will use the built-in keyboard-first pickers.
"use_system_path_prompts": true, "use_system_path_prompts": true,
// Whether to use the system provided dialogs for prompts, such as confirmation
// prompts.
// When set to false, Zed will use its built-in prompts. Note that on Linux,
// this option is ignored and Zed will always use the built-in prompts.
"use_system_prompts": true,
// Whether the cursor blinks in the editor. // Whether the cursor blinks in the editor.
"cursor_blink": true, "cursor_blink": true,
// Cursor shape for the default editor. // Cursor shape for the default editor.
@@ -324,6 +329,8 @@
"code_actions": true, "code_actions": true,
// Whether to show runnables buttons in the gutter. // Whether to show runnables buttons in the gutter.
"runnables": true, "runnables": true,
// Whether to show breakpoints in the gutter.
"breakpoints": true,
// Whether to show fold buttons in the gutter. // Whether to show fold buttons in the gutter.
"folds": true "folds": true
}, },
@@ -850,8 +857,24 @@
// //
// The minimum column number to show the inline blame information at // The minimum column number to show the inline blame information at
// "min_column": 0 // "min_column": 0
} },
// How git hunks are displayed visually in the editor.
// This setting can take two values:
//
// 1. Show unstaged hunks filled and staged hunks hollow:
// "hunk_style": "staged_hollow"
// 2. Show unstaged hunks hollow and staged hunks filled:
// "hunk_style": "unstaged_hollow"
"hunk_style": "staged_hollow"
}, },
// The list of custom Git hosting providers.
"git_hosting_providers": [
// {
// "provider": "github",
// "name": "BigCorp GitHub",
// "base_url": "https://code.big-corp.com"
// }
],
// Configuration for how direnv configuration should be loaded. May take 2 values: // Configuration for how direnv configuration should be loaded. May take 2 values:
// 1. Load direnv configuration using `direnv export json` directly. // 1. Load direnv configuration using `direnv export json` directly.
// "load_direnv": "direct" // "load_direnv": "direct"
@@ -1437,6 +1460,12 @@
// } // }
// ] // ]
"ssh_connections": [], "ssh_connections": [],
// Configures context servers for use in the Assistant. // Configures context servers for use in the Assistant.
"context_servers": {} "context_servers": {},
"debugger": {
"stepping_granularity": "line",
"save_breakpoints": true,
"button": true
}
} }

View File

@@ -0,0 +1,32 @@
[
{
"label": "Debug active PHP file",
"adapter": "php",
"program": "$ZED_FILE",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT"
},
{
"label": "Debug active Python file",
"adapter": "python",
"program": "$ZED_FILE",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT"
},
{
"label": "Debug active JavaScript file",
"adapter": "javascript",
"program": "$ZED_FILE",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT"
},
{
"label": "JavaScript debug terminal",
"adapter": "javascript",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT",
"initialize_args": {
"console": "integratedTerminal"
}
}
]

View File

@@ -20,7 +20,6 @@ extension_host.workspace = true
futures.workspace = true futures.workspace = true
gpui.workspace = true gpui.workspace = true
language.workspace = true language.workspace = true
lsp.workspace = true
project.workspace = true project.workspace = true
smallvec.workspace = true smallvec.workspace = true
ui.workspace = true ui.workspace = true

View File

@@ -7,8 +7,7 @@ use gpui::{
EventEmitter, InteractiveElement as _, ParentElement as _, Render, SharedString, EventEmitter, InteractiveElement as _, ParentElement as _, Render, SharedString,
StatefulInteractiveElement, Styled, Transformation, Window, StatefulInteractiveElement, Styled, Transformation, Window,
}; };
use language::{LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId}; use language::{BinaryStatus, LanguageRegistry, LanguageServerId};
use lsp::LanguageServerName;
use project::{ use project::{
EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project, EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project,
ProjectEnvironmentEvent, WorktreeId, ProjectEnvironmentEvent, WorktreeId,
@@ -23,21 +22,21 @@ actions!(activity_indicator, [ShowErrorMessage]);
pub enum Event { pub enum Event {
ShowError { ShowError {
lsp_name: LanguageServerName, server_name: SharedString,
error: String, error: String,
}, },
} }
pub struct ActivityIndicator { pub struct ActivityIndicator {
statuses: Vec<LspStatus>, statuses: Vec<ServerStatus>,
project: Entity<Project>, project: Entity<Project>,
auto_updater: Option<Entity<AutoUpdater>>, auto_updater: Option<Entity<AutoUpdater>>,
context_menu_handle: PopoverMenuHandle<ContextMenu>, context_menu_handle: PopoverMenuHandle<ContextMenu>,
} }
struct LspStatus { struct ServerStatus {
name: LanguageServerName, name: SharedString,
status: LanguageServerBinaryStatus, status: BinaryStatus,
} }
struct PendingWork<'a> { struct PendingWork<'a> {
@@ -64,11 +63,24 @@ impl ActivityIndicator {
let auto_updater = AutoUpdater::get(cx); let auto_updater = AutoUpdater::get(cx);
let this = cx.new(|cx| { let this = cx.new(|cx| {
let mut status_events = languages.language_server_binary_statuses(); let mut status_events = languages.language_server_binary_statuses();
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
while let Some((name, status)) = status_events.next().await { while let Some((name, status)) = status_events.next().await {
this.update(&mut cx, |this: &mut ActivityIndicator, cx| { this.update(cx, |this: &mut ActivityIndicator, cx| {
this.statuses.retain(|s| s.name != name); this.statuses.retain(|s| s.name != name);
this.statuses.push(LspStatus { name, status }); this.statuses.push(ServerStatus { name, status });
cx.notify();
})?;
}
anyhow::Ok(())
})
.detach();
let mut status_events = languages.dap_server_binary_statuses();
cx.spawn(async move |this, cx| {
while let Some((name, status)) = status_events.next().await {
this.update(cx, |this, cx| {
this.statuses.retain(|s| s.name != name);
this.statuses.push(ServerStatus { name, status });
cx.notify(); cx.notify();
})?; })?;
} }
@@ -106,25 +118,25 @@ impl ActivityIndicator {
}); });
cx.subscribe_in(&this, window, move |_, _, event, window, cx| match event { cx.subscribe_in(&this, window, move |_, _, event, window, cx| match event {
Event::ShowError { lsp_name, error } => { Event::ShowError { server_name, error } => {
let create_buffer = project.update(cx, |project, cx| project.create_buffer(cx)); let create_buffer = project.update(cx, |project, cx| project.create_buffer(cx));
let project = project.clone(); let project = project.clone();
let error = error.clone(); let error = error.clone();
let lsp_name = lsp_name.clone(); let server_name = server_name.clone();
cx.spawn_in(window, |workspace, mut cx| async move { cx.spawn_in(window, async move |workspace, cx| {
let buffer = create_buffer.await?; let buffer = create_buffer.await?;
buffer.update(&mut cx, |buffer, cx| { buffer.update(cx, |buffer, cx| {
buffer.edit( buffer.edit(
[( [(
0..0, 0..0,
format!("Language server error: {}\n\n{}", lsp_name, error), format!("Language server error: {}\n\n{}", server_name, error),
)], )],
None, None,
cx, cx,
); );
buffer.set_capability(language::Capability::ReadOnly, cx); buffer.set_capability(language::Capability::ReadOnly, cx);
})?; })?;
workspace.update_in(&mut cx, |workspace, window, cx| { workspace.update_in(cx, |workspace, window, cx| {
workspace.add_item_to_active_pane( workspace.add_item_to_active_pane(
Box::new(cx.new(|cx| { Box::new(cx.new(|cx| {
Editor::for_buffer(buffer, Some(project.clone()), window, cx) Editor::for_buffer(buffer, Some(project.clone()), window, cx)
@@ -147,9 +159,9 @@ impl ActivityIndicator {
fn show_error_message(&mut self, _: &ShowErrorMessage, _: &mut Window, cx: &mut Context<Self>) { fn show_error_message(&mut self, _: &ShowErrorMessage, _: &mut Window, cx: &mut Context<Self>) {
self.statuses.retain(|status| { self.statuses.retain(|status| {
if let LanguageServerBinaryStatus::Failed { error } = &status.status { if let BinaryStatus::Failed { error } = &status.status {
cx.emit(Event::ShowError { cx.emit(Event::ShowError {
lsp_name: status.name.clone(), server_name: status.name.clone(),
error: error.clone(), error: error.clone(),
}); });
false false
@@ -278,12 +290,10 @@ impl ActivityIndicator {
let mut failed = SmallVec::<[_; 3]>::new(); let mut failed = SmallVec::<[_; 3]>::new();
for status in &self.statuses { for status in &self.statuses {
match status.status { match status.status {
LanguageServerBinaryStatus::CheckingForUpdate => { BinaryStatus::CheckingForUpdate => checking_for_update.push(status.name.clone()),
checking_for_update.push(status.name.clone()) BinaryStatus::Downloading => downloading.push(status.name.clone()),
} BinaryStatus::Failed { .. } => failed.push(status.name.clone()),
LanguageServerBinaryStatus::Downloading => downloading.push(status.name.clone()), BinaryStatus::None => {}
LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.clone()),
LanguageServerBinaryStatus::None => {}
} }
} }
@@ -296,7 +306,7 @@ impl ActivityIndicator {
), ),
message: format!( message: format!(
"Downloading {}...", "Downloading {}...",
downloading.iter().map(|name| name.0.as_ref()).fold( downloading.iter().map(|name| name.as_ref()).fold(
String::new(), String::new(),
|mut acc, s| { |mut acc, s| {
if !acc.is_empty() { if !acc.is_empty() {
@@ -324,7 +334,7 @@ impl ActivityIndicator {
), ),
message: format!( message: format!(
"Checking for updates to {}...", "Checking for updates to {}...",
checking_for_update.iter().map(|name| name.0.as_ref()).fold( checking_for_update.iter().map(|name| name.as_ref()).fold(
String::new(), String::new(),
|mut acc, s| { |mut acc, s| {
if !acc.is_empty() { if !acc.is_empty() {
@@ -354,7 +364,7 @@ impl ActivityIndicator {
"Failed to run {}. Click to show error.", "Failed to run {}. Click to show error.",
failed failed
.iter() .iter()
.map(|name| name.0.as_ref()) .map(|name| name.as_ref())
.fold(String::new(), |mut acc, s| { .fold(String::new(), |mut acc, s| {
if !acc.is_empty() { if !acc.is_empty() {
acc.push_str(", "); acc.push_str(", ");

View File

@@ -34,9 +34,9 @@ impl AskPassDelegate {
password_prompt: impl Fn(String, oneshot::Sender<String>, &mut AsyncApp) + Send + Sync + 'static, password_prompt: impl Fn(String, oneshot::Sender<String>, &mut AsyncApp) + Send + Sync + 'static,
) -> Self { ) -> Self {
let (tx, mut rx) = mpsc::unbounded::<(String, oneshot::Sender<String>)>(); let (tx, mut rx) = mpsc::unbounded::<(String, oneshot::Sender<String>)>();
let task = cx.spawn(|mut cx| async move { let task = cx.spawn(async move |cx: &mut AsyncApp| {
while let Some((prompt, channel)) = rx.next().await { while let Some((prompt, channel)) = rx.next().await {
password_prompt(prompt, channel, &mut cx); password_prompt(prompt, channel, cx);
} }
}); });
Self { tx, _task: task } Self { tx, _task: task }

View File

@@ -98,9 +98,9 @@ pub fn init(
AssistantSettings::register(cx); AssistantSettings::register(cx);
SlashCommandSettings::register(cx); SlashCommandSettings::register(cx);
cx.spawn(|mut cx| { cx.spawn({
let client = client.clone(); let client = client.clone();
async move { async move |cx| {
let is_search_slash_command_enabled = cx let is_search_slash_command_enabled = cx
.update(|cx| cx.wait_for_flag::<SearchSlashCommandFeatureFlag>())? .update(|cx| cx.wait_for_flag::<SearchSlashCommandFeatureFlag>())?
.await; .await;
@@ -116,7 +116,7 @@ pub fn init(
let semantic_index = SemanticDb::new( let semantic_index = SemanticDb::new(
paths::embeddings_dir().join("semantic-index-db.0.mdb"), paths::embeddings_dir().join("semantic-index-db.0.mdb"),
Arc::new(embedding_provider), Arc::new(embedding_provider),
&mut cx, cx,
) )
.await?; .await?;

View File

@@ -98,16 +98,16 @@ impl AssistantPanel {
prompt_builder: Arc<PromptBuilder>, prompt_builder: Arc<PromptBuilder>,
cx: AsyncWindowContext, cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> { ) -> Task<Result<Entity<Self>>> {
cx.spawn(|mut cx| async move { cx.spawn(async move |cx| {
let slash_commands = Arc::new(SlashCommandWorkingSet::default()); let slash_commands = Arc::new(SlashCommandWorkingSet::default());
let context_store = workspace let context_store = workspace
.update(&mut cx, |workspace, cx| { .update(cx, |workspace, cx| {
let project = workspace.project().clone(); let project = workspace.project().clone();
ContextStore::new(project, prompt_builder.clone(), slash_commands, cx) ContextStore::new(project, prompt_builder.clone(), slash_commands, cx)
})? })?
.await?; .await?;
workspace.update_in(&mut cx, |workspace, window, cx| { workspace.update_in(cx, |workspace, window, cx| {
// TODO: deserialize state. // TODO: deserialize state.
cx.new(|cx| Self::new(workspace, context_store, window, cx)) cx.new(|cx| Self::new(workspace, context_store, window, cx))
}) })
@@ -357,9 +357,9 @@ impl AssistantPanel {
) -> Task<()> { ) -> Task<()> {
let mut status_rx = client.status(); let mut status_rx = client.status();
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
while let Some(status) = status_rx.next().await { while let Some(status) = status_rx.next().await {
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
if this.client_status.is_none() if this.client_status.is_none()
|| this || this
.client_status .client_status
@@ -371,7 +371,7 @@ impl AssistantPanel {
}) })
.log_err(); .log_err();
} }
this.update(&mut cx, |this, _cx| this.watch_client_status = None) this.update(cx, |this, _cx| this.watch_client_status = None)
.log_err(); .log_err();
}) })
} }
@@ -576,11 +576,11 @@ impl AssistantPanel {
if self.authenticate_provider_task.is_none() { if self.authenticate_provider_task.is_none() {
self.authenticate_provider_task = Some(( self.authenticate_provider_task = Some((
provider.id(), provider.id(),
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
if let Some(future) = load_credentials { if let Some(future) = load_credentials {
let _ = future.await; let _ = future.await;
} }
this.update(&mut cx, |this, _cx| { this.update(cx, |this, _cx| {
this.authenticate_provider_task = None; this.authenticate_provider_task = None;
}) })
.log_err(); .log_err();
@@ -641,9 +641,9 @@ impl AssistantPanel {
} }
} else { } else {
let assistant_panel = assistant_panel.downgrade(); let assistant_panel = assistant_panel.downgrade();
cx.spawn_in(window, |workspace, mut cx| async move { cx.spawn_in(window, async move |workspace, cx| {
let Some(task) = let Some(task) =
assistant_panel.update(&mut cx, |assistant, cx| assistant.authenticate(cx))? assistant_panel.update(cx, |assistant, cx| assistant.authenticate(cx))?
else { else {
let answer = cx let answer = cx
.prompt( .prompt(
@@ -665,7 +665,7 @@ impl AssistantPanel {
return Ok(()); return Ok(());
}; };
task.await?; task.await?;
if assistant_panel.update(&mut cx, |panel, cx| panel.is_authenticated(cx))? { if assistant_panel.update(cx, |panel, cx| panel.is_authenticated(cx))? {
cx.update(|window, cx| match inline_assist_target { cx.update(|window, cx| match inline_assist_target {
InlineAssistTarget::Editor(active_editor, include_context) => { InlineAssistTarget::Editor(active_editor, include_context) => {
let assistant_panel = if include_context { let assistant_panel = if include_context {
@@ -698,7 +698,7 @@ impl AssistantPanel {
} }
})? })?
} else { } else {
workspace.update_in(&mut cx, |workspace, window, cx| { workspace.update_in(cx, |workspace, window, cx| {
workspace.focus_panel::<AssistantPanel>(window, cx) workspace.focus_panel::<AssistantPanel>(window, cx)
})?; })?;
} }
@@ -791,10 +791,10 @@ impl AssistantPanel {
.context_store .context_store
.update(cx, |store, cx| store.create_remote_context(cx)); .update(cx, |store, cx| store.create_remote_context(cx));
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
let context = task.await?; let context = task.await?;
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
let workspace = this.workspace.clone(); let workspace = this.workspace.clone();
let project = this.project.clone(); let project = this.project.clone();
let lsp_adapter_delegate = let lsp_adapter_delegate =
@@ -847,9 +847,9 @@ impl AssistantPanel {
self.show_context(editor.clone(), window, cx); self.show_context(editor.clone(), window, cx);
let workspace = self.workspace.clone(); let workspace = self.workspace.clone();
cx.spawn_in(window, move |_, mut cx| async move { cx.spawn_in(window, async move |_, cx| {
workspace workspace
.update_in(&mut cx, |workspace, window, cx| { .update_in(cx, |workspace, window, cx| {
workspace.focus_panel::<AssistantPanel>(window, cx); workspace.focus_panel::<AssistantPanel>(window, cx);
}) })
.ok(); .ok();
@@ -1069,8 +1069,8 @@ impl AssistantPanel {
.filter(|editor| editor.read(cx).context().read(cx).path() == Some(&path)) .filter(|editor| editor.read(cx).context().read(cx).path() == Some(&path))
}); });
if let Some(existing_context) = existing_context { if let Some(existing_context) = existing_context {
return cx.spawn_in(window, |this, mut cx| async move { return cx.spawn_in(window, async move |this, cx| {
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
this.show_context(existing_context, window, cx) this.show_context(existing_context, window, cx)
}) })
}); });
@@ -1085,9 +1085,9 @@ impl AssistantPanel {
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten(); let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
let context = context.await?; let context = context.await?;
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
let editor = cx.new(|cx| { let editor = cx.new(|cx| {
ContextEditor::for_context( ContextEditor::for_context(
context, context,
@@ -1117,8 +1117,8 @@ impl AssistantPanel {
.filter(|editor| *editor.read(cx).context().read(cx).id() == id) .filter(|editor| *editor.read(cx).context().read(cx).id() == id)
}); });
if let Some(existing_context) = existing_context { if let Some(existing_context) = existing_context {
return cx.spawn_in(window, |this, mut cx| async move { return cx.spawn_in(window, async move |this, cx| {
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
this.show_context(existing_context.clone(), window, cx) this.show_context(existing_context.clone(), window, cx)
})?; })?;
Ok(existing_context) Ok(existing_context)
@@ -1134,9 +1134,9 @@ impl AssistantPanel {
.log_err() .log_err()
.flatten(); .flatten();
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
let context = context.await?; let context = context.await?;
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
let editor = cx.new(|cx| { let editor = cx.new(|cx| {
ContextEditor::for_context( ContextEditor::for_context(
context, context,

View File

@@ -1311,9 +1311,9 @@ impl EditorInlineAssists {
assist_ids: Vec::new(), assist_ids: Vec::new(),
scroll_lock: None, scroll_lock: None,
highlight_updates: highlight_updates_tx, highlight_updates: highlight_updates_tx,
_update_highlights: cx.spawn(|cx| { _update_highlights: cx.spawn({
let editor = editor.downgrade(); let editor = editor.downgrade();
async move { async move |cx| {
while let Ok(()) = highlight_updates_rx.changed().await { while let Ok(()) = highlight_updates_rx.changed().await {
let editor = editor.upgrade().context("editor was dropped")?; let editor = editor.upgrade().context("editor was dropped")?;
cx.update_global(|assistant: &mut InlineAssistant, cx| { cx.update_global(|assistant: &mut InlineAssistant, cx| {
@@ -1850,7 +1850,7 @@ impl PromptEditor {
fn count_tokens(&mut self, cx: &mut Context<Self>) { fn count_tokens(&mut self, cx: &mut Context<Self>) {
let assist_id = self.id; let assist_id = self.id;
self.pending_token_count = cx.spawn(|this, mut cx| async move { self.pending_token_count = cx.spawn(async move |this, cx| {
cx.background_executor().timer(Duration::from_secs(1)).await; cx.background_executor().timer(Duration::from_secs(1)).await;
let token_count = cx let token_count = cx
.update_global(|inline_assistant: &mut InlineAssistant, cx| { .update_global(|inline_assistant: &mut InlineAssistant, cx| {
@@ -1862,7 +1862,7 @@ impl PromptEditor {
})?? })??
.await?; .await?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.token_counts = Some(token_count); this.token_counts = Some(token_count);
cx.notify(); cx.notify();
}) })
@@ -2882,7 +2882,7 @@ impl CodegenAlternative {
let request = self.build_request(user_prompt, assistant_panel_context, cx)?; let request = self.build_request(user_prompt, assistant_panel_context, cx)?;
self.request = Some(request.clone()); self.request = Some(request.clone());
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await }) cx.spawn(async move |_, cx| model.stream_completion_text(request, &cx).await)
.boxed_local() .boxed_local()
}; };
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx); self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
@@ -2999,213 +2999,207 @@ impl CodegenAlternative {
let completion = Arc::new(Mutex::new(String::new())); let completion = Arc::new(Mutex::new(String::new()));
let completion_clone = completion.clone(); let completion_clone = completion.clone();
self.generation = cx.spawn(|codegen, mut cx| { self.generation = cx.spawn(async move |codegen, cx| {
async move { let stream = stream.await;
let stream = stream.await; let message_id = stream
let message_id = stream .as_ref()
.as_ref() .ok()
.ok() .and_then(|stream| stream.message_id.clone());
.and_then(|stream| stream.message_id.clone()); let generate = async {
let generate = async { let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let (mut diff_tx, mut diff_rx) = mpsc::channel(1); let executor = cx.background_executor().clone();
let executor = cx.background_executor().clone(); let message_id = message_id.clone();
let message_id = message_id.clone(); let line_based_stream_diff: Task<anyhow::Result<()>> =
let line_based_stream_diff: Task<anyhow::Result<()>> = cx.background_spawn(async move {
cx.background_spawn(async move { let mut response_latency = None;
let mut response_latency = None; let request_start = Instant::now();
let request_start = Instant::now(); let diff = async {
let diff = async { let chunks = StripInvalidSpans::new(stream?.stream);
let chunks = StripInvalidSpans::new(stream?.stream); futures::pin_mut!(chunks);
futures::pin_mut!(chunks); let mut diff = StreamingDiff::new(selected_text.to_string());
let mut diff = StreamingDiff::new(selected_text.to_string()); let mut line_diff = LineDiff::default();
let mut line_diff = LineDiff::default();
let mut new_text = String::new(); let mut new_text = String::new();
let mut base_indent = None; let mut base_indent = None;
let mut line_indent = None; let mut line_indent = None;
let mut first_line = true; let mut first_line = true;
while let Some(chunk) = chunks.next().await { while let Some(chunk) = chunks.next().await {
if response_latency.is_none() { if response_latency.is_none() {
response_latency = Some(request_start.elapsed()); response_latency = Some(request_start.elapsed());
} }
let chunk = chunk?; let chunk = chunk?;
completion_clone.lock().push_str(&chunk); completion_clone.lock().push_str(&chunk);
let mut lines = chunk.split('\n').peekable(); let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() { while let Some(line) = lines.next() {
new_text.push_str(line); new_text.push_str(line);
if line_indent.is_none() { if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) = if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace()) new_text.find(|ch: char| !ch.is_whitespace())
{ {
line_indent = Some(non_whitespace_ch_ix); line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent); base_indent = base_indent.or(line_indent);
let line_indent = line_indent.unwrap(); let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap(); let base_indent = base_indent.unwrap();
let indent_delta = let indent_delta =
line_indent as i32 - base_indent as i32; line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max( let mut corrected_indent_len = cmp::max(
0, 0,
suggested_line_indent.len as i32 + indent_delta, suggested_line_indent.len as i32 + indent_delta,
) )
as usize; as usize;
if first_line { if first_line {
corrected_indent_len = corrected_indent_len corrected_indent_len = corrected_indent_len
.saturating_sub( .saturating_sub(
selection_start.column as usize, selection_start.column as usize,
); );
}
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
} }
}
if line_indent.is_some() { let indent_char = suggested_line_indent.char();
let char_ops = diff.push_new(&new_text); let mut indent_buffer = [0; 4];
line_diff let indent_str =
.push_char_operations(&char_ops, &selected_text); indent_char.encode_utf8(&mut indent_buffer);
diff_tx new_text.replace_range(
.send((char_ops, line_diff.line_operations())) ..line_indent,
.await?; &indent_str.repeat(corrected_indent_len),
);
}
}
if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
new_text.clear();
}
if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear(); new_text.clear();
} }
line_indent = None;
if lines.peek().is_some() { first_line = false;
let char_ops = diff.push_new("\n");
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear();
}
line_indent = None;
first_line = false;
}
} }
} }
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
anyhow::Ok(())
};
let result = diff.await;
let error_message =
result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
result?;
Ok(())
});
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(&mut cx, |codegen, cx| {
codegen.last_equal_ranges.clear();
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
} }
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
cx.notify(); let mut char_ops = diff.push_new(&new_text);
})?; char_ops.extend(diff.finish());
} line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer. anyhow::Ok(())
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff. };
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) =
join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(()) let result = diff.await;
};
let result = generate.await; let error_message = result.as_ref().err().map(|error| error.to_string());
let elapsed_time = start_time.elapsed().as_secs_f64(); report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
codegen result?;
.update(&mut cx, |this, cx| { Ok(())
this.message_id = message_id; });
this.last_equal_ranges.clear();
if let Err(error) = result { while let Some((char_ops, line_ops)) = diff_rx.next().await {
this.status = CodegenStatus::Error(error); codegen.update(cx, |codegen, cx| {
} else { codegen.last_equal_ranges.clear();
this.status = CodegenStatus::Done;
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
} }
this.elapsed_time = Some(elapsed_time); codegen.edits.extend(edits);
this.completion = Some(completion.lock().clone()); codegen.line_operations = line_ops;
cx.emit(CodegenEvent::Finished); codegen.edit_position = Some(snapshot.anchor_after(edit_start));
cx.notify(); cx.notify();
}) })?;
.ok(); }
}
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
codegen
.update(cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
}); });
cx.notify(); cx.notify();
} }
@@ -3323,7 +3317,7 @@ impl CodegenAlternative {
let new_snapshot = self.buffer.read(cx).snapshot(cx); let new_snapshot = self.buffer.read(cx).snapshot(cx);
let new_range = self.range.to_point(&new_snapshot); let new_range = self.range.to_point(&new_snapshot);
cx.spawn(|codegen, mut cx| async move { cx.spawn(async move |codegen, cx| {
let (deleted_row_ranges, inserted_row_ranges) = cx let (deleted_row_ranges, inserted_row_ranges) = cx
.background_spawn(async move { .background_spawn(async move {
let old_text = old_snapshot let old_text = old_snapshot
@@ -3373,7 +3367,7 @@ impl CodegenAlternative {
.await; .await;
codegen codegen
.update(&mut cx, |codegen, cx| { .update(cx, |codegen, cx| {
codegen.diff.deleted_row_ranges = deleted_row_ranges; codegen.diff.deleted_row_ranges = deleted_row_ranges;
codegen.diff.inserted_row_ranges = inserted_row_ranges; codegen.diff.inserted_row_ranges = inserted_row_ranges;
cx.notify(); cx.notify();
@@ -3569,6 +3563,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
title: "Fix with Assistant".into(), title: "Fix with Assistant".into(),
..Default::default() ..Default::default()
})), })),
resolved: true,
}])) }]))
} else { } else {
Task::ready(Ok(Vec::new())) Task::ready(Ok(Vec::new()))
@@ -3586,10 +3581,10 @@ impl CodeActionProvider for AssistantCodeActionProvider {
) -> Task<Result<ProjectTransaction>> { ) -> Task<Result<ProjectTransaction>> {
let editor = self.editor.clone(); let editor = self.editor.clone();
let workspace = self.workspace.clone(); let workspace = self.workspace.clone();
window.spawn(cx, |mut cx| async move { window.spawn(cx, async move |cx| {
let editor = editor.upgrade().context("editor was released")?; let editor = editor.upgrade().context("editor was released")?;
let range = editor let range = editor
.update(&mut cx, |editor, cx| { .update(cx, |editor, cx| {
editor.buffer().update(cx, |multibuffer, cx| { editor.buffer().update(cx, |multibuffer, cx| {
let buffer = buffer.read(cx); let buffer = buffer.read(cx);
let multibuffer_snapshot = multibuffer.read(cx); let multibuffer_snapshot = multibuffer.read(cx);
@@ -3624,7 +3619,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
}) })
})? })?
.context("invalid range")?; .context("invalid range")?;
let assistant_panel = workspace.update(&mut cx, |workspace, cx| { let assistant_panel = workspace.update(cx, |workspace, cx| {
workspace workspace
.panel::<AssistantPanel>(cx) .panel::<AssistantPanel>(cx)
.context("assistant panel was released") .context("assistant panel was released")

View File

@@ -825,7 +825,7 @@ impl PromptEditor {
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else { let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
return; return;
}; };
self.pending_token_count = cx.spawn(|this, mut cx| async move { self.pending_token_count = cx.spawn(async move |this, cx| {
cx.background_executor().timer(Duration::from_secs(1)).await; cx.background_executor().timer(Duration::from_secs(1)).await;
let request = let request =
cx.update_global(|inline_assistant: &mut TerminalInlineAssistant, cx| { cx.update_global(|inline_assistant: &mut TerminalInlineAssistant, cx| {
@@ -833,7 +833,7 @@ impl PromptEditor {
})??; })??;
let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?; let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.token_count = Some(token_count); this.token_count = Some(token_count);
cx.notify(); cx.notify();
}) })
@@ -1140,7 +1140,7 @@ impl Codegen {
let telemetry = self.telemetry.clone(); let telemetry = self.telemetry.clone();
self.status = CodegenStatus::Pending; self.status = CodegenStatus::Pending;
self.transaction = Some(TerminalTransaction::start(self.terminal.clone())); self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
self.generation = cx.spawn(|this, mut cx| async move { self.generation = cx.spawn(async move |this, cx| {
let model_telemetry_id = model.telemetry_id(); let model_telemetry_id = model.telemetry_id();
let model_provider_id = model.provider_id(); let model_provider_id = model.provider_id();
let response = model.stream_completion_text(prompt, &cx).await; let response = model.stream_completion_text(prompt, &cx).await;
@@ -1197,12 +1197,12 @@ impl Codegen {
} }
}); });
this.update(&mut cx, |this, _| { this.update(cx, |this, _| {
this.message_id = message_id; this.message_id = message_id;
})?; })?;
while let Some(hunk) = hunks_rx.next().await { while let Some(hunk) = hunks_rx.next().await {
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
if let Some(transaction) = &mut this.transaction { if let Some(transaction) = &mut this.transaction {
transaction.push(hunk, cx); transaction.push(hunk, cx);
cx.notify(); cx.notify();
@@ -1216,7 +1216,7 @@ impl Codegen {
let result = generate.await; let result = generate.await;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
if let Err(error) = result { if let Err(error) = result {
this.status = CodegenStatus::Error(error); this.status = CodegenStatus::Error(error);
} else { } else {

View File

@@ -39,6 +39,7 @@ fs.workspace = true
futures.workspace = true futures.workspace = true
fuzzy.workspace = true fuzzy.workspace = true
git.workspace = true git.workspace = true
git_ui.workspace = true
gpui.workspace = true gpui.workspace = true
heed.workspace = true heed.workspace = true
html_to_markdown.workspace = true html_to_markdown.workspace = true

View File

@@ -116,7 +116,7 @@ impl ActiveThread {
pub fn cancel_last_completion(&mut self, cx: &mut App) -> bool { pub fn cancel_last_completion(&mut self, cx: &mut App) -> bool {
self.last_error.take(); self.last_error.take();
self.thread self.thread
.update(cx, |thread, _cx| thread.cancel_last_completion()) .update(cx, |thread, cx| thread.cancel_last_completion(cx))
} }
pub fn last_error(&self) -> Option<ThreadError> { pub fn last_error(&self) -> Option<ThreadError> {
@@ -343,8 +343,11 @@ impl ActiveThread {
}); });
} }
ThreadEvent::ToolFinished { ThreadEvent::ToolFinished {
pending_tool_use, .. pending_tool_use,
canceled,
..
} => { } => {
let canceled = *canceled;
if let Some(tool_use) = pending_tool_use { if let Some(tool_use) = pending_tool_use {
self.render_scripting_tool_use_markdown( self.render_scripting_tool_use_markdown(
tool_use.id.clone(), tool_use.id.clone(),
@@ -358,7 +361,7 @@ impl ActiveThread {
if self.thread.read(cx).all_tools_finished() { if self.thread.read(cx).all_tools_finished() {
let pending_refresh_buffers = self.thread.update(cx, |thread, cx| { let pending_refresh_buffers = self.thread.update(cx, |thread, cx| {
thread.action_log().update(cx, |action_log, _cx| { thread.action_log().update(cx, |action_log, _cx| {
action_log.take_pending_refresh_buffers() action_log.take_stale_buffers_in_context()
}) })
}); });
@@ -369,10 +372,10 @@ impl ActiveThread {
cx, cx,
); );
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let updated_context_ids = refresh_task.await; let updated_context_ids = refresh_task.await;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.context_store.read_with(cx, |context_store, cx| { this.context_store.read_with(cx, |context_store, cx| {
context_store context_store
.context() .context()
@@ -391,12 +394,15 @@ impl ActiveThread {
let model_registry = LanguageModelRegistry::read_global(cx); let model_registry = LanguageModelRegistry::read_global(cx);
if let Some(model) = model_registry.active_model() { if let Some(model) = model_registry.active_model() {
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let updated_context = context_update_task.await?; let updated_context = context_update_task.await?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.thread.update(cx, |thread, cx| { this.thread.update(cx, |thread, cx| {
thread.send_tool_results_to_model(model, updated_context, cx); thread.attach_tool_results(updated_context, cx);
if !canceled {
thread.send_to_model(model, RequestKind::Chat, cx);
}
}); });
}) })
}) })
@@ -412,9 +418,9 @@ impl ActiveThread {
/// Only one task to save the thread will be in flight at a time. /// Only one task to save the thread will be in flight at a time.
fn save_thread(&mut self, cx: &mut Context<Self>) { fn save_thread(&mut self, cx: &mut Context<Self>) {
let thread = self.thread.clone(); let thread = self.thread.clone();
self.save_thread_task = Some(cx.spawn(|this, mut cx| async move { self.save_thread_task = Some(cx.spawn(async move |this, cx| {
let task = this let task = this
.update(&mut cx, |this, cx| { .update(cx, |this, cx| {
this.thread_store this.thread_store
.update(cx, |thread_store, cx| thread_store.save_thread(&thread, cx)) .update(cx, |thread_store, cx| thread_store.save_thread(&thread, cx))
}) })
@@ -544,6 +550,7 @@ impl ActiveThread {
let thread = self.thread.read(cx); let thread = self.thread.read(cx);
// Get all the data we need from thread before we start using it in closures // Get all the data we need from thread before we start using it in closures
let checkpoint = thread.checkpoint_for_message(message_id);
let context = thread.context_for_message(message_id); let context = thread.context_for_message(message_id);
let tool_uses = thread.tool_uses_for_message(message_id); let tool_uses = thread.tool_uses_for_message(message_id);
let scripting_tool_uses = thread.scripting_tool_uses_for_message(message_id); let scripting_tool_uses = thread.scripting_tool_uses_for_message(message_id);
@@ -577,7 +584,7 @@ impl ActiveThread {
.p_2p5() .p_2p5()
.child(edit_message_editor) .child(edit_message_editor)
} else { } else {
div().p_2p5().text_ui(cx).child(markdown.clone()) div().text_ui(cx).child(markdown.clone())
}, },
) )
.when_some(context, |parent, context| { .when_some(context, |parent, context| {
@@ -597,15 +604,16 @@ impl ActiveThread {
let styled_message = match message.role { let styled_message = match message.role {
Role::User => v_flex() Role::User => v_flex()
.id(("message-container", ix)) .id(("message-container", ix))
.pt_2p5() .pt_2()
.px_2p5() .pl_2()
.pr_2p5()
.child( .child(
v_flex() v_flex()
.bg(colors.editor_background) .bg(colors.editor_background)
.rounded_lg() .rounded_lg()
.border_1() .border_1()
.border_color(colors.border) .border_color(colors.border)
.shadow_sm() .shadow_md()
.child( .child(
h_flex() h_flex()
.py_1() .py_1()
@@ -696,12 +704,12 @@ impl ActiveThread {
}, },
), ),
) )
.child(message_content), .child(div().p_2().child(message_content)),
), ),
Role::Assistant => { Role::Assistant => {
v_flex() v_flex()
.id(("message-container", ix)) .id(("message-container", ix))
.child(message_content) .child(div().py_3().px_4().child(message_content))
.when( .when(
!tool_uses.is_empty() || !scripting_tool_uses.is_empty(), !tool_uses.is_empty() || !scripting_tool_uses.is_empty(),
|parent| { |parent| {
@@ -723,11 +731,29 @@ impl ActiveThread {
v_flex() v_flex()
.bg(colors.editor_background) .bg(colors.editor_background)
.rounded_sm() .rounded_sm()
.child(message_content), .child(div().p_4().child(message_content)),
), ),
}; };
styled_message.into_any() v_flex()
.when_some(checkpoint, |parent, checkpoint| {
parent.child(
h_flex().pl_2().child(
Button::new("restore-checkpoint", "Restore Checkpoint")
.icon(IconName::Undo)
.size(ButtonSize::Compact)
.on_click(cx.listener(move |this, _, _window, cx| {
this.thread.update(cx, |thread, cx| {
thread
.restore_checkpoint(checkpoint.clone(), cx)
.detach_and_log_err(cx);
});
})),
),
)
})
.child(styled_message)
.into_any()
} }
fn render_tool_use(&self, tool_use: ToolUse, cx: &mut Context<Self>) -> impl IntoElement { fn render_tool_use(&self, tool_use: ToolUse, cx: &mut Context<Self>) -> impl IntoElement {
@@ -739,7 +765,7 @@ impl ActiveThread {
let lighter_border = cx.theme().colors().border.opacity(0.5); let lighter_border = cx.theme().colors().border.opacity(0.5);
div().px_2p5().child( div().px_4().child(
v_flex() v_flex()
.rounded_lg() .rounded_lg()
.border_1() .border_1()

View File

@@ -0,0 +1,59 @@
use std::sync::Arc;
use collections::HashMap;
use gpui::SharedString;
/// A profile for the Zed Agent that controls its behavior.
#[derive(Debug, Clone)]
pub struct AgentProfile {
/// The name of the profile.
pub name: SharedString,
pub tools: HashMap<Arc<str>, bool>,
#[allow(dead_code)]
pub context_servers: HashMap<Arc<str>, ContextServerPreset>,
}
#[derive(Debug, Clone)]
pub struct ContextServerPreset {
#[allow(dead_code)]
pub tools: HashMap<Arc<str>, bool>,
}
impl AgentProfile {
pub fn read_only() -> Self {
Self {
name: "Read-only".into(),
tools: HashMap::from_iter([
("diagnostics".into(), true),
("fetch".into(), true),
("list-directory".into(), true),
("now".into(), true),
("path-search".into(), true),
("read-file".into(), true),
("regex-search".into(), true),
("thinking".into(), true),
]),
context_servers: HashMap::default(),
}
}
pub fn code_writer() -> Self {
Self {
name: "Code Writer".into(),
tools: HashMap::from_iter([
("bash".into(), true),
("delete-path".into(), true),
("diagnostics".into(), true),
("edit-files".into(), true),
("fetch".into(), true),
("list-directory".into(), true),
("now".into(), true),
("path-search".into(), true),
("read-file".into(), true),
("regex-search".into(), true),
("thinking".into(), true),
]),
context_servers: HashMap::default(),
}
}
}

View File

@@ -1,4 +1,5 @@
mod active_thread; mod active_thread;
mod agent_profile;
mod assistant_configuration; mod assistant_configuration;
mod assistant_model_selector; mod assistant_model_selector;
mod assistant_panel; mod assistant_panel;

View File

@@ -1,19 +1,33 @@
use std::sync::Arc; use std::sync::Arc;
use assistant_tool::{ToolSource, ToolWorkingSet};
use collections::HashMap; use collections::HashMap;
use gpui::{Action, AnyView, App, EventEmitter, FocusHandle, Focusable, Subscription}; use context_server::manager::ContextServerManager;
use gpui::{Action, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry}; use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
use ui::{prelude::*, Divider, DividerColor, ElevationIndex}; use ui::{
prelude::*, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Switch, Tooltip,
};
use util::ResultExt as _;
use zed_actions::assistant::DeployPromptLibrary; use zed_actions::assistant::DeployPromptLibrary;
use zed_actions::ExtensionCategoryFilter;
pub struct AssistantConfiguration { pub struct AssistantConfiguration {
focus_handle: FocusHandle, focus_handle: FocusHandle,
configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>, configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>,
context_server_manager: Entity<ContextServerManager>,
expanded_context_server_tools: HashMap<Arc<str>, bool>,
tools: Arc<ToolWorkingSet>,
_registry_subscription: Subscription, _registry_subscription: Subscription,
} }
impl AssistantConfiguration { impl AssistantConfiguration {
pub fn new(window: &mut Window, cx: &mut Context<Self>) -> Self { pub fn new(
context_server_manager: Entity<ContextServerManager>,
tools: Arc<ToolWorkingSet>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let focus_handle = cx.focus_handle(); let focus_handle = cx.focus_handle();
let registry_subscription = cx.subscribe_in( let registry_subscription = cx.subscribe_in(
@@ -36,6 +50,9 @@ impl AssistantConfiguration {
let mut this = Self { let mut this = Self {
focus_handle, focus_handle,
configuration_views_by_provider: HashMap::default(), configuration_views_by_provider: HashMap::default(),
context_server_manager,
expanded_context_server_tools: HashMap::default(),
tools,
_registry_subscription: registry_subscription, _registry_subscription: registry_subscription,
}; };
this.build_provider_configuration_views(window, cx); this.build_provider_configuration_views(window, cx);
@@ -143,6 +160,185 @@ impl AssistantConfiguration {
}), }),
) )
} }
fn render_context_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let context_servers = self.context_server_manager.read(cx).all_servers().clone();
let tools_by_source = self.tools.tools_by_source(cx);
let empty = Vec::new();
const SUBHEADING: &str = "Connect to context servers via the Model Context Protocol either via Zed extensions or directly.";
v_flex()
.p(DynamicSpacing::Base16.rems(cx))
.mt_1()
.gap_2()
.flex_1()
.child(
v_flex()
.gap_0p5()
.child(Headline::new("Context Servers (MCP)").size(HeadlineSize::Small))
.child(Label::new(SUBHEADING).color(Color::Muted)),
)
.children(context_servers.into_iter().map(|context_server| {
let is_running = context_server.client().is_some();
let are_tools_expanded = self
.expanded_context_server_tools
.get(&context_server.id())
.copied()
.unwrap_or_default();
let tools = tools_by_source
.get(&ToolSource::ContextServer {
id: context_server.id().into(),
})
.unwrap_or_else(|| &empty);
let tool_count = tools.len();
v_flex()
.border_1()
.rounded_sm()
.border_color(cx.theme().colors().border)
.bg(cx.theme().colors().editor_background)
.child(
h_flex()
.justify_between()
.px_2()
.py_1()
.when(are_tools_expanded, |element| {
element
.border_b_1()
.border_color(cx.theme().colors().border)
})
.child(
h_flex()
.gap_2()
.child(
Disclosure::new("tool-list-disclosure", are_tools_expanded)
.on_click(cx.listener({
let context_server_id = context_server.id();
move |this, _event, _window, _cx| {
let is_open = this
.expanded_context_server_tools
.entry(context_server_id.clone())
.or_insert(false);
*is_open = !*is_open;
}
})),
)
.child(Indicator::dot().color(if is_running {
Color::Success
} else {
Color::Error
}))
.child(Label::new(context_server.id()))
.child(
Label::new(format!("{tool_count} tools"))
.color(Color::Muted),
),
)
.child(h_flex().child(
Switch::new("context-server-switch", is_running.into()).on_click({
let context_server_manager =
self.context_server_manager.clone();
let context_server = context_server.clone();
move |state, _window, cx| match state {
ToggleState::Unselected | ToggleState::Indeterminate => {
context_server_manager.update(cx, |this, cx| {
this.stop_server(context_server.clone(), cx)
.log_err();
});
}
ToggleState::Selected => {
cx.spawn({
let context_server_manager =
context_server_manager.clone();
let context_server = context_server.clone();
async move |cx| {
if let Some(start_server_task) =
context_server_manager
.update(cx, |this, cx| {
this.start_server(
context_server,
cx,
)
})
.log_err()
{
start_server_task.await.log_err();
}
}
})
.detach();
}
}
}),
)),
)
.map(|parent| {
if !are_tools_expanded {
return parent;
}
parent.child(v_flex().children(tools.into_iter().enumerate().map(
|(ix, tool)| {
h_flex()
.px_2()
.py_1()
.when(ix < tool_count - 1, |element| {
element
.border_b_1()
.border_color(cx.theme().colors().border)
})
.child(Label::new(tool.name()))
},
)))
})
}))
.child(
h_flex()
.justify_between()
.gap_2()
.child(
h_flex().w_full().child(
Button::new("add-context-server", "Add Context Server")
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.full_width()
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.disabled(true)
.tooltip(Tooltip::text("Not yet implemented")),
),
)
.child(
h_flex().w_full().child(
Button::new(
"install-context-server-extensions",
"Install Context Server Extensions",
)
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.full_width()
.icon(IconName::DatabaseZap)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.on_click(|_event, window, cx| {
window.dispatch_action(
zed_actions::Extensions {
category_filter: Some(
ExtensionCategoryFilter::ContextServers,
),
}
.boxed_clone(),
cx,
)
}),
),
),
)
}
} }
impl Render for AssistantConfiguration { impl Render for AssistantConfiguration {
@@ -182,6 +378,8 @@ impl Render for AssistantConfiguration {
), ),
) )
.child(Divider::horizontal().color(DividerColor::Border)) .child(Divider::horizontal().color(DividerColor::Border))
.child(self.render_context_servers_section(cx))
.child(Divider::horizontal().color(DividerColor::Border))
.child( .child(
v_flex() v_flex()
.p(DynamicSpacing::Base16.rems(cx)) .p(DynamicSpacing::Base16.rems(cx))

View File

@@ -110,19 +110,16 @@ impl AssistantPanel {
prompt_builder: Arc<PromptBuilder>, prompt_builder: Arc<PromptBuilder>,
cx: AsyncWindowContext, cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> { ) -> Task<Result<Entity<Self>>> {
cx.spawn(|mut cx| async move { cx.spawn(async move |cx| {
let tools = Arc::new(ToolWorkingSet::default()); let tools = Arc::new(ToolWorkingSet::default());
log::info!("[assistant2-debug] initializing ThreadStore"); let thread_store = workspace.update(cx, |workspace, cx| {
let thread_store = workspace.update(&mut cx, |workspace, cx| {
let project = workspace.project().clone(); let project = workspace.project().clone();
ThreadStore::new(project, tools.clone(), prompt_builder.clone(), cx) ThreadStore::new(project, tools.clone(), prompt_builder.clone(), cx)
})??; })??;
log::info!("[assistant2-debug] finished initializing ThreadStore");
let slash_commands = Arc::new(SlashCommandWorkingSet::default()); let slash_commands = Arc::new(SlashCommandWorkingSet::default());
log::info!("[assistant2-debug] initializing ContextStore");
let context_store = workspace let context_store = workspace
.update(&mut cx, |workspace, cx| { .update(cx, |workspace, cx| {
let project = workspace.project().clone(); let project = workspace.project().clone();
assistant_context_editor::ContextStore::new( assistant_context_editor::ContextStore::new(
project, project,
@@ -132,9 +129,8 @@ impl AssistantPanel {
) )
})? })?
.await?; .await?;
log::info!("[assistant2-debug] finished initializing ContextStore");
workspace.update_in(&mut cx, |workspace, window, cx| { workspace.update_in(cx, |workspace, window, cx| {
cx.new(|cx| Self::new(workspace, thread_store, context_store, window, cx)) cx.new(|cx| Self::new(workspace, thread_store, context_store, window, cx))
}) })
}) })
@@ -147,7 +143,6 @@ impl AssistantPanel {
window: &mut Window, window: &mut Window,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Self { ) -> Self {
log::info!("[assistant2-debug] AssistantPanel::new");
let thread = thread_store.update(cx, |this, cx| this.create_thread(cx)); let thread = thread_store.update(cx, |this, cx| this.create_thread(cx));
let fs = workspace.app_state().fs.clone(); let fs = workspace.app_state().fs.clone();
let project = workspace.project().clone(); let project = workspace.project().clone();
@@ -349,9 +344,9 @@ impl AssistantPanel {
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten(); let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
let context = context.await?; let context = context.await?;
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
let editor = cx.new(|cx| { let editor = cx.new(|cx| {
ContextEditor::for_context( ContextEditor::for_context(
context, context,
@@ -382,9 +377,9 @@ impl AssistantPanel {
.thread_store .thread_store
.update(cx, |this, cx| this.open_thread(thread_id, cx)); .update(cx, |this, cx| this.open_thread(thread_id, cx));
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
let thread = open_thread_task.await?; let thread = open_thread_task.await?;
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
this.active_view = ActiveView::Thread; this.active_view = ActiveView::Thread;
let message_editor_context_store = let message_editor_context_store =
cx.new(|_cx| crate::context_store::ContextStore::new(this.workspace.clone())); cx.new(|_cx| crate::context_store::ContextStore::new(this.workspace.clone()));
@@ -415,8 +410,13 @@ impl AssistantPanel {
} }
pub(crate) fn open_configuration(&mut self, window: &mut Window, cx: &mut Context<Self>) { pub(crate) fn open_configuration(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let context_server_manager = self.thread_store.read(cx).context_server_manager();
let tools = self.thread_store.read(cx).tools();
self.active_view = ActiveView::Configuration; self.active_view = ActiveView::Configuration;
self.configuration = Some(cx.new(|cx| AssistantConfiguration::new(window, cx))); self.configuration = Some(
cx.new(|cx| AssistantConfiguration::new(context_server_manager, tools, window, cx)),
);
if let Some(configuration) = self.configuration.as_ref() { if let Some(configuration) = self.configuration.as_ref() {
self.configuration_subscription = Some(cx.subscribe_in( self.configuration_subscription = Some(cx.subscribe_in(
@@ -450,10 +450,10 @@ impl AssistantPanel {
.languages .languages
.language_for_name("Markdown"); .language_for_name("Markdown");
let thread = self.active_thread(cx); let thread = self.active_thread(cx);
cx.spawn_in(window, |_this, mut cx| async move { cx.spawn_in(window, async move |_this, cx| {
let markdown_language = markdown_language_task.await?; let markdown_language = markdown_language_task.await?;
workspace.update_in(&mut cx, |workspace, window, cx| { workspace.update_in(cx, |workspace, window, cx| {
let thread = thread.read(cx); let thread = thread.read(cx);
let markdown = thread.to_markdown()?; let markdown = thread.to_markdown()?;
let thread_summary = thread let thread_summary = thread

View File

@@ -367,7 +367,7 @@ impl CodegenAlternative {
let request = self.build_request(user_prompt, cx)?; let request = self.build_request(user_prompt, cx)?;
self.request = Some(request.clone()); self.request = Some(request.clone());
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await }) cx.spawn(async move |_, cx| model.stream_completion_text(request, &cx).await)
.boxed_local() .boxed_local()
}; };
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx); self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
@@ -480,213 +480,207 @@ impl CodegenAlternative {
let completion = Arc::new(Mutex::new(String::new())); let completion = Arc::new(Mutex::new(String::new()));
let completion_clone = completion.clone(); let completion_clone = completion.clone();
self.generation = cx.spawn(|codegen, mut cx| { self.generation = cx.spawn(async move |codegen, cx| {
async move { let stream = stream.await;
let stream = stream.await; let message_id = stream
let message_id = stream .as_ref()
.as_ref() .ok()
.ok() .and_then(|stream| stream.message_id.clone());
.and_then(|stream| stream.message_id.clone()); let generate = async {
let generate = async { let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let (mut diff_tx, mut diff_rx) = mpsc::channel(1); let executor = cx.background_executor().clone();
let executor = cx.background_executor().clone(); let message_id = message_id.clone();
let message_id = message_id.clone(); let line_based_stream_diff: Task<anyhow::Result<()>> =
let line_based_stream_diff: Task<anyhow::Result<()>> = cx.background_spawn(async move {
cx.background_spawn(async move { let mut response_latency = None;
let mut response_latency = None; let request_start = Instant::now();
let request_start = Instant::now(); let diff = async {
let diff = async { let chunks = StripInvalidSpans::new(stream?.stream);
let chunks = StripInvalidSpans::new(stream?.stream); futures::pin_mut!(chunks);
futures::pin_mut!(chunks); let mut diff = StreamingDiff::new(selected_text.to_string());
let mut diff = StreamingDiff::new(selected_text.to_string()); let mut line_diff = LineDiff::default();
let mut line_diff = LineDiff::default();
let mut new_text = String::new(); let mut new_text = String::new();
let mut base_indent = None; let mut base_indent = None;
let mut line_indent = None; let mut line_indent = None;
let mut first_line = true; let mut first_line = true;
while let Some(chunk) = chunks.next().await { while let Some(chunk) = chunks.next().await {
if response_latency.is_none() { if response_latency.is_none() {
response_latency = Some(request_start.elapsed()); response_latency = Some(request_start.elapsed());
} }
let chunk = chunk?; let chunk = chunk?;
completion_clone.lock().push_str(&chunk); completion_clone.lock().push_str(&chunk);
let mut lines = chunk.split('\n').peekable(); let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() { while let Some(line) = lines.next() {
new_text.push_str(line); new_text.push_str(line);
if line_indent.is_none() { if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) = if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace()) new_text.find(|ch: char| !ch.is_whitespace())
{ {
line_indent = Some(non_whitespace_ch_ix); line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent); base_indent = base_indent.or(line_indent);
let line_indent = line_indent.unwrap(); let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap(); let base_indent = base_indent.unwrap();
let indent_delta = let indent_delta =
line_indent as i32 - base_indent as i32; line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max( let mut corrected_indent_len = cmp::max(
0, 0,
suggested_line_indent.len as i32 + indent_delta, suggested_line_indent.len as i32 + indent_delta,
) )
as usize; as usize;
if first_line { if first_line {
corrected_indent_len = corrected_indent_len corrected_indent_len = corrected_indent_len
.saturating_sub( .saturating_sub(
selection_start.column as usize, selection_start.column as usize,
); );
}
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
} }
}
if line_indent.is_some() { let indent_char = suggested_line_indent.char();
let char_ops = diff.push_new(&new_text); let mut indent_buffer = [0; 4];
line_diff let indent_str =
.push_char_operations(&char_ops, &selected_text); indent_char.encode_utf8(&mut indent_buffer);
diff_tx new_text.replace_range(
.send((char_ops, line_diff.line_operations())) ..line_indent,
.await?; &indent_str.repeat(corrected_indent_len),
);
}
}
if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
new_text.clear();
}
if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear(); new_text.clear();
} }
line_indent = None;
if lines.peek().is_some() { first_line = false;
let char_ops = diff.push_new("\n");
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear();
}
line_indent = None;
first_line = false;
}
} }
} }
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
anyhow::Ok(())
};
let result = diff.await;
let error_message =
result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
result?;
Ok(())
});
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(&mut cx, |codegen, cx| {
codegen.last_equal_ranges.clear();
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
} }
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
cx.notify(); let mut char_ops = diff.push_new(&new_text);
})?; char_ops.extend(diff.finish());
} line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer. anyhow::Ok(())
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff. };
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) =
join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(()) let result = diff.await;
};
let result = generate.await; let error_message = result.as_ref().err().map(|error| error.to_string());
let elapsed_time = start_time.elapsed().as_secs_f64(); report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
codegen result?;
.update(&mut cx, |this, cx| { Ok(())
this.message_id = message_id; });
this.last_equal_ranges.clear();
if let Err(error) = result { while let Some((char_ops, line_ops)) = diff_rx.next().await {
this.status = CodegenStatus::Error(error); codegen.update(cx, |codegen, cx| {
} else { codegen.last_equal_ranges.clear();
this.status = CodegenStatus::Done;
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
} }
this.elapsed_time = Some(elapsed_time); codegen.edits.extend(edits);
this.completion = Some(completion.lock().clone()); codegen.line_operations = line_ops;
cx.emit(CodegenEvent::Finished); codegen.edit_position = Some(snapshot.anchor_after(edit_start));
cx.notify(); cx.notify();
}) })?;
.ok(); }
}
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
codegen
.update(cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
}); });
cx.notify(); cx.notify();
} }
@@ -804,7 +798,7 @@ impl CodegenAlternative {
let new_snapshot = self.buffer.read(cx).snapshot(cx); let new_snapshot = self.buffer.read(cx).snapshot(cx);
let new_range = self.range.to_point(&new_snapshot); let new_range = self.range.to_point(&new_snapshot);
cx.spawn(|codegen, mut cx| async move { cx.spawn(async move |codegen, cx| {
let (deleted_row_ranges, inserted_row_ranges) = cx let (deleted_row_ranges, inserted_row_ranges) = cx
.background_spawn(async move { .background_spawn(async move {
let old_text = old_snapshot let old_text = old_snapshot
@@ -854,7 +848,7 @@ impl CodegenAlternative {
.await; .await;
codegen codegen
.update(&mut cx, |codegen, cx| { .update(cx, |codegen, cx| {
codegen.diff.deleted_row_ranges = deleted_row_ranges; codegen.diff.deleted_row_ranges = deleted_row_ranges;
codegen.diff.inserted_row_ranges = inserted_row_ranges; codegen.diff.inserted_row_ranges = inserted_row_ranges;
cx.notify(); cx.notify();

View File

@@ -43,15 +43,6 @@ pub enum ContextKind {
} }
impl ContextKind { impl ContextKind {
pub fn label(&self) -> &'static str {
match self {
ContextKind::File => "File",
ContextKind::Directory => "Folder",
ContextKind::FetchedUrl => "Fetch",
ContextKind::Thread => "Thread",
}
}
pub fn icon(&self) -> IconName { pub fn icon(&self) -> IconName {
match self { match self {
ContextKind::File => IconName::File, ContextKind::File => IconName::File,

View File

@@ -1,4 +1,3 @@
mod directory_context_picker;
mod fetch_context_picker; mod fetch_context_picker;
mod file_context_picker; mod file_context_picker;
mod thread_context_picker; mod thread_context_picker;
@@ -15,8 +14,6 @@ use thread_context_picker::{render_thread_context_entry, ThreadContextEntry};
use ui::{prelude::*, ContextMenu, ContextMenuEntry, ContextMenuItem}; use ui::{prelude::*, ContextMenu, ContextMenuEntry, ContextMenuItem};
use workspace::{notifications::NotifyResultExt, Workspace}; use workspace::{notifications::NotifyResultExt, Workspace};
use crate::context::ContextKind;
use crate::context_picker::directory_context_picker::DirectoryContextPicker;
use crate::context_picker::fetch_context_picker::FetchContextPicker; use crate::context_picker::fetch_context_picker::FetchContextPicker;
use crate::context_picker::file_context_picker::FileContextPicker; use crate::context_picker::file_context_picker::FileContextPicker;
use crate::context_picker::thread_context_picker::ThreadContextPicker; use crate::context_picker::thread_context_picker::ThreadContextPicker;
@@ -30,17 +27,41 @@ pub enum ConfirmBehavior {
Close, Close,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ContextPickerMode { enum ContextPickerMode {
File,
Fetch,
Thread,
}
impl ContextPickerMode {
pub fn label(&self) -> &'static str {
match self {
Self::File => "File/Directory",
Self::Fetch => "Fetch",
Self::Thread => "Thread",
}
}
pub fn icon(&self) -> IconName {
match self {
Self::File => IconName::File,
Self::Fetch => IconName::Globe,
Self::Thread => IconName::MessageCircle,
}
}
}
#[derive(Debug, Clone)]
enum ContextPickerState {
Default(Entity<ContextMenu>), Default(Entity<ContextMenu>),
File(Entity<FileContextPicker>), File(Entity<FileContextPicker>),
Directory(Entity<DirectoryContextPicker>),
Fetch(Entity<FetchContextPicker>), Fetch(Entity<FetchContextPicker>),
Thread(Entity<ThreadContextPicker>), Thread(Entity<ThreadContextPicker>),
} }
pub(super) struct ContextPicker { pub(super) struct ContextPicker {
mode: ContextPickerMode, mode: ContextPickerState,
workspace: WeakEntity<Workspace>, workspace: WeakEntity<Workspace>,
editor: WeakEntity<Editor>, editor: WeakEntity<Editor>,
context_store: WeakEntity<ContextStore>, context_store: WeakEntity<ContextStore>,
@@ -59,7 +80,7 @@ impl ContextPicker {
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Self { ) -> Self {
ContextPicker { ContextPicker {
mode: ContextPickerMode::Default(ContextMenu::build( mode: ContextPickerState::Default(ContextMenu::build(
window, window,
cx, cx,
|menu, _window, _cx| menu, |menu, _window, _cx| menu,
@@ -73,7 +94,7 @@ impl ContextPicker {
} }
pub fn init(&mut self, window: &mut Window, cx: &mut Context<Self>) { pub fn init(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.mode = ContextPickerMode::Default(self.build_menu(window, cx)); self.mode = ContextPickerState::Default(self.build_menu(window, cx));
cx.notify(); cx.notify();
} }
@@ -88,13 +109,9 @@ impl ContextPicker {
.enumerate() .enumerate()
.map(|(ix, entry)| self.recent_menu_item(context_picker.clone(), ix, entry)); .map(|(ix, entry)| self.recent_menu_item(context_picker.clone(), ix, entry));
let mut context_kinds = vec![ let mut modes = vec![ContextPickerMode::File, ContextPickerMode::Fetch];
ContextKind::File,
ContextKind::Directory,
ContextKind::FetchedUrl,
];
if self.allow_threads() { if self.allow_threads() {
context_kinds.push(ContextKind::Thread); modes.push(ContextPickerMode::Thread);
} }
let menu = menu let menu = menu
@@ -112,15 +129,15 @@ impl ContextPicker {
}) })
.extend(recent_entries) .extend(recent_entries)
.when(has_recent, |menu| menu.separator()) .when(has_recent, |menu| menu.separator())
.extend(context_kinds.into_iter().map(|kind| { .extend(modes.into_iter().map(|mode| {
let context_picker = context_picker.clone(); let context_picker = context_picker.clone();
ContextMenuEntry::new(kind.label()) ContextMenuEntry::new(mode.label())
.icon(kind.icon()) .icon(mode.icon())
.icon_size(IconSize::XSmall) .icon_size(IconSize::XSmall)
.icon_color(Color::Muted) .icon_color(Color::Muted)
.handler(move |window, cx| { .handler(move |window, cx| {
context_picker.update(cx, |this, cx| this.select_kind(kind, window, cx)) context_picker.update(cx, |this, cx| this.select_mode(mode, window, cx))
}) })
})); }));
@@ -143,12 +160,17 @@ impl ContextPicker {
self.thread_store.is_some() self.thread_store.is_some()
} }
fn select_kind(&mut self, kind: ContextKind, window: &mut Window, cx: &mut Context<Self>) { fn select_mode(
&mut self,
mode: ContextPickerMode,
window: &mut Window,
cx: &mut Context<Self>,
) {
let context_picker = cx.entity().downgrade(); let context_picker = cx.entity().downgrade();
match kind { match mode {
ContextKind::File => { ContextPickerMode::File => {
self.mode = ContextPickerMode::File(cx.new(|cx| { self.mode = ContextPickerState::File(cx.new(|cx| {
FileContextPicker::new( FileContextPicker::new(
context_picker.clone(), context_picker.clone(),
self.workspace.clone(), self.workspace.clone(),
@@ -160,20 +182,8 @@ impl ContextPicker {
) )
})); }));
} }
ContextKind::Directory => { ContextPickerMode::Fetch => {
self.mode = ContextPickerMode::Directory(cx.new(|cx| { self.mode = ContextPickerState::Fetch(cx.new(|cx| {
DirectoryContextPicker::new(
context_picker.clone(),
self.workspace.clone(),
self.context_store.clone(),
self.confirm_behavior,
window,
cx,
)
}));
}
ContextKind::FetchedUrl => {
self.mode = ContextPickerMode::Fetch(cx.new(|cx| {
FetchContextPicker::new( FetchContextPicker::new(
context_picker.clone(), context_picker.clone(),
self.workspace.clone(), self.workspace.clone(),
@@ -184,9 +194,9 @@ impl ContextPicker {
) )
})); }));
} }
ContextKind::Thread => { ContextPickerMode::Thread => {
if let Some(thread_store) = self.thread_store.as_ref() { if let Some(thread_store) = self.thread_store.as_ref() {
self.mode = ContextPickerMode::Thread(cx.new(|cx| { self.mode = ContextPickerState::Thread(cx.new(|cx| {
ThreadContextPicker::new( ThreadContextPicker::new(
thread_store.clone(), thread_store.clone(),
context_picker.clone(), context_picker.clone(),
@@ -224,6 +234,7 @@ impl ContextPicker {
ElementId::NamedInteger("ctx-recent".into(), ix), ElementId::NamedInteger("ctx-recent".into(), ix),
&path, &path,
&path_prefix, &path_prefix,
false,
context_store.clone(), context_store.clone(),
cx, cx,
) )
@@ -270,10 +281,8 @@ impl ContextPicker {
context_store.add_file_from_path(project_path.clone(), cx) context_store.add_file_from_path(project_path.clone(), cx)
}); });
cx.spawn_in(window, |_, mut cx| async move { cx.spawn_in(window, async move |_, cx| task.await.notify_async_err(cx))
task.await.notify_async_err(&mut cx) .detach();
})
.detach();
cx.notify(); cx.notify();
} }
@@ -296,13 +305,13 @@ impl ContextPicker {
}; };
let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&thread.id, cx)); let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&thread.id, cx));
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let thread = open_thread_task.await?; let thread = open_thread_task.await?;
context_store.update(&mut cx, |context_store, cx| { context_store.update(cx, |context_store, cx| {
context_store.add_thread(thread, cx); context_store.add_thread(thread, cx);
})?; })?;
this.update(&mut cx, |_this, cx| cx.notify()) this.update(cx, |_this, cx| cx.notify())
}) })
} }
@@ -392,11 +401,10 @@ impl EventEmitter<DismissEvent> for ContextPicker {}
impl Focusable for ContextPicker { impl Focusable for ContextPicker {
fn focus_handle(&self, cx: &App) -> FocusHandle { fn focus_handle(&self, cx: &App) -> FocusHandle {
match &self.mode { match &self.mode {
ContextPickerMode::Default(menu) => menu.focus_handle(cx), ContextPickerState::Default(menu) => menu.focus_handle(cx),
ContextPickerMode::File(file_picker) => file_picker.focus_handle(cx), ContextPickerState::File(file_picker) => file_picker.focus_handle(cx),
ContextPickerMode::Directory(directory_picker) => directory_picker.focus_handle(cx), ContextPickerState::Fetch(fetch_picker) => fetch_picker.focus_handle(cx),
ContextPickerMode::Fetch(fetch_picker) => fetch_picker.focus_handle(cx), ContextPickerState::Thread(thread_picker) => thread_picker.focus_handle(cx),
ContextPickerMode::Thread(thread_picker) => thread_picker.focus_handle(cx),
} }
} }
} }
@@ -407,13 +415,10 @@ impl Render for ContextPicker {
.w(px(400.)) .w(px(400.))
.min_w(px(400.)) .min_w(px(400.))
.map(|parent| match &self.mode { .map(|parent| match &self.mode {
ContextPickerMode::Default(menu) => parent.child(menu.clone()), ContextPickerState::Default(menu) => parent.child(menu.clone()),
ContextPickerMode::File(file_picker) => parent.child(file_picker.clone()), ContextPickerState::File(file_picker) => parent.child(file_picker.clone()),
ContextPickerMode::Directory(directory_picker) => { ContextPickerState::Fetch(fetch_picker) => parent.child(fetch_picker.clone()),
parent.child(directory_picker.clone()) ContextPickerState::Thread(thread_picker) => parent.child(thread_picker.clone()),
}
ContextPickerMode::Fetch(fetch_picker) => parent.child(fetch_picker.clone()),
ContextPickerMode::Thread(thread_picker) => parent.child(thread_picker.clone()),
}) })
} }
} }

View File

@@ -1,269 +0,0 @@
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use fuzzy::PathMatch;
use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity};
use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
use ui::{prelude::*, ListItem};
use util::ResultExt as _;
use workspace::{notifications::NotifyResultExt, Workspace};
use crate::context_picker::{ConfirmBehavior, ContextPicker};
use crate::context_store::ContextStore;
/// Modal UI for picking a worktree directory to attach as assistant context.
///
/// Thin wrapper that owns a fuzzy-search [`Picker`] driven by
/// [`DirectoryContextPickerDelegate`].
pub struct DirectoryContextPicker {
    // The underlying uniform-list picker entity; all focus/render calls forward to it.
    picker: Entity<Picker<DirectoryContextPickerDelegate>>,
}
impl DirectoryContextPicker {
    /// Builds the directory picker, wiring a uniform-list [`Picker`] to a
    /// [`DirectoryContextPickerDelegate`] that performs the directory search
    /// and handles confirmation.
    pub fn new(
        context_picker: WeakEntity<ContextPicker>,
        workspace: WeakEntity<Workspace>,
        context_store: WeakEntity<ContextStore>,
        confirm_behavior: ConfirmBehavior,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Self {
        // Construct the delegate inside the entity closure; the handles are
        // simply moved in, so behavior matches building it up front.
        Self {
            picker: cx.new(|cx| {
                Picker::uniform_list(
                    DirectoryContextPickerDelegate::new(
                        context_picker,
                        workspace,
                        context_store,
                        confirm_behavior,
                    ),
                    window,
                    cx,
                )
            }),
        }
    }
}
impl Focusable for DirectoryContextPicker {
    /// Forward focus to the inner picker so keyboard input reaches its query editor.
    fn focus_handle(&self, cx: &App) -> FocusHandle {
        self.picker.focus_handle(cx)
    }
}
impl Render for DirectoryContextPicker {
    /// Render by returning the inner picker entity; this wrapper adds no extra chrome.
    fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
        self.picker.clone()
    }
}
/// Picker delegate that searches worktree directories and adds the chosen
/// folder to the assistant's context store.
pub struct DirectoryContextPickerDelegate {
    // Parent picker, notified (e.g. dismissed) after a selection is confirmed.
    context_picker: WeakEntity<ContextPicker>,
    // Workspace handle used to enumerate worktrees for searching.
    workspace: WeakEntity<Workspace>,
    // Store that receives the selected directory as context.
    context_store: WeakEntity<ContextStore>,
    // Whether the picker closes or stays open after confirming a selection.
    confirm_behavior: ConfirmBehavior,
    // Current fuzzy-match results shown in the list.
    matches: Vec<PathMatch>,
    // Index of the highlighted row within `matches`.
    selected_index: usize,
}
impl DirectoryContextPickerDelegate {
    /// Creates a delegate with an empty result list and the cursor on the first row.
    pub fn new(
        context_picker: WeakEntity<ContextPicker>,
        workspace: WeakEntity<Workspace>,
        context_store: WeakEntity<ContextStore>,
        confirm_behavior: ConfirmBehavior,
    ) -> Self {
        Self {
            context_picker,
            workspace,
            context_store,
            confirm_behavior,
            matches: Vec::new(),
            selected_index: 0,
        }
    }

    /// Produces directory matches for `query`.
    ///
    /// Empty query: synchronously lists every directory of every worktree in
    /// enumeration order, each as a zero-score `PathMatch` (no ranking).
    /// Non-empty query: fuzzy-matches directory paths across the visible
    /// worktrees on the background executor, returning at most 100 results.
    /// `cancellation_flag` lets a newer search abort this one mid-flight.
    fn search(
        &mut self,
        query: String,
        cancellation_flag: Arc<AtomicBool>,
        workspace: &Entity<Workspace>,
        cx: &mut Context<Picker<Self>>,
    ) -> Task<Vec<PathMatch>> {
        if query.is_empty() {
            let workspace = workspace.read(cx);
            let project = workspace.project().read(cx);
            let directory_matches = project.worktrees(cx).flat_map(|worktree| {
                let worktree = worktree.read(cx);
                // The worktree root name prefixes each relative path in the UI.
                let path_prefix: Arc<str> = worktree.root_name().into();
                worktree.directories(false, 0).map(move |entry| PathMatch {
                    score: 0.,
                    positions: Vec::new(),
                    worktree_id: worktree.id().to_usize(),
                    path: entry.path.clone(),
                    path_prefix: path_prefix.clone(),
                    distance_to_relative_ancestor: 0,
                    is_dir: true,
                })
            });
            // No filtering needed, so resolve immediately without spawning.
            Task::ready(directory_matches.collect())
        } else {
            let worktrees = workspace.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
            let candidate_sets = worktrees
                .into_iter()
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    PathMatchCandidateSet {
                        snapshot: worktree.snapshot(),
                        // Include ignored entries only when the worktree root
                        // itself is ignored (otherwise nothing would match).
                        include_ignored: worktree
                            .root_entry()
                            .map_or(false, |entry| entry.is_ignored),
                        include_root_name: true,
                        candidates: project::Candidates::Directories,
                    }
                })
                .collect::<Vec<_>>();
            let executor = cx.background_executor().clone();
            // Run the fuzzy match off the main thread; cap results at 100.
            cx.foreground_executor().spawn(async move {
                fuzzy::match_path_sets(
                    candidate_sets.as_slice(),
                    query.as_str(),
                    None,
                    false,
                    100,
                    &cancellation_flag,
                    executor,
                )
                .await
            })
        }
    }
}
impl PickerDelegate for DirectoryContextPickerDelegate {
    type ListItem = ListItem;

    /// Number of directory matches currently shown.
    fn match_count(&self) -> usize {
        self.matches.len()
    }

    /// Index of the highlighted row.
    fn selected_index(&self) -> usize {
        self.selected_index
    }

    fn set_selected_index(
        &mut self,
        ix: usize,
        _window: &mut Window,
        _cx: &mut Context<Picker<Self>>,
    ) {
        self.selected_index = ix;
    }

    fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
        "Search folders…".into()
    }

    /// Refreshes `matches` for `query`.
    ///
    /// Filters out the empty relative path (the worktree root is not a
    /// selectable directory), resets the selection to the first result, and
    /// notifies so the list re-renders — consistent with the thread context
    /// picker delegate. Without the reset, a stale `selected_index` could
    /// point past the end of a smaller result set.
    fn update_matches(
        &mut self,
        query: String,
        _window: &mut Window,
        cx: &mut Context<Picker<Self>>,
    ) -> Task<()> {
        let Some(workspace) = self.workspace.upgrade() else {
            return Task::ready(());
        };
        let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx);
        cx.spawn(|this, mut cx| async move {
            let mut paths = search_task.await;
            // The matcher can yield the worktree root as an empty relative
            // path; drop it.
            let empty_path = Path::new("");
            paths.retain(|path_match| path_match.path.as_ref() != empty_path);
            this.update(&mut cx, |this, cx| {
                this.delegate.matches = paths;
                this.delegate.selected_index = 0;
                cx.notify();
            })
            .log_err();
        })
    }

    /// Adds the selected directory to the context store, then either keeps the
    /// picker open or dismisses it per `confirm_behavior`.
    fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
        let Some(mat) = self.matches.get(self.selected_index) else {
            return;
        };
        let project_path = ProjectPath {
            worktree_id: WorktreeId::from_usize(mat.worktree_id),
            path: mat.path.clone(),
        };
        // `update` fails only if the context store was dropped; bail quietly.
        let Some(task) = self
            .context_store
            .update(cx, |context_store, cx| {
                context_store.add_directory(project_path, cx)
            })
            .ok()
        else {
            return;
        };
        let confirm_behavior = self.confirm_behavior;
        cx.spawn_in(window, |this, mut cx| async move {
            // Errors adding the directory are surfaced as a notification
            // (`notify_async_err`) rather than propagated.
            match task.await.notify_async_err(&mut cx) {
                None => anyhow::Ok(()),
                Some(()) => this.update_in(&mut cx, |this, window, cx| match confirm_behavior {
                    ConfirmBehavior::KeepOpen => {}
                    ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
                }),
            }
        })
        .detach_and_log_err(cx);
    }

    /// Tells the parent `ContextPicker` to close this picker.
    fn dismissed(&mut self, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
        self.context_picker
            .update(cx, |_, cx| {
                cx.emit(DismissEvent);
            })
            .ok();
    }

    /// Renders one result row: folder icon, directory path, and an "Added"
    /// badge when the directory is already in the context store.
    fn render_match(
        &self,
        ix: usize,
        selected: bool,
        _window: &mut Window,
        cx: &mut Context<Picker<Self>>,
    ) -> Option<Self::ListItem> {
        let path_match = &self.matches[ix];
        let directory_name = path_match.path.to_string_lossy().to_string();
        let added = self.context_store.upgrade().map_or(false, |context_store| {
            context_store
                .read(cx)
                .includes_directory(&path_match.path)
                .is_some()
        });
        Some(
            ListItem::new(ix)
                .inset(true)
                .toggle_state(selected)
                .start_slot(
                    Icon::new(IconName::Folder)
                        .size(IconSize::XSmall)
                        .color(Color::Muted),
                )
                .child(Label::new(directory_name))
                .when(added, |el| {
                    el.end_slot(
                        h_flex()
                            .gap_1()
                            .child(
                                Icon::new(IconName::Check)
                                    .size(IconSize::Small)
                                    .color(Color::Success),
                            )
                            .child(Label::new("Added").size(LabelSize::Small)),
                    )
                }),
        )
    }
}

View File

@@ -206,12 +206,12 @@ impl PickerDelegate for FetchContextPickerDelegate {
let http_client = workspace.read(cx).client().http_client().clone(); let http_client = workspace.read(cx).client().http_client().clone();
let url = self.url.clone(); let url = self.url.clone();
let confirm_behavior = self.confirm_behavior; let confirm_behavior = self.confirm_behavior;
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
let text = cx let text = cx
.background_spawn(Self::build_message(http_client, url.clone())) .background_spawn(Self::build_message(http_client, url.clone()))
.await?; .await?;
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
this.delegate this.delegate
.context_store .context_store
.update(cx, |context_store, _cx| { .update(cx, |context_store, _cx| {

View File

@@ -99,7 +99,6 @@ impl FileContextPickerDelegate {
query: String, query: String,
cancellation_flag: Arc<AtomicBool>, cancellation_flag: Arc<AtomicBool>,
workspace: &Entity<Workspace>, workspace: &Entity<Workspace>,
cx: &mut Context<Picker<Self>>, cx: &mut Context<Picker<Self>>,
) -> Task<Vec<PathMatch>> { ) -> Task<Vec<PathMatch>> {
if query.is_empty() { if query.is_empty() {
@@ -124,14 +123,14 @@ impl FileContextPickerDelegate {
let file_matches = project.worktrees(cx).flat_map(|worktree| { let file_matches = project.worktrees(cx).flat_map(|worktree| {
let worktree = worktree.read(cx); let worktree = worktree.read(cx);
let path_prefix: Arc<str> = worktree.root_name().into(); let path_prefix: Arc<str> = worktree.root_name().into();
worktree.files(false, 0).map(move |entry| PathMatch { worktree.entries(false, 0).map(move |entry| PathMatch {
score: 0., score: 0.,
positions: Vec::new(), positions: Vec::new(),
worktree_id: worktree.id().to_usize(), worktree_id: worktree.id().to_usize(),
path: entry.path.clone(), path: entry.path.clone(),
path_prefix: path_prefix.clone(), path_prefix: path_prefix.clone(),
distance_to_relative_ancestor: 0, distance_to_relative_ancestor: 0,
is_dir: false, is_dir: entry.is_dir(),
}) })
}); });
@@ -149,7 +148,7 @@ impl FileContextPickerDelegate {
.root_entry() .root_entry()
.map_or(false, |entry| entry.is_ignored), .map_or(false, |entry| entry.is_ignored),
include_root_name: true, include_root_name: true,
candidates: project::Candidates::Files, candidates: project::Candidates::Entries,
} }
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
@@ -192,7 +191,7 @@ impl PickerDelegate for FileContextPickerDelegate {
} }
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> { fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
"Search files…".into() "Search files & directories".into()
} }
fn update_matches( fn update_matches(
@@ -207,11 +206,11 @@ impl PickerDelegate for FileContextPickerDelegate {
let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx); let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx);
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
// TODO: This should be probably be run in the background. // TODO: This should be probably be run in the background.
let paths = search_task.await; let paths = search_task.await;
this.update(&mut cx, |this, _cx| { this.update(cx, |this, _cx| {
this.delegate.matches = paths; this.delegate.matches = paths;
}) })
.log_err(); .log_err();
@@ -223,13 +222,11 @@ impl PickerDelegate for FileContextPickerDelegate {
return; return;
}; };
let Some(file_name) = mat let file_name = mat
.path .path
.file_name() .file_name()
.map(|os_str| os_str.to_string_lossy().into_owned()) .map(|os_str| os_str.to_string_lossy().into_owned())
else { .unwrap_or(mat.path_prefix.to_string());
return;
};
let full_path = mat.path.display().to_string(); let full_path = mat.path.display().to_string();
@@ -238,6 +235,8 @@ impl PickerDelegate for FileContextPickerDelegate {
path: mat.path.clone(), path: mat.path.clone(),
}; };
let is_directory = mat.is_dir;
let Some(editor_entity) = self.editor.upgrade() else { let Some(editor_entity) = self.editor.upgrade() else {
return; return;
}; };
@@ -288,8 +287,12 @@ impl PickerDelegate for FileContextPickerDelegate {
editor.insert("\n", window, cx); // Needed to end the fold editor.insert("\n", window, cx); // Needed to end the fold
let file_icon = FileIcons::get_icon(&Path::new(&full_path), cx) let file_icon = if is_directory {
.unwrap_or_else(|| SharedString::new("")); FileIcons::get_folder_icon(false, cx)
} else {
FileIcons::get_icon(&Path::new(&full_path), cx)
}
.unwrap_or_else(|| SharedString::new(""));
let placeholder = FoldPlaceholder { let placeholder = FoldPlaceholder {
render: render_fold_icon_button( render: render_fold_icon_button(
@@ -330,7 +333,11 @@ impl PickerDelegate for FileContextPickerDelegate {
let Some(task) = self let Some(task) = self
.context_store .context_store
.update(cx, |context_store, cx| { .update(cx, |context_store, cx| {
context_store.add_file_from_path(project_path, cx) if is_directory {
context_store.add_directory(project_path, cx)
} else {
context_store.add_file_from_path(project_path, cx)
}
}) })
.ok() .ok()
else { else {
@@ -338,10 +345,10 @@ impl PickerDelegate for FileContextPickerDelegate {
}; };
let confirm_behavior = self.confirm_behavior; let confirm_behavior = self.confirm_behavior;
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
match task.await.notify_async_err(&mut cx) { match task.await.notify_async_err(cx) {
None => anyhow::Ok(()), None => anyhow::Ok(()),
Some(()) => this.update_in(&mut cx, |this, window, cx| match confirm_behavior { Some(()) => this.update_in(cx, |this, window, cx| match confirm_behavior {
ConfirmBehavior::KeepOpen => {} ConfirmBehavior::KeepOpen => {}
ConfirmBehavior::Close => this.delegate.dismissed(window, cx), ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
}), }),
@@ -375,6 +382,7 @@ impl PickerDelegate for FileContextPickerDelegate {
ElementId::NamedInteger("file-ctx-picker".into(), ix), ElementId::NamedInteger("file-ctx-picker".into(), ix),
&path_match.path, &path_match.path,
&path_match.path_prefix, &path_match.path_prefix,
path_match.is_dir,
self.context_store.clone(), self.context_store.clone(),
cx, cx,
)), )),
@@ -386,6 +394,7 @@ pub fn render_file_context_entry(
id: ElementId, id: ElementId,
path: &Path, path: &Path,
path_prefix: &Arc<str>, path_prefix: &Arc<str>,
is_directory: bool,
context_store: WeakEntity<ContextStore>, context_store: WeakEntity<ContextStore>,
cx: &App, cx: &App,
) -> Stateful<Div> { ) -> Stateful<Div> {
@@ -409,13 +418,24 @@ pub fn render_file_context_entry(
(file_name, Some(directory)) (file_name, Some(directory))
}; };
let added = context_store let added = context_store.upgrade().and_then(|context_store| {
.upgrade() if is_directory {
.and_then(|context_store| context_store.read(cx).will_include_file_path(path, cx)); context_store
.read(cx)
.includes_directory(path)
.map(FileInclusion::Direct)
} else {
context_store.read(cx).will_include_file_path(path, cx)
}
});
let file_icon = FileIcons::get_icon(&path, cx) let file_icon = if is_directory {
.map(Icon::from_path) FileIcons::get_folder_icon(false, cx)
.unwrap_or_else(|| Icon::new(IconName::File)); } else {
FileIcons::get_icon(&path, cx)
}
.map(Icon::from_path)
.unwrap_or_else(|| Icon::new(IconName::File));
h_flex() h_flex()
.id(id) .id(id)

View File

@@ -149,9 +149,9 @@ impl PickerDelegate for ThreadContextPickerDelegate {
} }
}); });
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
let matches = search_task.await; let matches = search_task.await;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.delegate.matches = matches; this.delegate.matches = matches;
this.delegate.selected_index = 0; this.delegate.selected_index = 0;
cx.notify(); cx.notify();
@@ -171,9 +171,9 @@ impl PickerDelegate for ThreadContextPickerDelegate {
let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&entry.id, cx)); let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&entry.id, cx));
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
let thread = open_thread_task.await?; let thread = open_thread_task.await?;
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
this.delegate this.delegate
.context_store .context_store
.update(cx, |context_store, cx| context_store.add_thread(thread, cx)) .update(cx, |context_store, cx| context_store.add_thread(thread, cx))

View File

@@ -75,15 +75,15 @@ impl ContextStore {
return Task::ready(Err(anyhow!("failed to read project"))); return Task::ready(Err(anyhow!("failed to read project")));
}; };
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let open_buffer_task = project.update(&mut cx, |project, cx| { let open_buffer_task = project.update(cx, |project, cx| {
project.open_buffer(project_path.clone(), cx) project.open_buffer(project_path.clone(), cx)
})?; })?;
let buffer_entity = open_buffer_task.await?; let buffer_entity = open_buffer_task.await?;
let buffer_id = this.update(&mut cx, |_, cx| buffer_entity.read(cx).remote_id())?; let buffer_id = this.update(cx, |_, cx| buffer_entity.read(cx).remote_id())?;
let already_included = this.update(&mut cx, |this, _cx| { let already_included = this.update(cx, |this, _cx| {
match this.will_include_buffer(buffer_id, &project_path.path) { match this.will_include_buffer(buffer_id, &project_path.path) {
Some(FileInclusion::Direct(context_id)) => { Some(FileInclusion::Direct(context_id)) => {
this.remove_context(context_id); this.remove_context(context_id);
@@ -98,7 +98,7 @@ impl ContextStore {
return anyhow::Ok(()); return anyhow::Ok(());
} }
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| { let (buffer_info, text_task) = this.update(cx, |_, cx| {
let buffer = buffer_entity.read(cx); let buffer = buffer_entity.read(cx);
collect_buffer_info_and_text( collect_buffer_info_and_text(
project_path.path.clone(), project_path.path.clone(),
@@ -110,7 +110,7 @@ impl ContextStore {
let text = text_task.await; let text = text_task.await;
this.update(&mut cx, |this, _cx| { this.update(cx, |this, _cx| {
this.insert_file(make_context_buffer(buffer_info, text)); this.insert_file(make_context_buffer(buffer_info, text));
})?; })?;
@@ -123,8 +123,8 @@ impl ContextStore {
buffer_entity: Entity<Buffer>, buffer_entity: Entity<Buffer>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Task<Result<()>> { ) -> Task<Result<()>> {
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| { let (buffer_info, text_task) = this.update(cx, |_, cx| {
let buffer = buffer_entity.read(cx); let buffer = buffer_entity.read(cx);
let Some(file) = buffer.file() else { let Some(file) = buffer.file() else {
return Err(anyhow!("Buffer has no path.")); return Err(anyhow!("Buffer has no path."));
@@ -139,7 +139,7 @@ impl ContextStore {
let text = text_task.await; let text = text_task.await;
this.update(&mut cx, |this, _cx| { this.update(cx, |this, _cx| {
this.insert_file(make_context_buffer(buffer_info, text)) this.insert_file(make_context_buffer(buffer_info, text))
})?; })?;
@@ -179,18 +179,18 @@ impl ContextStore {
} }
let worktree_id = project_path.worktree_id; let worktree_id = project_path.worktree_id;
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let worktree = project.update(&mut cx, |project, cx| { let worktree = project.update(cx, |project, cx| {
project project
.worktree_for_id(worktree_id, cx) .worktree_for_id(worktree_id, cx)
.ok_or_else(|| anyhow!("no worktree found for {worktree_id:?}")) .ok_or_else(|| anyhow!("no worktree found for {worktree_id:?}"))
})??; })??;
let files = worktree.update(&mut cx, |worktree, _cx| { let files = worktree.update(cx, |worktree, _cx| {
collect_files_in_path(worktree, &project_path.path) collect_files_in_path(worktree, &project_path.path)
})?; })?;
let open_buffers_task = project.update(&mut cx, |project, cx| { let open_buffers_task = project.update(cx, |project, cx| {
let tasks = files.iter().map(|file_path| { let tasks = files.iter().map(|file_path| {
project.open_buffer( project.open_buffer(
ProjectPath { ProjectPath {
@@ -207,7 +207,7 @@ impl ContextStore {
let mut buffer_infos = Vec::new(); let mut buffer_infos = Vec::new();
let mut text_tasks = Vec::new(); let mut text_tasks = Vec::new();
this.update(&mut cx, |_, cx| { this.update(cx, |_, cx| {
for (path, buffer_entity) in files.into_iter().zip(buffers) { for (path, buffer_entity) in files.into_iter().zip(buffers) {
// Skip all binary files and other non-UTF8 files // Skip all binary files and other non-UTF8 files
if let Ok(buffer_entity) = buffer_entity { if let Ok(buffer_entity) = buffer_entity {
@@ -236,7 +236,7 @@ impl ContextStore {
bail!("No text files found in {}", &project_path.path.display()); bail!("No text files found in {}", &project_path.path.display());
} }
this.update(&mut cx, |this, _| { this.update(cx, |this, _| {
this.insert_directory(&project_path.path, context_buffers); this.insert_directory(&project_path.path, context_buffers);
})?; })?;
@@ -595,10 +595,10 @@ fn refresh_file_text(
let id = file_context.id; let id = file_context.id;
let task = refresh_context_buffer(&file_context.context_buffer, cx); let task = refresh_context_buffer(&file_context.context_buffer, cx);
if let Some(task) = task { if let Some(task) = task {
Some(cx.spawn(|mut cx| async move { Some(cx.spawn(async move |cx| {
let context_buffer = task.await; let context_buffer = task.await;
context_store context_store
.update(&mut cx, |context_store, _| { .update(cx, |context_store, _| {
let new_file_context = FileContext { id, context_buffer }; let new_file_context = FileContext { id, context_buffer };
context_store.replace_context(AssistantContext::File(new_file_context)); context_store.replace_context(AssistantContext::File(new_file_context));
}) })
@@ -636,10 +636,10 @@ fn refresh_directory_text(
let id = directory_context.snapshot.id; let id = directory_context.snapshot.id;
let path = directory_context.path.clone(); let path = directory_context.path.clone();
Some(cx.spawn(|mut cx| async move { Some(cx.spawn(async move |cx| {
let context_buffers = context_buffers.await; let context_buffers = context_buffers.await;
context_store context_store
.update(&mut cx, |context_store, _| { .update(cx, |context_store, _| {
let new_directory_context = DirectoryContext::new(id, &path, context_buffers); let new_directory_context = DirectoryContext::new(id, &path, context_buffers);
context_store.replace_context(AssistantContext::Directory(new_directory_context)); context_store.replace_context(AssistantContext::Directory(new_directory_context));
}) })
@@ -654,9 +654,9 @@ fn refresh_thread_text(
) -> Task<()> { ) -> Task<()> {
let id = thread_context.id; let id = thread_context.id;
let thread = thread_context.thread.clone(); let thread = thread_context.thread.clone();
cx.spawn(move |mut cx| async move { cx.spawn(async move |cx| {
context_store context_store
.update(&mut cx, |context_store, cx| { .update(cx, |context_store, cx| {
let text = thread.read(cx).text().into(); let text = thread.read(cx).text().into();
context_store.replace_context(AssistantContext::Thread(ThreadContext { context_store.replace_context(AssistantContext::Thread(ThreadContext {
id, id,

View File

@@ -335,12 +335,12 @@ impl ContextStrip {
context_store.accept_suggested_context(&suggested, cx) context_store.accept_suggested_context(&suggested, cx)
}); });
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
match task.await.notify_async_err(&mut cx) { match task.await.notify_async_err(cx) {
None => {} None => {}
Some(()) => { Some(()) => {
if let Some(this) = this.upgrade() { if let Some(this) = this.upgrade() {
this.update(&mut cx, |_, cx| cx.notify())?; this.update(cx, |_, cx| cx.notify())?;
} }
} }
} }

View File

@@ -276,7 +276,7 @@ impl InlineAssistant {
if is_authenticated() { if is_authenticated() {
handle_assist(window, cx); handle_assist(window, cx);
} else { } else {
cx.spawn_in(window, |_workspace, mut cx| async move { cx.spawn_in(window, async move |_workspace, cx| {
let Some(task) = cx.update(|_, cx| { let Some(task) = cx.update(|_, cx| {
LanguageModelRegistry::read_global(cx) LanguageModelRegistry::read_global(cx)
.active_provider() .active_provider()
@@ -1456,9 +1456,9 @@ impl EditorInlineAssists {
assist_ids: Vec::new(), assist_ids: Vec::new(),
scroll_lock: None, scroll_lock: None,
highlight_updates: highlight_updates_tx, highlight_updates: highlight_updates_tx,
_update_highlights: cx.spawn(|cx| { _update_highlights: cx.spawn({
let editor = editor.downgrade(); let editor = editor.downgrade();
async move { async move |cx| {
while let Ok(()) = highlight_updates_rx.changed().await { while let Ok(()) = highlight_updates_rx.changed().await {
let editor = editor.upgrade().context("editor was dropped")?; let editor = editor.upgrade().context("editor was dropped")?;
cx.update_global(|assistant: &mut InlineAssistant, cx| { cx.update_global(|assistant: &mut InlineAssistant, cx| {
@@ -1729,6 +1729,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
title: "Fix with Assistant".into(), title: "Fix with Assistant".into(),
..Default::default() ..Default::default()
})), })),
resolved: true,
}])) }]))
} else { } else {
Task::ready(Ok(Vec::new())) Task::ready(Ok(Vec::new()))
@@ -1747,10 +1748,10 @@ impl CodeActionProvider for AssistantCodeActionProvider {
let editor = self.editor.clone(); let editor = self.editor.clone();
let workspace = self.workspace.clone(); let workspace = self.workspace.clone();
let thread_store = self.thread_store.clone(); let thread_store = self.thread_store.clone();
window.spawn(cx, |mut cx| async move { window.spawn(cx, async move |cx| {
let editor = editor.upgrade().context("editor was released")?; let editor = editor.upgrade().context("editor was released")?;
let range = editor let range = editor
.update(&mut cx, |editor, cx| { .update(cx, |editor, cx| {
editor.buffer().update(cx, |multibuffer, cx| { editor.buffer().update(cx, |multibuffer, cx| {
let buffer = buffer.read(cx); let buffer = buffer.read(cx);
let multibuffer_snapshot = multibuffer.read(cx); let multibuffer_snapshot = multibuffer.read(cx);

View File

@@ -3,23 +3,25 @@ use std::sync::Arc;
use collections::HashSet; use collections::HashSet;
use editor::actions::MoveUp; use editor::actions::MoveUp;
use editor::{Editor, EditorElement, EditorEvent, EditorStyle}; use editor::{Editor, EditorElement, EditorEvent, EditorStyle};
use file_icons::FileIcons;
use fs::Fs; use fs::Fs;
use git::ExpandCommitEditor;
use git_ui::git_panel;
use gpui::{ use gpui::{
Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle, Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle,
WeakEntity, WeakEntity,
}; };
use language_model::LanguageModelRegistry; use language_model::LanguageModelRegistry;
use language_model_selector::ToggleModelSelector; use language_model_selector::ToggleModelSelector;
use project::Project;
use rope::Point; use rope::Point;
use settings::Settings; use settings::Settings;
use std::time::Duration; use std::time::Duration;
use text::Bias; use text::Bias;
use theme::ThemeSettings; use theme::ThemeSettings;
use ui::{ use ui::{
prelude::*, ButtonLike, Disclosure, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, prelude::*, ButtonLike, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, Tooltip,
Tooltip,
}; };
use util::ResultExt;
use vim_mode_setting::VimModeSetting; use vim_mode_setting::VimModeSetting;
use workspace::notifications::{NotificationId, NotifyTaskExt}; use workspace::notifications::{NotificationId, NotifyTaskExt};
use workspace::{Toast, Workspace}; use workspace::{Toast, Workspace};
@@ -37,6 +39,7 @@ pub struct MessageEditor {
thread: Entity<Thread>, thread: Entity<Thread>,
editor: Entity<Editor>, editor: Entity<Editor>,
workspace: WeakEntity<Workspace>, workspace: WeakEntity<Workspace>,
project: Entity<Project>,
context_store: Entity<ContextStore>, context_store: Entity<ContextStore>,
context_strip: Entity<ContextStrip>, context_strip: Entity<ContextStrip>,
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>, context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
@@ -44,7 +47,6 @@ pub struct MessageEditor {
inline_context_picker_menu_handle: PopoverMenuHandle<ContextPicker>, inline_context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
model_selector: Entity<AssistantModelSelector>, model_selector: Entity<AssistantModelSelector>,
tool_selector: Entity<ToolSelector>, tool_selector: Entity<ToolSelector>,
edits_expanded: bool,
_subscriptions: Vec<Subscription>, _subscriptions: Vec<Subscription>,
} }
@@ -107,8 +109,9 @@ impl MessageEditor {
]; ];
Self { Self {
thread,
editor: editor.clone(), editor: editor.clone(),
project: thread.read(cx).project().clone(),
thread,
workspace, workspace,
context_store, context_store,
context_strip, context_strip,
@@ -125,7 +128,6 @@ impl MessageEditor {
) )
}), }),
tool_selector: cx.new(|cx| ToolSelector::new(tools, cx)), tool_selector: cx.new(|cx| ToolSelector::new(tools, cx)),
edits_expanded: false,
_subscriptions: subscriptions, _subscriptions: subscriptions,
} }
} }
@@ -158,7 +160,7 @@ impl MessageEditor {
return; return;
} }
if self.thread.read(cx).is_streaming() { if self.thread.read(cx).is_generating() {
return; return;
} }
@@ -206,12 +208,15 @@ impl MessageEditor {
let thread = self.thread.clone(); let thread = self.thread.clone();
let context_store = self.context_store.clone(); let context_store = self.context_store.clone();
cx.spawn(move |_, mut cx| async move { let git_store = self.project.read(cx).git_store();
let checkpoint = git_store.read(cx).checkpoint(cx);
cx.spawn(async move |_, cx| {
refresh_task.await; refresh_task.await;
let checkpoint = checkpoint.await.log_err();
thread thread
.update(&mut cx, |thread, cx| { .update(cx, |thread, cx| {
let context = context_store.read(cx).snapshot(cx).collect::<Vec<_>>(); let context = context_store.read(cx).snapshot(cx).collect::<Vec<_>>();
thread.insert_user_message(user_message, context, cx); thread.insert_user_message(user_message, context, checkpoint, cx);
thread.send_to_model(model, request_kind, cx); thread.send_to_model(model, request_kind, cx);
}) })
.ok(); .ok();
@@ -297,9 +302,9 @@ impl MessageEditor {
.thread .thread
.update(cx, |thread, cx| thread.report_feedback(is_positive, cx)); .update(cx, |thread, cx| thread.report_feedback(is_positive, cx));
cx.spawn(|_, mut cx| async move { cx.spawn(async move |_, cx| {
report.await?; report.await?;
workspace.update(&mut cx, |workspace, cx| { workspace.update(cx, |workspace, cx| {
let message = if is_positive { let message = if is_positive {
"Positive feedback recorded. Thank you!" "Positive feedback recorded. Thank you!"
} else { } else {
@@ -328,7 +333,7 @@ impl Render for MessageEditor {
let focus_handle = self.editor.focus_handle(cx); let focus_handle = self.editor.focus_handle(cx);
let inline_context_picker = self.inline_context_picker.clone(); let inline_context_picker = self.inline_context_picker.clone();
let bg_color = cx.theme().colors().editor_background; let bg_color = cx.theme().colors().editor_background;
let is_streaming_completion = self.thread.read(cx).is_streaming(); let is_generating = self.thread.read(cx).is_generating();
let is_model_selected = self.is_model_selected(cx); let is_model_selected = self.is_model_selected(cx);
let is_editor_empty = self.is_editor_empty(cx); let is_editor_empty = self.is_editor_empty(cx);
let submit_label_color = if is_editor_empty { let submit_label_color = if is_editor_empty {
@@ -347,12 +352,16 @@ impl Render for MessageEditor {
px(64.) px(64.)
}; };
let changed_buffers = self.thread.read(cx).scripting_changed_buffers(cx); let project = self.thread.read(cx).project();
let changed_buffers_count = changed_buffers.len(); let changed_files = if let Some(repository) = project.read(cx).active_repository(cx) {
repository.read(cx).status().count()
} else {
0
};
v_flex() v_flex()
.size_full() .size_full()
.when(is_streaming_completion, |parent| { .when(is_generating, |parent| {
let focus_handle = self.editor.focus_handle(cx).clone(); let focus_handle = self.editor.focus_handle(cx).clone();
parent.child( parent.child(
h_flex().py_3().w_full().justify_center().child( h_flex().py_3().w_full().justify_center().child(
@@ -410,7 +419,7 @@ impl Render for MessageEditor {
), ),
) )
}) })
.when(changed_buffers_count > 0, |parent| { .when(changed_files > 0, |parent| {
parent.child( parent.child(
v_flex() v_flex()
.mx_2() .mx_2()
@@ -421,96 +430,60 @@ impl Render for MessageEditor {
.rounded_t_md() .rounded_t_md()
.child( .child(
h_flex() h_flex()
.gap_2() .justify_between()
.p_2() .p_2()
.child( .child(
Disclosure::new("edits-disclosure", self.edits_expanded) h_flex()
.on_click(cx.listener(|this, _ev, _window, cx| { .gap_2()
this.edits_expanded = !this.edits_expanded; .child(
cx.notify(); IconButton::new(
})), "edits-disclosure",
IconName::GitBranchSmall,
)
.icon_size(IconSize::Small)
.on_click(
|_ev, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(&git_panel::ToggleFocus)
});
},
),
)
.child(
Label::new(format!(
"{} {} changed",
changed_files,
if changed_files == 1 { "file" } else { "files" }
))
.size(LabelSize::XSmall)
.color(Color::Muted),
),
) )
.child( .child(
Label::new("Edits") h_flex()
.size(LabelSize::XSmall) .gap_2()
.color(Color::Muted), .child(
) Button::new("review", "Review")
.child(Label::new("").size(LabelSize::XSmall).color(Color::Muted)) .label_size(LabelSize::XSmall)
.child( .on_click(|_event, _window, cx| {
Label::new(format!( cx.defer(|cx| {
"{} {}", cx.dispatch_action(
changed_buffers_count, &git_ui::project_diff::Diff,
if changed_buffers_count == 1 { );
"file" });
} else { }),
"files" )
} .child(
)) Button::new("commit", "Commit")
.size(LabelSize::XSmall) .label_size(LabelSize::XSmall)
.color(Color::Muted), .on_click(|_event, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(&ExpandCommitEditor)
});
}),
),
), ),
) ),
.when(self.edits_expanded, |parent| {
parent.child(
v_flex().bg(cx.theme().colors().editor_background).children(
changed_buffers.enumerate().flat_map(|(index, buffer)| {
let file = buffer.read(cx).file()?;
let path = file.path();
let parent_label = path.parent().and_then(|parent| {
let parent_str = parent.to_string_lossy();
if parent_str.is_empty() {
None
} else {
Some(
Label::new(format!(
"{}{}",
parent_str,
std::path::MAIN_SEPARATOR_STR
))
.color(Color::Muted)
.size(LabelSize::Small),
)
}
});
let name_label = path.file_name().map(|name| {
Label::new(name.to_string_lossy().to_string())
.size(LabelSize::Small)
});
let file_icon = FileIcons::get_icon(&path, cx)
.map(Icon::from_path)
.unwrap_or_else(|| Icon::new(IconName::File));
let element = div()
.p_2()
.when(index + 1 < changed_buffers_count, |parent| {
parent
.border_color(cx.theme().colors().border)
.border_b_1()
})
.child(
h_flex()
.gap_2()
.child(file_icon)
.child(
// TODO: handle overflow
h_flex()
.children(parent_label)
.children(name_label),
)
// TODO: show lines changed
.child(Label::new("+").color(Color::Created))
.child(Label::new("-").color(Color::Deleted)),
);
Some(element)
}),
),
)
}),
) )
}) })
.child( .child(
@@ -625,7 +598,7 @@ impl Render for MessageEditor {
.disabled( .disabled(
is_editor_empty is_editor_empty
|| !is_model_selected || !is_model_selected
|| is_streaming_completion, || is_generating,
) )
.child( .child(
h_flex() h_flex()
@@ -660,7 +633,7 @@ impl Render for MessageEditor {
"Type a message to submit", "Type a message to submit",
)) ))
}) })
.when(is_streaming_completion, |button| { .when(is_generating, |button| {
button.tooltip(Tooltip::text( button.tooltip(Tooltip::text(
"Cancel to submit a new message", "Cancel to submit a new message",
)) ))

View File

@@ -40,7 +40,7 @@ impl TerminalCodegen {
let telemetry = self.telemetry.clone(); let telemetry = self.telemetry.clone();
self.status = CodegenStatus::Pending; self.status = CodegenStatus::Pending;
self.transaction = Some(TerminalTransaction::start(self.terminal.clone())); self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
self.generation = cx.spawn(|this, mut cx| async move { self.generation = cx.spawn(async move |this, cx| {
let model_telemetry_id = model.telemetry_id(); let model_telemetry_id = model.telemetry_id();
let model_provider_id = model.provider_id(); let model_provider_id = model.provider_id();
let response = model.stream_completion_text(prompt, &cx).await; let response = model.stream_completion_text(prompt, &cx).await;
@@ -97,12 +97,12 @@ impl TerminalCodegen {
} }
}); });
this.update(&mut cx, |this, _| { this.update(cx, |this, _| {
this.message_id = message_id; this.message_id = message_id;
})?; })?;
while let Some(hunk) = hunks_rx.next().await { while let Some(hunk) = hunks_rx.next().await {
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
if let Some(transaction) = &mut this.transaction { if let Some(transaction) = &mut this.transaction {
transaction.push(hunk, cx); transaction.push(hunk, cx);
cx.notify(); cx.notify();
@@ -116,7 +116,7 @@ impl TerminalCodegen {
let result = generate.await; let result = generate.await;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
if let Err(error) = result { if let Err(error) = result {
this.status = CodegenStatus::Error(error); this.status = CodegenStatus::Error(error);
} else { } else {

View File

@@ -1,3 +1,4 @@
use std::fmt::Write as _;
use std::io::Write; use std::io::Write;
use std::sync::Arc; use std::sync::Arc;
@@ -15,6 +16,7 @@ use language_model::{
LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError, LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError,
Role, StopReason, TokenUsage, Role, StopReason, TokenUsage,
}; };
use project::git::GitStoreCheckpoint;
use project::Project; use project::Project;
use prompt_store::{AssistantSystemPromptWorktree, PromptBuilder}; use prompt_store::{AssistantSystemPromptWorktree, PromptBuilder};
use scripting_tool::{ScriptingSession, ScriptingTool}; use scripting_tool::{ScriptingSession, ScriptingTool};
@@ -88,6 +90,12 @@ pub struct GitState {
pub diff: Option<String>, pub diff: Option<String>,
} }
#[derive(Clone)]
pub struct ThreadCheckpoint {
message_id: MessageId,
git_checkpoint: GitStoreCheckpoint,
}
/// A thread of conversation with the LLM. /// A thread of conversation with the LLM.
pub struct Thread { pub struct Thread {
id: ThreadId, id: ThreadId,
@@ -98,6 +106,7 @@ pub struct Thread {
next_message_id: MessageId, next_message_id: MessageId,
context: BTreeMap<ContextId, ContextSnapshot>, context: BTreeMap<ContextId, ContextSnapshot>,
context_by_message: HashMap<MessageId, Vec<ContextId>>, context_by_message: HashMap<MessageId, Vec<ContextId>>,
checkpoints_by_message: HashMap<MessageId, GitStoreCheckpoint>,
completion_count: usize, completion_count: usize,
pending_completions: Vec<PendingCompletion>, pending_completions: Vec<PendingCompletion>,
project: Entity<Project>, project: Entity<Project>,
@@ -127,6 +136,7 @@ impl Thread {
next_message_id: MessageId(0), next_message_id: MessageId(0),
context: BTreeMap::default(), context: BTreeMap::default(),
context_by_message: HashMap::default(), context_by_message: HashMap::default(),
checkpoints_by_message: HashMap::default(),
completion_count: 0, completion_count: 0,
pending_completions: Vec::new(), pending_completions: Vec::new(),
project: project.clone(), project: project.clone(),
@@ -187,6 +197,7 @@ impl Thread {
next_message_id, next_message_id,
context: BTreeMap::default(), context: BTreeMap::default(),
context_by_message: HashMap::default(), context_by_message: HashMap::default(),
checkpoints_by_message: HashMap::default(),
completion_count: 0, completion_count: 0,
pending_completions: Vec::new(), pending_completions: Vec::new(),
project, project,
@@ -240,7 +251,7 @@ impl Thread {
self.messages.iter() self.messages.iter()
} }
pub fn is_streaming(&self) -> bool { pub fn is_generating(&self) -> bool {
!self.pending_completions.is_empty() || !self.all_tools_finished() !self.pending_completions.is_empty() || !self.all_tools_finished()
} }
@@ -248,6 +259,45 @@ impl Thread {
&self.tools &self.tools
} }
pub fn checkpoint_for_message(&self, id: MessageId) -> Option<ThreadCheckpoint> {
let checkpoint = self.checkpoints_by_message.get(&id).cloned()?;
Some(ThreadCheckpoint {
message_id: id,
git_checkpoint: checkpoint,
})
}
pub fn restore_checkpoint(
&mut self,
checkpoint: ThreadCheckpoint,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let project = self.project.read(cx);
let restore = project
.git_store()
.read(cx)
.restore_checkpoint(checkpoint.git_checkpoint, cx);
cx.spawn(async move |this, cx| {
restore.await?;
this.update(cx, |this, cx| this.truncate(checkpoint.message_id, cx))
})
}
pub fn truncate(&mut self, message_id: MessageId, cx: &mut Context<Self>) {
let Some(message_ix) = self
.messages
.iter()
.rposition(|message| message.id == message_id)
else {
return;
};
for deleted_message in self.messages.drain(message_ix..) {
self.context_by_message.remove(&deleted_message.id);
self.checkpoints_by_message.remove(&deleted_message.id);
}
cx.notify();
}
pub fn context_for_message(&self, id: MessageId) -> Option<Vec<ContextSnapshot>> { pub fn context_for_message(&self, id: MessageId) -> Option<Vec<ContextSnapshot>> {
let context = self.context_by_message.get(&id)?; let context = self.context_by_message.get(&id)?;
Some( Some(
@@ -267,8 +317,8 @@ impl Thread {
.into_iter() .into_iter()
.chain(self.scripting_tool_use.pending_tool_uses()); .chain(self.scripting_tool_use.pending_tool_uses());
// If the only pending tool uses left are the ones with errors, then that means that we've finished running all // If the only pending tool uses left are the ones with errors, then
// of the pending tools. // that means that we've finished running all of the pending tools.
all_pending_tool_uses.all(|tool_use| tool_use.status.is_error()) all_pending_tool_uses.all(|tool_use| tool_use.status.is_error())
} }
@@ -295,13 +345,6 @@ impl Thread {
self.scripting_tool_use.tool_results_for_message(id) self.scripting_tool_use.tool_results_for_message(id)
} }
pub fn scripting_changed_buffers<'a>(
&self,
cx: &'a App,
) -> impl ExactSizeIterator<Item = &'a Entity<language::Buffer>> {
self.scripting_session.read(cx).changed_buffers()
}
pub fn message_has_tool_results(&self, message_id: MessageId) -> bool { pub fn message_has_tool_results(&self, message_id: MessageId) -> bool {
self.tool_use.message_has_tool_results(message_id) self.tool_use.message_has_tool_results(message_id)
} }
@@ -314,6 +357,7 @@ impl Thread {
&mut self, &mut self,
text: impl Into<String>, text: impl Into<String>,
context: Vec<ContextSnapshot>, context: Vec<ContextSnapshot>,
checkpoint: Option<GitStoreCheckpoint>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> MessageId { ) -> MessageId {
let message_id = self.insert_message(Role::User, text, cx); let message_id = self.insert_message(Role::User, text, cx);
@@ -321,6 +365,9 @@ impl Thread {
self.context self.context
.extend(context.into_iter().map(|context| (context.id, context))); .extend(context.into_iter().map(|context| (context.id, context)));
self.context_by_message.insert(message_id, context_ids); self.context_by_message.insert(message_id, context_ids);
if let Some(checkpoint) = checkpoint {
self.checkpoints_by_message.insert(message_id, checkpoint);
}
message_id message_id
} }
@@ -393,9 +440,9 @@ impl Thread {
/// Serializes this thread into a format for storage or telemetry. /// Serializes this thread into a format for storage or telemetry.
pub fn serialize(&self, cx: &mut Context<Self>) -> Task<Result<SerializedThread>> { pub fn serialize(&self, cx: &mut Context<Self>) -> Task<Result<SerializedThread>> {
let initial_project_snapshot = self.initial_project_snapshot.clone(); let initial_project_snapshot = self.initial_project_snapshot.clone();
cx.spawn(|this, cx| async move { cx.spawn(async move |this, cx| {
let initial_project_snapshot = initial_project_snapshot.await; let initial_project_snapshot = initial_project_snapshot.await;
this.read_with(&cx, |this, _| SerializedThread { this.read_with(cx, |this, _| SerializedThread {
summary: this.summary_or_default(), summary: this.summary_or_default(),
updated_at: this.updated_at(), updated_at: this.updated_at(),
messages: this messages: this
@@ -560,9 +607,39 @@ impl Thread {
request.messages.push(context_message); request.messages.push(context_message);
} }
self.attach_stale_files(&mut request.messages, cx);
request request
} }
fn attach_stale_files(&self, messages: &mut Vec<LanguageModelRequestMessage>, cx: &App) {
const STALE_FILES_HEADER: &str = "These files changed since last read:";
let mut stale_message = String::new();
for stale_file in self.action_log.read(cx).stale_buffers(cx) {
let Some(file) = stale_file.read(cx).file() else {
continue;
};
if stale_message.is_empty() {
write!(&mut stale_message, "{}", STALE_FILES_HEADER).ok();
}
writeln!(&mut stale_message, "- {}", file.path().display()).ok();
}
if !stale_message.is_empty() {
let context_message = LanguageModelRequestMessage {
role: Role::User,
content: vec![stale_message.into()],
cache: false,
};
messages.push(context_message);
}
}
pub fn stream_completion( pub fn stream_completion(
&mut self, &mut self,
request: LanguageModelRequest, request: LanguageModelRequest,
@@ -571,8 +648,10 @@ impl Thread {
) { ) {
let pending_completion_id = post_inc(&mut self.completion_count); let pending_completion_id = post_inc(&mut self.completion_count);
let task = cx.spawn(|thread, mut cx| async move { let task = cx.spawn(async move |thread, cx| {
let stream = model.stream_completion(request, &cx); let stream = model.stream_completion(request, &cx);
let initial_token_usage =
thread.read_with(cx, |thread, _cx| thread.cumulative_token_usage.clone());
let stream_completion = async { let stream_completion = async {
let mut events = stream.await?; let mut events = stream.await?;
let mut stop_reason = StopReason::EndTurn; let mut stop_reason = StopReason::EndTurn;
@@ -581,7 +660,7 @@ impl Thread {
while let Some(event) = events.next().await { while let Some(event) = events.next().await {
let event = event?; let event = event?;
thread.update(&mut cx, |thread, cx| { thread.update(cx, |thread, cx| {
match event { match event {
LanguageModelCompletionEvent::StartMessage { .. } => { LanguageModelCompletionEvent::StartMessage { .. } => {
thread.insert_message(Role::Assistant, String::new(), cx); thread.insert_message(Role::Assistant, String::new(), cx);
@@ -640,7 +719,7 @@ impl Thread {
smol::future::yield_now().await; smol::future::yield_now().await;
} }
thread.update(&mut cx, |thread, cx| { thread.update(cx, |thread, cx| {
thread thread
.pending_completions .pending_completions
.retain(|completion| completion.id != pending_completion_id); .retain(|completion| completion.id != pending_completion_id);
@@ -656,7 +735,7 @@ impl Thread {
let result = stream_completion.await; let result = stream_completion.await;
thread thread
.update(&mut cx, |thread, cx| { .update(cx, |thread, cx| {
match result.as_ref() { match result.as_ref() {
Ok(stop_reason) => match stop_reason { Ok(stop_reason) => match stop_reason {
StopReason::ToolUse => { StopReason::ToolUse => {
@@ -683,10 +762,25 @@ impl Thread {
))); )));
} }
thread.cancel_last_completion(); thread.cancel_last_completion(cx);
} }
} }
cx.emit(ThreadEvent::DoneStreaming); cx.emit(ThreadEvent::DoneStreaming);
if let Ok(initial_usage) = initial_token_usage {
let usage = thread.cumulative_token_usage.clone() - initial_usage;
telemetry::event!(
"Assistant Thread Completion",
thread_id = thread.id().to_string(),
model = model.telemetry_id(),
model_provider = model.provider_id().to_string(),
input_tokens = usage.input_tokens,
output_tokens = usage.output_tokens,
cache_creation_input_tokens = usage.cache_creation_input_tokens,
cache_read_input_tokens = usage.cache_read_input_tokens,
);
}
}) })
.ok(); .ok();
}); });
@@ -719,7 +813,7 @@ impl Thread {
cache: false, cache: false,
}); });
self.pending_summary = cx.spawn(|this, mut cx| { self.pending_summary = cx.spawn(async move |this, cx| {
async move { async move {
let stream = model.stream_completion_text(request, &cx); let stream = model.stream_completion_text(request, &cx);
let mut messages = stream.await?; let mut messages = stream.await?;
@@ -736,7 +830,7 @@ impl Thread {
} }
} }
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
if !new_summary.is_empty() { if !new_summary.is_empty() {
this.summary = Some(new_summary.into()); this.summary = Some(new_summary.into());
} }
@@ -747,6 +841,7 @@ impl Thread {
anyhow::Ok(()) anyhow::Ok(())
} }
.log_err() .log_err()
.await
}); });
} }
@@ -792,10 +887,10 @@ impl Thread {
}); });
let session = self.scripting_session.clone(); let session = self.scripting_session.clone();
cx.spawn(|_, cx| async move { cx.spawn(async move |_, cx| {
script_task.await; script_task.await;
let message = session.read_with(&cx, |session, _cx| { let message = session.read_with(cx, |session, _cx| {
// Using a id to get the script output seems impractical. // Using a id to get the script output seems impractical.
// Why not just include it in the Task result? // Why not just include it in the Task result?
// This is because we'll later report the script state as it runs, // This is because we'll later report the script state as it runs,
@@ -820,12 +915,12 @@ impl Thread {
output: Task<Result<String>>, output: Task<Result<String>>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) { ) {
let insert_output_task = cx.spawn(|thread, mut cx| { let insert_output_task = cx.spawn({
let tool_use_id = tool_use_id.clone(); let tool_use_id = tool_use_id.clone();
async move { async move |thread, cx| {
let output = output.await; let output = output.await;
thread thread
.update(&mut cx, |thread, cx| { .update(cx, |thread, cx| {
let pending_tool_use = thread let pending_tool_use = thread
.tool_use .tool_use
.insert_tool_output(tool_use_id.clone(), output); .insert_tool_output(tool_use_id.clone(), output);
@@ -833,6 +928,7 @@ impl Thread {
cx.emit(ThreadEvent::ToolFinished { cx.emit(ThreadEvent::ToolFinished {
tool_use_id, tool_use_id,
pending_tool_use, pending_tool_use,
canceled: false,
}); });
}) })
.ok(); .ok();
@@ -849,12 +945,12 @@ impl Thread {
output: Task<Result<String>>, output: Task<Result<String>>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) { ) {
let insert_output_task = cx.spawn(|thread, mut cx| { let insert_output_task = cx.spawn({
let tool_use_id = tool_use_id.clone(); let tool_use_id = tool_use_id.clone();
async move { async move |thread, cx| {
let output = output.await; let output = output.await;
thread thread
.update(&mut cx, |thread, cx| { .update(cx, |thread, cx| {
let pending_tool_use = thread let pending_tool_use = thread
.scripting_tool_use .scripting_tool_use
.insert_tool_output(tool_use_id.clone(), output); .insert_tool_output(tool_use_id.clone(), output);
@@ -862,6 +958,7 @@ impl Thread {
cx.emit(ThreadEvent::ToolFinished { cx.emit(ThreadEvent::ToolFinished {
tool_use_id, tool_use_id,
pending_tool_use, pending_tool_use,
canceled: false,
}); });
}) })
.ok(); .ok();
@@ -872,9 +969,8 @@ impl Thread {
.run_pending_tool(tool_use_id, insert_output_task); .run_pending_tool(tool_use_id, insert_output_task);
} }
pub fn send_tool_results_to_model( pub fn attach_tool_results(
&mut self, &mut self,
model: Arc<dyn LanguageModel>,
updated_context: Vec<ContextSnapshot>, updated_context: Vec<ContextSnapshot>,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) { ) {
@@ -891,19 +987,28 @@ impl Thread {
// so for now we provide some text to keep the model on track. // so for now we provide some text to keep the model on track.
"Here are the tool results.", "Here are the tool results.",
Vec::new(), Vec::new(),
None,
cx, cx,
); );
self.send_to_model(model, RequestKind::Chat, cx);
} }
/// Cancels the last pending completion, if there are any pending. /// Cancels the last pending completion, if there are any pending.
/// ///
/// Returns whether a completion was canceled. /// Returns whether a completion was canceled.
pub fn cancel_last_completion(&mut self) -> bool { pub fn cancel_last_completion(&mut self, cx: &mut Context<Self>) -> bool {
if let Some(_last_completion) = self.pending_completions.pop() { if self.pending_completions.pop().is_some() {
true true
} else { } else {
false let mut canceled = false;
for pending_tool_use in self.tool_use.cancel_pending() {
canceled = true;
cx.emit(ThreadEvent::ToolFinished {
tool_use_id: pending_tool_use.id.clone(),
pending_tool_use: Some(pending_tool_use),
canceled: true,
});
}
canceled
} }
} }
@@ -945,7 +1050,7 @@ impl Thread {
.map(|worktree| Self::worktree_snapshot(worktree, cx)) .map(|worktree| Self::worktree_snapshot(worktree, cx))
.collect(); .collect();
cx.spawn(move |_, cx| async move { cx.spawn(async move |_, cx| {
let worktree_snapshots = futures::future::join_all(worktree_snapshots).await; let worktree_snapshots = futures::future::join_all(worktree_snapshots).await;
let mut unsaved_buffers = Vec::new(); let mut unsaved_buffers = Vec::new();
@@ -972,7 +1077,7 @@ impl Thread {
} }
fn worktree_snapshot(worktree: Entity<project::Worktree>, cx: &App) -> Task<WorktreeSnapshot> { fn worktree_snapshot(worktree: Entity<project::Worktree>, cx: &App) -> Task<WorktreeSnapshot> {
cx.spawn(move |cx| async move { cx.spawn(async move |cx| {
// Get worktree path and snapshot // Get worktree path and snapshot
let worktree_info = cx.update(|app_cx| { let worktree_info = cx.update(|app_cx| {
let worktree = worktree.read(app_cx); let worktree = worktree.read(app_cx);
@@ -996,7 +1101,7 @@ impl Thread {
let current_branch = repo_entry.branch().map(|branch| branch.name.to_string()); let current_branch = repo_entry.branch().map(|branch| branch.name.to_string());
// Get repository info // Get repository info
let repo_result = worktree.read_with(&cx, |worktree, _cx| { let repo_result = worktree.read_with(cx, |worktree, _cx| {
if let project::Worktree::Local(local_worktree) = &worktree { if let project::Worktree::Local(local_worktree) = &worktree {
local_worktree.get_local_repo(repo_entry).map(|local_repo| { local_worktree.get_local_repo(repo_entry).map(|local_repo| {
let repo = local_repo.repo(); let repo = local_repo.repo();
@@ -1011,7 +1116,7 @@ impl Thread {
Ok(Some((remote_url, head_sha, repository))) => { Ok(Some((remote_url, head_sha, repository))) => {
// Get diff asynchronously // Get diff asynchronously
let diff = repository let diff = repository
.diff(git::repository::DiffType::HeadToWorktree, cx) .diff(git::repository::DiffType::HeadToWorktree, cx.clone())
.await .await
.ok(); .ok();
@@ -1086,6 +1191,10 @@ impl Thread {
&self.action_log &self.action_log
} }
pub fn project(&self) -> &Entity<Project> {
&self.project
}
pub fn cumulative_token_usage(&self) -> TokenUsage { pub fn cumulative_token_usage(&self) -> TokenUsage {
self.cumulative_token_usage.clone() self.cumulative_token_usage.clone()
} }
@@ -1114,6 +1223,8 @@ pub enum ThreadEvent {
tool_use_id: LanguageModelToolUseId, tool_use_id: LanguageModelToolUseId,
/// The pending tool use that corresponds to this tool. /// The pending tool use that corresponds to this tool.
pending_tool_use: Option<PendingToolUse>, pending_tool_use: Option<PendingToolUse>,
/// Whether the tool was canceled by the user.
canceled: bool,
}, },
} }

View File

@@ -65,6 +65,14 @@ impl ThreadStore {
Ok(this) Ok(this)
} }
pub fn context_server_manager(&self) -> Entity<ContextServerManager> {
self.context_server_manager.clone()
}
pub fn tools(&self) -> Arc<ToolWorkingSet> {
self.tools.clone()
}
/// Returns the number of threads. /// Returns the number of threads.
pub fn thread_count(&self) -> usize { pub fn thread_count(&self) -> usize {
self.threads.len() self.threads.len()
@@ -98,14 +106,14 @@ impl ThreadStore {
) -> Task<Result<Entity<Thread>>> { ) -> Task<Result<Entity<Thread>>> {
let id = id.clone(); let id = id.clone();
let database_future = ThreadsDatabase::global_future(cx); let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let database = database_future.await.map_err(|err| anyhow!(err))?; let database = database_future.await.map_err(|err| anyhow!(err))?;
let thread = database let thread = database
.try_find_thread(id.clone()) .try_find_thread(id.clone())
.await? .await?
.ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?; .ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
cx.new(|cx| { cx.new(|cx| {
Thread::deserialize( Thread::deserialize(
id.clone(), id.clone(),
@@ -125,23 +133,23 @@ impl ThreadStore {
thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx))); thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx)));
let database_future = ThreadsDatabase::global_future(cx); let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let serialized_thread = serialized_thread.await?; let serialized_thread = serialized_thread.await?;
let database = database_future.await.map_err(|err| anyhow!(err))?; let database = database_future.await.map_err(|err| anyhow!(err))?;
database.save_thread(metadata, serialized_thread).await?; database.save_thread(metadata, serialized_thread).await?;
this.update(&mut cx, |this, cx| this.reload(cx))?.await this.update(cx, |this, cx| this.reload(cx))?.await
}) })
} }
pub fn delete_thread(&mut self, id: &ThreadId, cx: &mut Context<Self>) -> Task<Result<()>> { pub fn delete_thread(&mut self, id: &ThreadId, cx: &mut Context<Self>) -> Task<Result<()>> {
let id = id.clone(); let id = id.clone();
let database_future = ThreadsDatabase::global_future(cx); let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let database = database_future.await.map_err(|err| anyhow!(err))?; let database = database_future.await.map_err(|err| anyhow!(err))?;
database.delete_thread(id.clone()).await?; database.delete_thread(id.clone()).await?;
this.update(&mut cx, |this, _cx| { this.update(cx, |this, _cx| {
this.threads.retain(|thread| thread.id != id) this.threads.retain(|thread| thread.id != id)
}) })
}) })
@@ -149,14 +157,14 @@ impl ThreadStore {
pub fn reload(&self, cx: &mut Context<Self>) -> Task<Result<()>> { pub fn reload(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
let database_future = ThreadsDatabase::global_future(cx); let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let threads = database_future let threads = database_future
.await .await
.map_err(|err| anyhow!(err))? .map_err(|err| anyhow!(err))?
.list_threads() .list_threads()
.await?; .await?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.threads = threads; this.threads = threads;
cx.notify(); cx.notify();
}) })
@@ -185,7 +193,7 @@ impl ThreadStore {
cx.spawn({ cx.spawn({
let server = server.clone(); let server = server.clone();
let server_id = server_id.clone(); let server_id = server_id.clone();
|this, mut cx| async move { async move |this, cx| {
let Some(protocol) = server.client() else { let Some(protocol) = server.client() else {
return; return;
}; };
@@ -210,7 +218,7 @@ impl ThreadStore {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
this.update(&mut cx, |this, _cx| { this.update(cx, |this, _cx| {
this.context_server_tool_ids.insert(server_id, tool_ids); this.context_server_tool_ids.insert(server_id, tool_ids);
}) })
.log_err(); .log_err();

View File

@@ -5,13 +5,19 @@ use gpui::Entity;
use scripting_tool::ScriptingTool; use scripting_tool::ScriptingTool;
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip}; use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
use crate::agent_profile::AgentProfile;
pub struct ToolSelector { pub struct ToolSelector {
profiles: Vec<AgentProfile>,
tools: Arc<ToolWorkingSet>, tools: Arc<ToolWorkingSet>,
} }
impl ToolSelector { impl ToolSelector {
pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self { pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self {
Self { tools } Self {
profiles: vec![AgentProfile::read_only(), AgentProfile::code_writer()],
tools,
}
} }
fn build_context_menu( fn build_context_menu(
@@ -19,13 +25,36 @@ impl ToolSelector {
window: &mut Window, window: &mut Window,
cx: &mut Context<Self>, cx: &mut Context<Self>,
) -> Entity<ContextMenu> { ) -> Entity<ContextMenu> {
ContextMenu::build(window, cx, |mut menu, _window, cx| { let profiles = self.profiles.clone();
let tool_set = self.tools.clone();
ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| {
let icon_position = IconPosition::End; let icon_position = IconPosition::End;
let tools_by_source = self.tools.tools_by_source(cx);
let all_tools_enabled = self.tools.are_all_tools_enabled(); menu = menu.header("Profiles");
for profile in profiles.clone() {
menu = menu.toggleable_entry(profile.name.clone(), false, icon_position, None, {
let tools = tool_set.clone();
move |_window, cx| {
tools.disable_source(ToolSource::Native, cx);
tools.enable(
ToolSource::Native,
&profile
.tools
.iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
.collect::<Vec<_>>(),
);
}
});
}
menu = menu.separator();
let tools_by_source = tool_set.tools_by_source(cx);
let all_tools_enabled = tool_set.are_all_tools_enabled();
menu = menu.toggleable_entry("All Tools", all_tools_enabled, icon_position, None, { menu = menu.toggleable_entry("All Tools", all_tools_enabled, icon_position, None, {
let tools = self.tools.clone(); let tools = tool_set.clone();
move |_window, cx| { move |_window, cx| {
if all_tools_enabled { if all_tools_enabled {
tools.disable_all_tools(cx); tools.disable_all_tools(cx);
@@ -41,7 +70,7 @@ impl ToolSelector {
.map(|tool| { .map(|tool| {
let source = tool.source(); let source = tool.source();
let name = tool.name().into(); let name = tool.name().into();
let is_enabled = self.tools.is_enabled(&source, &name); let is_enabled = tool_set.is_enabled(&source, &name);
(source, name, is_enabled) (source, name, is_enabled)
}) })
@@ -51,7 +80,7 @@ impl ToolSelector {
tools.push(( tools.push((
ToolSource::Native, ToolSource::Native,
ScriptingTool::NAME.into(), ScriptingTool::NAME.into(),
self.tools.is_scripting_tool_enabled(), tool_set.is_scripting_tool_enabled(),
)); ));
tools.sort_by(|(_, name_a, _), (_, name_b, _)| name_a.cmp(name_b)); tools.sort_by(|(_, name_a, _), (_, name_b, _)| name_a.cmp(name_b));
} }
@@ -60,7 +89,7 @@ impl ToolSelector {
ToolSource::Native => menu.separator().header("Zed Tools"), ToolSource::Native => menu.separator().header("Zed Tools"),
ToolSource::ContextServer { id } => { ToolSource::ContextServer { id } => {
let all_tools_from_source_enabled = let all_tools_from_source_enabled =
self.tools.are_all_tools_from_source_enabled(&source); tool_set.are_all_tools_from_source_enabled(&source);
menu.separator().header(id).toggleable_entry( menu.separator().header(id).toggleable_entry(
"All Tools", "All Tools",
@@ -68,7 +97,7 @@ impl ToolSelector {
icon_position, icon_position,
None, None,
{ {
let tools = self.tools.clone(); let tools = tool_set.clone();
let source = source.clone(); let source = source.clone();
move |_window, cx| { move |_window, cx| {
if all_tools_from_source_enabled { if all_tools_from_source_enabled {
@@ -84,7 +113,7 @@ impl ToolSelector {
for (source, name, is_enabled) in tools { for (source, name, is_enabled) in tools {
menu = menu.toggleable_entry(name.clone(), is_enabled, icon_position, None, { menu = menu.toggleable_entry(name.clone(), is_enabled, icon_position, None, {
let tools = self.tools.clone(); let tools = tool_set.clone();
move |_window, _cx| { move |_window, _cx| {
if name.as_ref() == ScriptingTool::NAME { if name.as_ref() == ScriptingTool::NAME {
if is_enabled { if is_enabled {

View File

@@ -118,6 +118,22 @@ impl ToolUseState {
this this
} }
pub fn cancel_pending(&mut self) -> Vec<PendingToolUse> {
let mut pending_tools = Vec::new();
for (tool_use_id, tool_use) in self.pending_tool_uses_by_id.drain() {
self.tool_results.insert(
tool_use_id.clone(),
LanguageModelToolResult {
tool_use_id,
content: "Tool canceled by user".into(),
is_error: true,
},
);
pending_tools.push(tool_use.clone());
}
pending_tools
}
pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> { pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
self.pending_tool_uses_by_id.values().collect() self.pending_tool_uses_by_id.values().collect()
} }
@@ -270,9 +286,17 @@ impl ToolUseState {
) { ) {
if let Some(tool_uses) = self.tool_uses_by_assistant_message.get(&message_id) { if let Some(tool_uses) = self.tool_uses_by_assistant_message.get(&message_id) {
for tool_use in tool_uses { for tool_use in tool_uses {
request_message if self.tool_results.contains_key(&tool_use.id) {
.content // Do not send tool uses until they are completed
.push(MessageContent::ToolUse(tool_use.clone())); request_message
.content
.push(MessageContent::ToolUse(tool_use.clone()));
} else {
log::debug!(
"skipped tool use {:?} because it is still pending",
tool_use
);
}
} }
} }
} }
@@ -285,9 +309,19 @@ impl ToolUseState {
if let Some(tool_uses) = self.tool_uses_by_user_message.get(&message_id) { if let Some(tool_uses) = self.tool_uses_by_user_message.get(&message_id) {
for tool_use_id in tool_uses { for tool_use_id in tool_uses {
if let Some(tool_result) = self.tool_results.get(tool_use_id) { if let Some(tool_result) = self.tool_results.get(tool_use_id) {
request_message request_message.content.push(MessageContent::ToolResult(
.content LanguageModelToolResult {
.push(MessageContent::ToolResult(tool_result.clone())); tool_use_id: tool_use_id.clone(),
is_error: tool_result.is_error,
content: if tool_result.content.is_empty() {
// Surprisingly, the API fails if we return an empty string here.
// It thinks we are sending a tool use without a tool result.
"<Tool returned an empty string>".into()
} else {
tool_result.content.clone()
},
},
));
} }
} }
} }

View File

@@ -1144,9 +1144,9 @@ impl AssistantContext {
fn set_language(&mut self, cx: &mut Context<Self>) { fn set_language(&mut self, cx: &mut Context<Self>) {
let markdown = self.language_registry.language_for_name("Markdown"); let markdown = self.language_registry.language_for_name("Markdown");
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let markdown = markdown.await?; let markdown = markdown.await?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.buffer this.buffer
.update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx)); .update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
}) })
@@ -1188,7 +1188,7 @@ impl AssistantContext {
return; return;
}; };
let debounce = self.token_count.is_some(); let debounce = self.token_count.is_some();
self.pending_token_count = cx.spawn(|this, mut cx| { self.pending_token_count = cx.spawn(async move |this, cx| {
async move { async move {
if debounce { if debounce {
cx.background_executor() cx.background_executor()
@@ -1197,13 +1197,14 @@ impl AssistantContext {
} }
let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?; let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.token_count = Some(token_count); this.token_count = Some(token_count);
this.start_cache_warming(&model, cx); this.start_cache_warming(&model, cx);
cx.notify() cx.notify()
}) })
} }
.log_err() .log_err()
.await
}); });
} }
@@ -1342,7 +1343,7 @@ impl AssistantContext {
}; };
let model = Arc::clone(model); let model = Arc::clone(model);
self.pending_cache_warming_task = cx.spawn(|this, mut cx| { self.pending_cache_warming_task = cx.spawn(async move |this, cx| {
async move { async move {
match model.stream_completion(request, &cx).await { match model.stream_completion(request, &cx).await {
Ok(mut stream) => { Ok(mut stream) => {
@@ -1353,13 +1354,14 @@ impl AssistantContext {
log::warn!("Cache warming failed: {}", e); log::warn!("Cache warming failed: {}", e);
} }
}; };
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.update_cache_status_for_completion(cx); this.update_cache_status_for_completion(cx);
}) })
.ok(); .ok();
anyhow::Ok(()) anyhow::Ok(())
} }
.log_err() .log_err()
.await
}); });
} }
@@ -1916,7 +1918,7 @@ impl AssistantContext {
}); });
self.reparse(cx); self.reparse(cx);
let insert_output_task = cx.spawn(|this, mut cx| async move { let insert_output_task = cx.spawn(async move |this, cx| {
let run_command = async { let run_command = async {
let mut stream = output.await?; let mut stream = output.await?;
@@ -1933,7 +1935,7 @@ impl AssistantContext {
while let Some(event) = stream.next().await { while let Some(event) = stream.next().await {
let event = event?; let event = event?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.buffer.update(cx, |buffer, _cx| { this.buffer.update(cx, |buffer, _cx| {
buffer.finalize_last_transaction(); buffer.finalize_last_transaction();
buffer.start_transaction() buffer.start_transaction()
@@ -2034,7 +2036,7 @@ impl AssistantContext {
})?; })?;
} }
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.buffer.update(cx, |buffer, cx| { this.buffer.update(cx, |buffer, cx| {
buffer.finalize_last_transaction(); buffer.finalize_last_transaction();
buffer.start_transaction(); buffer.start_transaction();
@@ -2080,7 +2082,7 @@ impl AssistantContext {
let command_result = run_command.await; let command_result = run_command.await;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
let version = this.version.clone(); let version = this.version.clone();
let timestamp = this.next_timestamp(); let timestamp = this.next_timestamp();
let Some(invoked_slash_command) = this.invoked_slash_commands.get_mut(&command_id) let Some(invoked_slash_command) = this.invoked_slash_commands.get_mut(&command_id)
@@ -2210,7 +2212,7 @@ impl AssistantContext {
let pending_completion_id = post_inc(&mut self.completion_count); let pending_completion_id = post_inc(&mut self.completion_count);
let task = cx.spawn({ let task = cx.spawn({
|this, mut cx| async move { async move |this, cx| {
let stream = model.stream_completion(request, &cx); let stream = model.stream_completion(request, &cx);
let assistant_message_id = assistant_message.id; let assistant_message_id = assistant_message.id;
let mut response_latency = None; let mut response_latency = None;
@@ -2225,7 +2227,7 @@ impl AssistantContext {
} }
let event = event?; let event = event?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
let message_ix = this let message_ix = this
.message_anchors .message_anchors
.iter() .iter()
@@ -2264,7 +2266,7 @@ impl AssistantContext {
})?; })?;
smol::future::yield_now().await; smol::future::yield_now().await;
} }
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.pending_completions this.pending_completions
.retain(|completion| completion.id != pending_completion_id); .retain(|completion| completion.id != pending_completion_id);
this.summarize(false, cx); this.summarize(false, cx);
@@ -2276,7 +2278,7 @@ impl AssistantContext {
let result = stream_completion.await; let result = stream_completion.await;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
let error_message = if let Some(error) = result.as_ref().err() { let error_message = if let Some(error) = result.as_ref().err() {
if error.is::<PaymentRequiredError>() { if error.is::<PaymentRequiredError>() {
cx.emit(ContextEvent::ShowPaymentRequiredError); cx.emit(ContextEvent::ShowPaymentRequiredError);
@@ -2786,7 +2788,7 @@ impl AssistantContext {
cache: false, cache: false,
}); });
self.pending_summary = cx.spawn(|this, mut cx| { self.pending_summary = cx.spawn(async move |this, cx| {
async move { async move {
let stream = model.stream_completion_text(request, &cx); let stream = model.stream_completion_text(request, &cx);
let mut messages = stream.await?; let mut messages = stream.await?;
@@ -2795,7 +2797,7 @@ impl AssistantContext {
while let Some(message) = messages.stream.next().await { while let Some(message) = messages.stream.next().await {
let text = message?; let text = message?;
let mut lines = text.lines(); let mut lines = text.lines();
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
let version = this.version.clone(); let version = this.version.clone();
let timestamp = this.next_timestamp(); let timestamp = this.next_timestamp();
let summary = this.summary.get_or_insert(ContextSummary::default()); let summary = this.summary.get_or_insert(ContextSummary::default());
@@ -2819,7 +2821,7 @@ impl AssistantContext {
} }
} }
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
let version = this.version.clone(); let version = this.version.clone();
let timestamp = this.next_timestamp(); let timestamp = this.next_timestamp();
if let Some(summary) = this.summary.as_mut() { if let Some(summary) = this.summary.as_mut() {
@@ -2837,6 +2839,7 @@ impl AssistantContext {
anyhow::Ok(()) anyhow::Ok(())
} }
.log_err() .log_err()
.await
}); });
} }
} }
@@ -2943,12 +2946,12 @@ impl AssistantContext {
return; return;
} }
self.pending_save = cx.spawn(|this, mut cx| async move { self.pending_save = cx.spawn(async move |this, cx| {
if let Some(debounce) = debounce { if let Some(debounce) = debounce {
cx.background_executor().timer(debounce).await; cx.background_executor().timer(debounce).await;
} }
let (old_path, summary) = this.read_with(&cx, |this, _| { let (old_path, summary) = this.read_with(cx, |this, _| {
let path = this.path.clone(); let path = this.path.clone();
let summary = if let Some(summary) = this.summary.as_ref() { let summary = if let Some(summary) = this.summary.as_ref() {
if summary.done { if summary.done {
@@ -2963,7 +2966,7 @@ impl AssistantContext {
})?; })?;
if let Some(summary) = summary { if let Some(summary) = summary {
let context = this.read_with(&cx, |this, cx| this.serialize(cx))?; let context = this.read_with(cx, |this, cx| this.serialize(cx))?;
let mut discriminant = 1; let mut discriminant = 1;
let mut new_path; let mut new_path;
loop { loop {
@@ -2995,7 +2998,7 @@ impl AssistantContext {
} }
} }
this.update(&mut cx, |this, _| this.path = Some(new_path))?; this.update(cx, |this, _| this.path = Some(new_path))?;
} }
Ok(()) Ok(())

View File

@@ -229,6 +229,7 @@ impl ContextEditor {
editor.set_show_git_diff_gutter(false, cx); editor.set_show_git_diff_gutter(false, cx);
editor.set_show_code_actions(false, cx); editor.set_show_code_actions(false, cx);
editor.set_show_runnables(false, cx); editor.set_show_runnables(false, cx);
editor.set_show_breakpoints(false, cx);
editor.set_show_wrap_guides(false, cx); editor.set_show_wrap_guides(false, cx);
editor.set_show_indent_guides(false, cx); editor.set_show_indent_guides(false, cx);
editor.set_completion_provider(Some(Box::new(completion_provider))); editor.set_completion_provider(Some(Box::new(completion_provider)));
@@ -906,7 +907,7 @@ impl ContextEditor {
if editor_state.opened_patch != patch { if editor_state.opened_patch != patch {
state.update_task = Some({ state.update_task = Some({
let this = this.clone(); let this = this.clone();
cx.spawn_in(window, |_, cx| async move { cx.spawn_in(window, async move |_, cx| {
Self::update_patch_editor(this.clone(), patch, cx) Self::update_patch_editor(this.clone(), patch, cx)
.await .await
.log_err(); .log_err();
@@ -1069,10 +1070,9 @@ impl ContextEditor {
}) })
.ok(); .ok();
} else { } else {
patch_state.update_task = patch_state.update_task = Some(cx.spawn_in(window, async move |this, cx| {
Some(cx.spawn_in(window, move |this, cx| async move { Self::open_patch_editor(this, new_patch, cx).await.log_err();
Self::open_patch_editor(this, new_patch, cx).await.log_err(); }));
}));
} }
} }
} }
@@ -1102,10 +1102,10 @@ impl ContextEditor {
async fn open_patch_editor( async fn open_patch_editor(
this: WeakEntity<Self>, this: WeakEntity<Self>,
patch: AssistantPatch, patch: AssistantPatch,
mut cx: AsyncWindowContext, cx: &mut AsyncWindowContext,
) -> Result<()> { ) -> Result<()> {
let project = this.read_with(&cx, |this, _| this.project.clone())?; let project = this.read_with(cx, |this, _| this.project.clone())?;
let resolved_patch = patch.resolve(project.clone(), &mut cx).await; let resolved_patch = patch.resolve(project.clone(), cx).await;
let editor = cx.new_window_entity(|window, cx| { let editor = cx.new_window_entity(|window, cx| {
let editor = ProposedChangesEditor::new( let editor = ProposedChangesEditor::new(
@@ -1129,7 +1129,7 @@ impl ContextEditor {
editor editor
})?; })?;
this.update(&mut cx, |this, _| { this.update(cx, |this, _| {
if let Some(patch_state) = this.patches.get_mut(&patch.range) { if let Some(patch_state) = this.patches.get_mut(&patch.range) {
patch_state.editor = Some(PatchEditorState { patch_state.editor = Some(PatchEditorState {
editor: editor.downgrade(), editor: editor.downgrade(),
@@ -1138,8 +1138,8 @@ impl ContextEditor {
patch_state.update_task.take(); patch_state.update_task.take();
} }
})?; })?;
this.read_with(&cx, |this, _| this.workspace.clone())? this.read_with(cx, |this, _| this.workspace.clone())?
.update_in(&mut cx, |workspace, window, cx| { .update_in(cx, |workspace, window, cx| {
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, false, window, cx) workspace.add_item_to_active_pane(Box::new(editor.clone()), None, false, window, cx)
}) })
.log_err(); .log_err();
@@ -1150,11 +1150,11 @@ impl ContextEditor {
async fn update_patch_editor( async fn update_patch_editor(
this: WeakEntity<Self>, this: WeakEntity<Self>,
patch: AssistantPatch, patch: AssistantPatch,
mut cx: AsyncWindowContext, cx: &mut AsyncWindowContext,
) -> Result<()> { ) -> Result<()> {
let project = this.update(&mut cx, |this, _| this.project.clone())?; let project = this.update(cx, |this, _| this.project.clone())?;
let resolved_patch = patch.resolve(project.clone(), &mut cx).await; let resolved_patch = patch.resolve(project.clone(), cx).await;
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
let patch_state = this.patches.get_mut(&patch.range)?; let patch_state = this.patches.get_mut(&patch.range)?;
let locations = resolved_patch let locations = resolved_patch
@@ -1624,14 +1624,14 @@ impl ContextEditor {
.map(|path| Workspace::project_path_for_path(project.clone(), &path, false, cx)) .map(|path| Workspace::project_path_for_path(project.clone(), &path, false, cx))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
cx.spawn(move |_, cx| async move { cx.spawn(async move |_, cx| {
let mut paths = vec![]; let mut paths = vec![];
let mut worktrees = vec![]; let mut worktrees = vec![];
let opened_paths = futures::future::join_all(tasks).await; let opened_paths = futures::future::join_all(tasks).await;
for (worktree, project_path) in opened_paths.into_iter().flatten() { for (worktree, project_path) in opened_paths.into_iter().flatten() {
let Ok(worktree_root_name) = let Ok(worktree_root_name) =
worktree.read_with(&cx, |worktree, _| worktree.root_name().to_string()) worktree.read_with(cx, |worktree, _| worktree.root_name().to_string())
else { else {
continue; continue;
}; };
@@ -1648,12 +1648,12 @@ impl ContextEditor {
}; };
window window
.spawn(cx, |mut cx| async move { .spawn(cx, async move |cx| {
let (paths, dragged_file_worktrees) = paths.await; let (paths, dragged_file_worktrees) = paths.await;
let cmd_name = FileSlashCommand.name(); let cmd_name = FileSlashCommand.name();
context_editor_view context_editor_view
.update_in(&mut cx, |context_editor, window, cx| { .update_in(cx, |context_editor, window, cx| {
let file_argument = paths let file_argument = paths
.into_iter() .into_iter()
.map(|path| path.to_string_lossy().to_string()) .map(|path| path.to_string_lossy().to_string())
@@ -2199,9 +2199,9 @@ impl ContextEditor {
.log_err(); .log_err();
if let Some(client) = client { if let Some(client) = client {
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
client.authenticate_and_connect(true, &mut cx).await?; client.authenticate_and_connect(true, cx).await?;
this.update(&mut cx, |_, cx| cx.notify()) this.update(cx, |_, cx| cx.notify())
}) })
.detach_and_log_err(cx) .detach_and_log_err(cx)
} }
@@ -3160,10 +3160,10 @@ impl FollowableItem for ContextEditor {
assistant_panel_delegate.open_remote_context(workspace, context_id, window, cx) assistant_panel_delegate.open_remote_context(workspace, context_id, window, cx)
}); });
Some(window.spawn(cx, |mut cx| async move { Some(window.spawn(cx, async move |cx| {
let context_editor = context_editor_task.await?; let context_editor = context_editor_task.await?;
context_editor context_editor
.update_in(&mut cx, |context_editor, window, cx| { .update_in(cx, |context_editor, window, cx| {
context_editor.remote_id = Some(id); context_editor.remote_id = Some(id);
context_editor.editor.update(cx, |editor, cx| { context_editor.editor.update(cx, |editor, cx| {
editor.apply_update_proto( editor.apply_update_proto(

View File

@@ -164,9 +164,9 @@ impl PickerDelegate for SavedContextPickerDelegate {
cx: &mut Context<Picker<Self>>, cx: &mut Context<Picker<Self>>,
) -> Task<()> { ) -> Task<()> {
let search = self.store.read(cx).search(query, cx); let search = self.store.read(cx).search(query, cx);
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let matches = search.await; let matches = search.await;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
let host_contexts = this.delegate.store.read(cx).host_contexts(); let host_contexts = this.delegate.store.read(cx).host_contexts();
this.delegate.matches = host_contexts this.delegate.matches = host_contexts
.iter() .iter()

View File

@@ -100,7 +100,7 @@ impl ContextStore {
let fs = project.read(cx).fs().clone(); let fs = project.read(cx).fs().clone();
let languages = project.read(cx).languages().clone(); let languages = project.read(cx).languages().clone();
let telemetry = project.read(cx).client().telemetry().clone(); let telemetry = project.read(cx).client().telemetry().clone();
cx.spawn(|mut cx| async move { cx.spawn(async move |cx| {
const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100); const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100);
let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await; let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await;
@@ -125,16 +125,15 @@ impl ContextStore {
languages, languages,
slash_commands, slash_commands,
telemetry, telemetry,
_watch_updates: cx.spawn(|this, mut cx| { _watch_updates: cx.spawn(async move |this, cx| {
async move { async move {
while events.next().await.is_some() { while events.next().await.is_some() {
this.update(&mut cx, |this, cx| this.reload(cx))? this.update(cx, |this, cx| this.reload(cx))?.await.log_err();
.await
.log_err();
} }
anyhow::Ok(()) anyhow::Ok(())
} }
.log_err() .log_err()
.await
}), }),
client_subscription: None, client_subscription: None,
_project_subscriptions: vec![ _project_subscriptions: vec![
@@ -395,7 +394,7 @@ impl ContextStore {
let prompt_builder = self.prompt_builder.clone(); let prompt_builder = self.prompt_builder.clone();
let slash_commands = self.slash_commands.clone(); let slash_commands = self.slash_commands.clone();
let request = self.client.request(proto::CreateContext { project_id }); let request = self.client.request(proto::CreateContext { project_id });
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let response = request.await?; let response = request.await?;
let context_id = ContextId::from_proto(response.context_id); let context_id = ContextId::from_proto(response.context_id);
let context_proto = response.context.context("invalid context")?; let context_proto = response.context.context("invalid context")?;
@@ -421,8 +420,8 @@ impl ContextStore {
.collect::<Result<Vec<_>>>() .collect::<Result<Vec<_>>>()
}) })
.await?; .await?;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?; context.update(cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context existing_context
} else { } else {
@@ -457,7 +456,7 @@ impl ContextStore {
let prompt_builder = self.prompt_builder.clone(); let prompt_builder = self.prompt_builder.clone();
let slash_commands = self.slash_commands.clone(); let slash_commands = self.slash_commands.clone();
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let saved_context = load.await?; let saved_context = load.await?;
let context = cx.new(|cx| { let context = cx.new(|cx| {
AssistantContext::deserialize( AssistantContext::deserialize(
@@ -471,7 +470,7 @@ impl ContextStore {
cx, cx,
) )
})?; })?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_path(&path, cx) { if let Some(existing_context) = this.loaded_context_for_path(&path, cx) {
existing_context existing_context
} else { } else {
@@ -489,7 +488,7 @@ impl ContextStore {
) -> Task<Result<()>> { ) -> Task<Result<()>> {
let fs = self.fs.clone(); let fs = self.fs.clone();
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
fs.remove_file( fs.remove_file(
&path, &path,
RemoveOptions { RemoveOptions {
@@ -499,7 +498,7 @@ impl ContextStore {
) )
.await?; .await?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.contexts.retain(|context| { this.contexts.retain(|context| {
context context
.upgrade() .upgrade()
@@ -565,7 +564,7 @@ impl ContextStore {
}); });
let prompt_builder = self.prompt_builder.clone(); let prompt_builder = self.prompt_builder.clone();
let slash_commands = self.slash_commands.clone(); let slash_commands = self.slash_commands.clone();
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let response = request.await?; let response = request.await?;
let context_proto = response.context.context("invalid context")?; let context_proto = response.context.context("invalid context")?;
let context = cx.new(|cx| { let context = cx.new(|cx| {
@@ -590,8 +589,8 @@ impl ContextStore {
.collect::<Result<Vec<_>>>() .collect::<Result<Vec<_>>>()
}) })
.await?; .await?;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?; context.update(cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context existing_context
} else { } else {
@@ -700,12 +699,12 @@ impl ContextStore {
project_id, project_id,
contexts, contexts,
}); });
cx.spawn(|this, cx| async move { cx.spawn(async move |this, cx| {
let response = request.await?; let response = request.await?;
let mut context_ids = Vec::new(); let mut context_ids = Vec::new();
let mut operations = Vec::new(); let mut operations = Vec::new();
this.read_with(&cx, |this, cx| { this.read_with(cx, |this, cx| {
for context_version_proto in response.contexts { for context_version_proto in response.contexts {
let context_version = ContextVersion::from_proto(&context_version_proto); let context_version = ContextVersion::from_proto(&context_version_proto);
let context_id = ContextId::from_proto(context_version_proto.context_id); let context_id = ContextId::from_proto(context_version_proto.context_id);
@@ -768,7 +767,7 @@ impl ContextStore {
fn reload(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> { fn reload(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
let fs = self.fs.clone(); let fs = self.fs.clone();
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
fs.create_dir(contexts_dir()).await?; fs.create_dir(contexts_dir()).await?;
let mut paths = fs.read_dir(contexts_dir()).await?; let mut paths = fs.read_dir(contexts_dir()).await?;
@@ -808,7 +807,7 @@ impl ContextStore {
} }
contexts.sort_unstable_by_key(|context| Reverse(context.mtime)); contexts.sort_unstable_by_key(|context| Reverse(context.mtime));
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.contexts_metadata = contexts; this.contexts_metadata = contexts;
cx.notify(); cx.notify();
}) })
@@ -819,7 +818,7 @@ impl ContextStore {
cx.update_entity( cx.update_entity(
&self.context_server_manager, &self.context_server_manager,
|context_server_manager, cx| { |context_server_manager, cx| {
for server in context_server_manager.servers() { for server in context_server_manager.running_servers() {
context_server_manager context_server_manager
.restart_server(&server.id(), cx) .restart_server(&server.id(), cx)
.detach_and_log_err(cx); .detach_and_log_err(cx);
@@ -850,7 +849,7 @@ impl ContextStore {
cx.spawn({ cx.spawn({
let server = server.clone(); let server = server.clone();
let server_id = server_id.clone(); let server_id = server_id.clone();
|this, mut cx| async move { async move |this, cx| {
let Some(protocol) = server.client() else { let Some(protocol) = server.client() else {
return; return;
}; };
@@ -875,7 +874,7 @@ impl ContextStore {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
this.update(&mut cx, |this, _cx| { this.update( cx, |this, _cx| {
this.context_server_slash_command_ids this.context_server_slash_command_ids
.insert(server_id.clone(), slash_command_ids); .insert(server_id.clone(), slash_command_ids);
}) })

View File

@@ -59,7 +59,7 @@ impl SlashCommandCompletionProvider {
let command_name = command_name.to_string(); let command_name = command_name.to_string();
let editor = self.editor.clone(); let editor = self.editor.clone();
let workspace = self.workspace.clone(); let workspace = self.workspace.clone();
window.spawn(cx, |mut cx| async move { window.spawn(cx, async move |cx| {
let matches = match_strings( let matches = match_strings(
&candidates, &candidates,
&command_name, &command_name,

View File

@@ -100,7 +100,7 @@ impl PickerDelegate for SlashCommandDelegate {
cx: &mut Context<Picker<Self>>, cx: &mut Context<Picker<Self>>,
) -> Task<()> { ) -> Task<()> {
let all_commands = self.all_commands.clone(); let all_commands = self.all_commands.clone();
cx.spawn_in(window, |this, mut cx| async move { cx.spawn_in(window, async move |this, cx| {
let filtered_commands = cx let filtered_commands = cx
.background_spawn(async move { .background_spawn(async move {
if query.is_empty() { if query.is_empty() {
@@ -119,7 +119,7 @@ impl PickerDelegate for SlashCommandDelegate {
}) })
.await; .await;
this.update_in(&mut cx, |this, window, cx| { this.update_in(cx, |this, window, cx| {
this.delegate.filtered_commands = filtered_commands; this.delegate.filtered_commands = filtered_commands;
this.delegate.set_selected_index(0, window, cx); this.delegate.set_selected_index(0, window, cx);
cx.notify(); cx.notify();

View File

@@ -63,14 +63,14 @@ impl Eval {
model: Arc<dyn LanguageModel>, model: Arc<dyn LanguageModel>,
cx: &mut App, cx: &mut App,
) -> Task<anyhow::Result<EvalOutput>> { ) -> Task<anyhow::Result<EvalOutput>> {
cx.spawn(move |mut cx| async move { cx.spawn(async move |cx| {
checkout_repo(&self.eval_setup, &self.repo_path).await?; checkout_repo(&self.eval_setup, &self.repo_path).await?;
let (assistant, done_rx) = let (assistant, done_rx) =
cx.update(|cx| HeadlessAssistant::new(app_state.clone(), cx))??; cx.update(|cx| HeadlessAssistant::new(app_state.clone(), cx))??;
let _worktree = assistant let _worktree = assistant
.update(&mut cx, |assistant, cx| { .update(cx, |assistant, cx| {
assistant.project.update(cx, |project, cx| { assistant.project.update(cx, |project, cx| {
project.create_worktree(&self.repo_path, true, cx) project.create_worktree(&self.repo_path, true, cx)
}) })
@@ -79,10 +79,10 @@ impl Eval {
let start_time = std::time::SystemTime::now(); let start_time = std::time::SystemTime::now();
assistant.update(&mut cx, |assistant, cx| { assistant.update(cx, |assistant, cx| {
assistant.thread.update(cx, |thread, cx| { assistant.thread.update(cx, |thread, cx| {
let context = vec![]; let context = vec![];
thread.insert_user_message(self.user_prompt.clone(), context, cx); thread.insert_user_message(self.user_prompt.clone(), context, None, cx);
thread.send_to_model(model, RequestKind::Chat, cx); thread.send_to_model(model, RequestKind::Chat, cx);
}); });
})?; })?;
@@ -93,7 +93,7 @@ impl Eval {
let diff = query_git(&self.repo_path, vec!["diff"]).await?; let diff = query_git(&self.repo_path, vec!["diff"]).await?;
assistant.update(&mut cx, |assistant, cx| { assistant.update(cx, |assistant, cx| {
let thread = assistant.thread.read(cx); let thread = assistant.thread.read(cx);
let last_message = thread.messages().last().unwrap(); let last_message = thread.messages().last().unwrap();
if last_message.role != language_model::Role::Assistant { if last_message.role != language_model::Role::Assistant {

View File

@@ -1,5 +1,5 @@
use anyhow::anyhow; use anyhow::anyhow;
use assistant2::{Thread, ThreadEvent, ThreadStore}; use assistant2::{RequestKind, Thread, ThreadEvent, ThreadStore};
use assistant_tool::ToolWorkingSet; use assistant_tool::ToolWorkingSet;
use client::{Client, UserStore}; use client::{Client, UserStore};
use collections::HashMap; use collections::HashMap;
@@ -103,6 +103,7 @@ impl HeadlessAssistant {
ThreadEvent::ToolFinished { ThreadEvent::ToolFinished {
tool_use_id, tool_use_id,
pending_tool_use, pending_tool_use,
..
} => { } => {
if let Some(pending_tool_use) = pending_tool_use { if let Some(pending_tool_use) = pending_tool_use {
println!( println!(
@@ -121,9 +122,8 @@ impl HeadlessAssistant {
let model_registry = LanguageModelRegistry::read_global(cx); let model_registry = LanguageModelRegistry::read_global(cx);
if let Some(model) = model_registry.active_model() { if let Some(model) = model_registry.active_model() {
thread.update(cx, |thread, cx| { thread.update(cx, |thread, cx| {
// Currently evals do not support specifying context. thread.attach_tool_results(vec![], cx);
let updated_context = vec![]; thread.send_to_model(model, RequestKind::Chat, cx);
thread.send_tool_results_to_model(model, updated_context, cx);
}); });
} }
} }
@@ -163,7 +163,7 @@ pub fn init(cx: &mut App) -> Arc<HeadlessAppState> {
language::init(cx); language::init(cx);
language_model::init(client.clone(), cx); language_model::init(client.clone(), cx);
language_models::init(user_store.clone(), client.clone(), fs.clone(), cx); language_models::init(user_store.clone(), client.clone(), fs.clone(), cx);
assistant_tools::init(cx); assistant_tools::init(client.http_client().clone(), cx);
context_server::init(cx); context_server::init(cx);
let stdout_is_a_pty = false; let stdout_is_a_pty = false;
let prompt_builder = PromptBuilder::load(fs.clone(), stdout_is_a_pty, cx); let prompt_builder = PromptBuilder::load(fs.clone(), stdout_is_a_pty, cx);
@@ -212,7 +212,7 @@ pub fn authenticate_model_provider(
pub async fn send_language_model_request( pub async fn send_language_model_request(
model: Arc<dyn LanguageModel>, model: Arc<dyn LanguageModel>,
request: LanguageModelRequest, request: LanguageModelRequest,
cx: AsyncApp, cx: &mut AsyncApp,
) -> anyhow::Result<String> { ) -> anyhow::Result<String> {
match model.stream_completion_text(request, &cx).await { match model.stream_completion_text(request, &cx).await {
Ok(mut stream) => { Ok(mut stream) => {

View File

@@ -61,7 +61,7 @@ impl Judge {
}; };
let model = self.model.clone(); let model = self.model.clone();
cx.spawn(move |cx| send_language_model_request(model, request, cx)) cx.spawn(async move |cx| send_language_model_request(model, request, cx).await)
} }
} }

View File

@@ -4,7 +4,7 @@ mod judge;
use clap::Parser; use clap::Parser;
use eval::{Eval, EvalOutput}; use eval::{Eval, EvalOutput};
use futures::{stream, StreamExt}; use futures::future;
use gpui::{Application, AsyncApp}; use gpui::{Application, AsyncApp};
use headless_assistant::{authenticate_model_provider, find_model, HeadlessAppState}; use headless_assistant::{authenticate_model_provider, find_model, HeadlessAppState};
use itertools::Itertools; use itertools::Itertools;
@@ -48,7 +48,12 @@ fn main() {
let crate_dir = PathBuf::from("../zed-agent-bench"); let crate_dir = PathBuf::from("../zed-agent-bench");
let evaluation_data_dir = crate_dir.join("evaluation_data").canonicalize().unwrap(); let evaluation_data_dir = crate_dir.join("evaluation_data").canonicalize().unwrap();
let repos_dir = crate_dir.join("repos").canonicalize().unwrap();
let repos_dir = crate_dir.join("repos");
if !repos_dir.exists() {
std::fs::create_dir_all(&repos_dir).unwrap();
}
let repos_dir = repos_dir.canonicalize().unwrap();
let all_evals = std::fs::read_dir(&evaluation_data_dir) let all_evals = std::fs::read_dir(&evaluation_data_dir)
.unwrap() .unwrap()
@@ -106,7 +111,7 @@ fn main() {
let editor_model_provider_id = editor_model.provider_id(); let editor_model_provider_id = editor_model.provider_id();
let judge_model_provider_id = judge_model.provider_id(); let judge_model_provider_id = judge_model.provider_id();
cx.spawn(move |cx| async move { cx.spawn(async move |cx| {
// Authenticate all model providers first // Authenticate all model providers first
cx.update(|cx| authenticate_model_provider(model_provider_id.clone(), cx)) cx.update(|cx| authenticate_model_provider(model_provider_id.clone(), cx))
.unwrap() .unwrap()
@@ -121,12 +126,13 @@ fn main() {
.await .await
.unwrap(); .unwrap();
let loaded_evals = stream::iter(evals_to_run) let eval_load_futures = evals_to_run
.into_iter()
.map(|eval_name| { .map(|eval_name| {
let eval_path = evaluation_data_dir.join(&eval_name); let eval_path = evaluation_data_dir.join(&eval_name);
let repos_dir = repos_dir.clone(); let load_future = Eval::load(eval_name.clone(), eval_path, &repos_dir);
async move { async move {
match Eval::load(eval_name.clone(), eval_path, &repos_dir).await { match load_future.await {
Ok(eval) => Some(eval), Ok(eval) => Some(eval),
Err(err) => { Err(err) => {
// TODO: Persist errors / surface errors at the end. // TODO: Persist errors / surface errors at the end.
@@ -136,8 +142,9 @@ fn main() {
} }
} }
}) })
.buffer_unordered(args.concurrency) .collect::<Vec<_>>();
.collect::<Vec<_>>()
let loaded_evals = future::join_all(eval_load_futures)
.await .await
.into_iter() .into_iter()
.flatten() .flatten()
@@ -155,7 +162,8 @@ fn main() {
// Sort groups in descending order, so that bigger groups start first. // Sort groups in descending order, so that bigger groups start first.
evals_grouped_by_url.sort_by_key(|evals| cmp::Reverse(evals.len())); evals_grouped_by_url.sort_by_key(|evals| cmp::Reverse(evals.len()));
let results = stream::iter(evals_grouped_by_url) let result_futures = evals_grouped_by_url
.into_iter()
.map(|evals| { .map(|evals| {
let model = model.clone(); let model = model.clone();
let judge_model = judge_model.clone(); let judge_model = judge_model.clone();
@@ -180,8 +188,9 @@ fn main() {
results results
} }
}) })
.buffer_unordered(args.concurrency) .collect::<Vec<_>>();
.collect::<Vec<_>>()
let results = future::join_all(result_futures)
.await .await
.into_iter() .into_iter()
.flatten() .flatten()

View File

@@ -77,8 +77,8 @@ impl SlashCommand for AutoCommand {
let cx: &mut App = cx; let cx: &mut App = cx;
cx.spawn(|cx: gpui::AsyncApp| async move { cx.spawn(async move |cx| {
let task = project_index.read_with(&cx, |project_index, cx| { let task = project_index.read_with(cx, |project_index, cx| {
project_index.flush_summary_backlogs(cx) project_index.flush_summary_backlogs(cx)
})?; })?;
@@ -117,9 +117,9 @@ impl SlashCommand for AutoCommand {
return Task::ready(Err(anyhow!("no project indexer"))); return Task::ready(Err(anyhow!("no project indexer")));
}; };
let task = window.spawn(cx, |cx| async move { let task = window.spawn(cx, async move |cx| {
let summaries = project_index let summaries = project_index
.read_with(&cx, |project_index, cx| project_index.all_summaries(cx))? .read_with(cx, |project_index, cx| project_index.all_summaries(cx))?
.await?; .await?;
commands_for_summaries(&summaries, &original_prompt, &cx).await commands_for_summaries(&summaries, &original_prompt, &cx).await

View File

@@ -186,7 +186,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx); let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
window.spawn(cx, move |_| async move { window.spawn(cx, async move |_| {
task.await? task.await?
.map(|output| output.to_event_stream()) .map(|output| output.to_event_stream())
.ok_or_else(|| anyhow!("No diagnostics found")) .ok_or_else(|| anyhow!("No diagnostics found"))
@@ -268,7 +268,7 @@ fn collect_diagnostics(
}) })
.collect(); .collect();
cx.spawn(|mut cx| async move { cx.spawn(async move |cx| {
let mut output = SlashCommandOutput::default(); let mut output = SlashCommandOutput::default();
if let Some(error_source) = error_source.as_ref() { if let Some(error_source) = error_source.as_ref() {
@@ -299,7 +299,7 @@ fn collect_diagnostics(
} }
if let Some(buffer) = project_handle if let Some(buffer) = project_handle
.update(&mut cx, |project, cx| project.open_buffer(project_path, cx))? .update(cx, |project, cx| project.open_buffer(project_path, cx))?
.await .await
.log_err() .log_err()
{ {

View File

@@ -241,7 +241,7 @@ fn collect_files(
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let (events_tx, events_rx) = mpsc::unbounded(); let (events_tx, events_rx) = mpsc::unbounded();
cx.spawn(|mut cx| async move { cx.spawn(async move |cx| {
for snapshot in snapshots { for snapshot in snapshots {
let worktree_id = snapshot.id(); let worktree_id = snapshot.id();
let mut directory_stack: Vec<Arc<Path>> = Vec::new(); let mut directory_stack: Vec<Arc<Path>> = Vec::new();
@@ -352,7 +352,7 @@ fn collect_files(
)))?; )))?;
} else if entry.is_file() { } else if entry.is_file() {
let Some(open_buffer_task) = project_handle let Some(open_buffer_task) = project_handle
.update(&mut cx, |project, cx| { .update(cx, |project, cx| {
project.open_buffer((worktree_id, &entry.path), cx) project.open_buffer((worktree_id, &entry.path), cx)
}) })
.ok() .ok()
@@ -361,7 +361,7 @@ fn collect_files(
}; };
if let Some(buffer) = open_buffer_task.await.log_err() { if let Some(buffer) = open_buffer_task.await.log_err() {
let mut output = SlashCommandOutput::default(); let mut output = SlashCommandOutput::default();
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?; let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
append_buffer_to_output( append_buffer_to_output(
&snapshot, &snapshot,
Some(&path_including_worktree_name), Some(&path_including_worktree_name),

View File

@@ -99,7 +99,7 @@ impl SlashCommand for ProjectSlashCommand {
return Task::ready(Err(anyhow::anyhow!("no project indexer"))); return Task::ready(Err(anyhow::anyhow!("no project indexer")));
}; };
window.spawn(cx, |mut cx| async move { window.spawn(cx, async move |cx| {
let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?; let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;
let prompt = let prompt =
@@ -123,7 +123,7 @@ impl SlashCommand for ProjectSlashCommand {
.search_queries; .search_queries;
let results = project_index let results = project_index
.read_with(&cx, |project_index, cx| { .read_with(cx, |project_index, cx| {
project_index.search(search_queries.clone(), 25, cx) project_index.search(search_queries.clone(), 25, cx)
})? })?
.await?; .await?;

View File

@@ -109,9 +109,9 @@ impl SlashCommand for SearchSlashCommand {
return Task::ready(Err(anyhow::anyhow!("no project indexer"))); return Task::ready(Err(anyhow::anyhow!("no project indexer")));
}; };
window.spawn(cx, |cx| async move { window.spawn(cx, async move |cx| {
let results = project_index let results = project_index
.read_with(&cx, |project_index, cx| { .read_with(cx, |project_index, cx| {
project_index.search(vec![query.clone()], limit.unwrap_or(5), cx) project_index.search(vec![query.clone()], limit.unwrap_or(5), cx)
})? })?
.await?; .await?;

View File

@@ -86,7 +86,7 @@ impl SlashCommand for TabSlashCommand {
tab_items_for_queries(workspace, &[current_query], cancel, false, window, cx); tab_items_for_queries(workspace, &[current_query], cancel, false, window, cx);
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
window.spawn(cx, |_| async move { window.spawn(cx, async move |_| {
let tab_items = tab_items_search.await?; let tab_items = tab_items_search.await?;
let run_command = tab_items.len() == 1; let run_command = tab_items.len() == 1;
let tab_completion_items = tab_items.into_iter().filter_map(|(path, ..)| { let tab_completion_items = tab_items.into_iter().filter_map(|(path, ..)| {
@@ -172,11 +172,11 @@ fn tab_items_for_queries(
) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> { ) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> {
let empty_query = queries.is_empty() || queries.iter().all(|query| query.trim().is_empty()); let empty_query = queries.is_empty() || queries.iter().all(|query| query.trim().is_empty());
let queries = queries.to_owned(); let queries = queries.to_owned();
window.spawn(cx, |mut cx| async move { window.spawn(cx, async move |cx| {
let mut open_buffers = let mut open_buffers =
workspace workspace
.context("no workspace")? .context("no workspace")?
.update(&mut cx, |workspace, cx| { .update(cx, |workspace, cx| {
if strict_match && empty_query { if strict_match && empty_query {
let snapshot = active_item_buffer(workspace, cx)?; let snapshot = active_item_buffer(workspace, cx)?;
let full_path = snapshot.resolve_file_path(cx, true); let full_path = snapshot.resolve_file_path(cx, true);

View File

@@ -14,6 +14,7 @@ path = "src/assistant_tool.rs"
[dependencies] [dependencies]
anyhow.workspace = true anyhow.workspace = true
collections.workspace = true collections.workspace = true
clock.workspace = true
derive_more.workspace = true derive_more.workspace = true
gpui.workspace = true gpui.workspace = true
language.workspace = true language.workspace = true

View File

@@ -4,7 +4,7 @@ mod tool_working_set;
use std::sync::Arc; use std::sync::Arc;
use anyhow::Result; use anyhow::Result;
use collections::HashSet; use collections::{HashMap, HashSet};
use gpui::Context; use gpui::Context;
use gpui::{App, Entity, SharedString, Task}; use gpui::{App, Entity, SharedString, Task};
use language::Buffer; use language::Buffer;
@@ -58,31 +58,53 @@ pub trait Tool: 'static + Send + Sync {
/// Tracks actions performed by tools in a thread /// Tracks actions performed by tools in a thread
#[derive(Debug)] #[derive(Debug)]
pub struct ActionLog { pub struct ActionLog {
changed_buffers: HashSet<Entity<Buffer>>, /// Buffers that user manually added to the context, and whose content has
pending_refresh: HashSet<Entity<Buffer>>, /// changed since the model last saw them.
stale_buffers_in_context: HashSet<Entity<Buffer>>,
/// Buffers that we want to notify the model about when they change.
tracked_buffers: HashMap<Entity<Buffer>, TrackedBuffer>,
}
#[derive(Debug, Default)]
struct TrackedBuffer {
version: clock::Global,
} }
impl ActionLog { impl ActionLog {
/// Creates a new, empty action log. /// Creates a new, empty action log.
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
changed_buffers: HashSet::default(), stale_buffers_in_context: HashSet::default(),
pending_refresh: HashSet::default(), tracked_buffers: HashMap::default(),
} }
} }
/// Registers buffers that have changed and need refreshing. /// Track a buffer as read, so we can notify the model about user edits.
pub fn notify_buffers_changed( pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
&mut self, let tracked_buffer = self.tracked_buffers.entry(buffer.clone()).or_default();
buffers: HashSet<Entity<Buffer>>, tracked_buffer.version = buffer.read(cx).version();
_cx: &mut Context<Self>, }
) {
self.changed_buffers.extend(buffers.clone()); /// Mark a buffer as edited, so we can refresh it in the context
self.pending_refresh.extend(buffers); pub fn buffer_edited(&mut self, buffers: HashSet<Entity<Buffer>>, cx: &mut Context<Self>) {
for buffer in &buffers {
let tracked_buffer = self.tracked_buffers.entry(buffer.clone()).or_default();
tracked_buffer.version = buffer.read(cx).version();
}
self.stale_buffers_in_context.extend(buffers);
}
/// Iterate over buffers changed since last read or edited by the model
pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
self.tracked_buffers
.iter()
.filter(|(buffer, tracked)| tracked.version != buffer.read(cx).version)
.map(|(buffer, _)| buffer)
} }
/// Takes and returns the set of buffers pending refresh, clearing internal state. /// Takes and returns the set of buffers pending refresh, clearing internal state.
pub fn take_pending_refresh_buffers(&mut self) -> HashSet<Entity<Buffer>> { pub fn take_stale_buffers_in_context(&mut self) -> HashSet<Entity<Buffer>> {
std::mem::take(&mut self.pending_refresh) std::mem::take(&mut self.stale_buffers_in_context)
} }
} }

View File

@@ -19,6 +19,9 @@ collections.workspace = true
feature_flags.workspace = true feature_flags.workspace = true
futures.workspace = true futures.workspace = true
gpui.workspace = true gpui.workspace = true
html_to_markdown.workspace = true
http_client.workspace = true
itertools.workspace = true
language.workspace = true language.workspace = true
language_model.workspace = true language_model.workspace = true
project.workspace = true project.workspace = true
@@ -26,15 +29,18 @@ release_channel.workspace = true
schemars.workspace = true schemars.workspace = true
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true
settings.workspace = true
theme.workspace = true theme.workspace = true
ui.workspace = true ui.workspace = true
util.workspace = true util.workspace = true
workspace.workspace = true workspace.workspace = true
settings.workspace = true worktree.workspace = true
[dev-dependencies] [dev-dependencies]
rand.workspace = true
collections = { workspace = true, features = ["test-support"] } collections = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] } language = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] } project = { workspace = true, features = ["test-support"] }
rand.workspace = true
workspace = { workspace = true, features = ["test-support"] }
unindent.workspace = true

View File

@@ -2,28 +2,33 @@ mod bash_tool;
mod delete_path_tool; mod delete_path_tool;
mod diagnostics_tool; mod diagnostics_tool;
mod edit_files_tool; mod edit_files_tool;
mod fetch_tool;
mod list_directory_tool; mod list_directory_tool;
mod now_tool; mod now_tool;
mod path_search_tool; mod path_search_tool;
mod read_file_tool; mod read_file_tool;
mod regex_search; mod regex_search_tool;
mod thinking_tool; mod thinking_tool;
use std::sync::Arc;
use assistant_tool::ToolRegistry; use assistant_tool::ToolRegistry;
use gpui::App; use gpui::App;
use http_client::HttpClientWithUrl;
use crate::bash_tool::BashTool; use crate::bash_tool::BashTool;
use crate::delete_path_tool::DeletePathTool; use crate::delete_path_tool::DeletePathTool;
use crate::diagnostics_tool::DiagnosticsTool; use crate::diagnostics_tool::DiagnosticsTool;
use crate::edit_files_tool::EditFilesTool; use crate::edit_files_tool::EditFilesTool;
use crate::fetch_tool::FetchTool;
use crate::list_directory_tool::ListDirectoryTool; use crate::list_directory_tool::ListDirectoryTool;
use crate::now_tool::NowTool; use crate::now_tool::NowTool;
use crate::path_search_tool::PathSearchTool; use crate::path_search_tool::PathSearchTool;
use crate::read_file_tool::ReadFileTool; use crate::read_file_tool::ReadFileTool;
use crate::regex_search::RegexSearchTool; use crate::regex_search_tool::RegexSearchTool;
use crate::thinking_tool::ThinkingTool; use crate::thinking_tool::ThinkingTool;
pub fn init(cx: &mut App) { pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
assistant_tool::init(cx); assistant_tool::init(cx);
crate::edit_files_tool::log::init(cx); crate::edit_files_tool::log::init(cx);
@@ -38,4 +43,5 @@ pub fn init(cx: &mut App) {
registry.register_tool(ReadFileTool); registry.register_tool(ReadFileTool);
registry.register_tool(RegexSearchTool); registry.register_tool(RegexSearchTool);
registry.register_tool(ThinkingTool); registry.register_tool(ThinkingTool);
registry.register_tool(FetchTool::new(http_client));
} }

View File

@@ -50,7 +50,7 @@ impl Tool for BashTool {
}; };
let working_directory = worktree.read(cx).abs_path(); let working_directory = worktree.read(cx).abs_path();
cx.spawn(|_| async move { cx.spawn(async move |_| {
// Add 2>&1 to merge stderr into stdout for proper interleaving. // Add 2>&1 to merge stderr into stdout for proper interleaving.
let command = format!("({}) 2>&1", input.command); let command = format!("({}) 2>&1", input.command);

View File

@@ -1,16 +1,15 @@
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use assistant_tool::{ActionLog, Tool}; use assistant_tool::{ActionLog, Tool};
use gpui::{App, Entity, Task}; use gpui::{App, AppContext, Entity, Task};
use language_model::LanguageModelRequestMessage; use language_model::LanguageModelRequestMessage;
use project::Project; use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{fs, path::PathBuf, sync::Arc}; use std::sync::Arc;
use util::paths::PathMatcher;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct DeletePathToolInput { pub struct DeletePathToolInput {
/// The glob to match files in the project to delete. /// The path of the file or directory to delete.
/// ///
/// <example> /// <example>
/// If the project has the following files: /// If the project has the following files:
@@ -19,9 +18,9 @@ pub struct DeletePathToolInput {
/// - directory2/a/things.txt /// - directory2/a/things.txt
/// - directory3/a/other.txt /// - directory3/a/other.txt
/// ///
/// You can delete the first two files by providing a glob of "*thing*.txt" /// You can delete the first file by providing a path of "directory1/a/something.txt"
/// </example> /// </example>
pub glob: String, pub path: String,
} }
pub struct DeletePathTool; pub struct DeletePathTool;
@@ -48,119 +47,26 @@ impl Tool for DeletePathTool {
_action_log: Entity<ActionLog>, _action_log: Entity<ActionLog>,
cx: &mut App, cx: &mut App,
) -> Task<Result<String>> { ) -> Task<Result<String>> {
let glob = match serde_json::from_value::<DeletePathToolInput>(input) { let path_str = match serde_json::from_value::<DeletePathToolInput>(input) {
Ok(input) => input.glob, Ok(input) => input.path,
Err(err) => return Task::ready(Err(anyhow!(err))), Err(err) => return Task::ready(Err(anyhow!(err))),
}; };
let path_matcher = match PathMatcher::new(&[glob.clone()]) {
Ok(matcher) => matcher,
Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {}", err))),
};
struct Match { match project
display_path: String, .read(cx)
path: PathBuf, .find_project_path(&path_str, cx)
} .and_then(|path| project.update(cx, |project, cx| project.delete_file(path, false, cx)))
{
let mut matches = Vec::new(); Some(deletion_task) => cx.background_spawn(async move {
let mut deleted_paths = Vec::new(); match deletion_task.await {
let mut errors = Vec::new(); Ok(()) => Ok(format!("Deleted {}", &path_str)),
Err(err) => Err(anyhow!("Failed to delete {}: {}", &path_str, err)),
for worktree_handle in project.read(cx).worktrees(cx) {
let worktree = worktree_handle.read(cx);
let worktree_root = worktree.abs_path().to_path_buf();
// Don't consider ignored entries.
for entry in worktree.entries(false, 0) {
if path_matcher.is_match(&entry.path) {
matches.push(Match {
path: worktree_root.join(&entry.path),
display_path: entry.path.display().to_string(),
});
} }
} }),
} None => Task::ready(Err(anyhow!(
"Couldn't delete {} because that path isn't in this project.",
if matches.is_empty() { path_str
return Task::ready(Ok(format!("No paths in the project matched {glob:?}"))); ))),
}
let paths_matched = matches.len();
// Delete the files
for Match { path, display_path } in matches {
match fs::remove_file(&path) {
Ok(()) => {
deleted_paths.push(display_path);
}
Err(file_err) => {
// Try to remove directory if it's not a file. Retrying as a directory
// on error saves a syscall compared to checking whether it's
// a directory up front for every single file.
if let Err(dir_err) = fs::remove_dir_all(&path) {
let error = if path.is_dir() {
format!("Failed to delete directory {}: {dir_err}", display_path)
} else {
format!("Failed to delete file {}: {file_err}", display_path)
};
errors.push(error);
} else {
deleted_paths.push(display_path);
}
}
}
}
if errors.is_empty() {
// 0 deleted paths should never happen if there were no errors;
// we already returned if matches was empty.
let answer = if deleted_paths.len() == 1 {
format!(
"Deleted {}",
deleted_paths.first().unwrap_or(&String::new())
)
} else {
// Sort to group entries in the same directory together
deleted_paths.sort();
let mut buf = format!("Deleted these {} paths:\n", deleted_paths.len());
for path in deleted_paths.iter() {
buf.push('\n');
buf.push_str(path);
}
buf
};
Task::ready(Ok(answer))
} else {
if deleted_paths.is_empty() {
Task::ready(Err(anyhow!(
"{glob:?} matched {} deleted because of {}:\n{}",
if paths_matched == 1 {
"1 path, but it was not".to_string()
} else {
format!("{} paths, but none were", paths_matched)
},
if errors.len() == 1 {
"this error".to_string()
} else {
format!("{} errors", errors.len())
},
errors.join("\n")
)))
} else {
// Sort to group entries in the same directory together
deleted_paths.sort();
Task::ready(Ok(format!(
"Deleted {} paths matching glob {glob:?}:\n{}\n\nErrors:\n{}",
deleted_paths.len(),
deleted_paths.join("\n"),
errors.join("\n")
)))
}
} }
} }
} }

View File

@@ -1 +1 @@
Deletes all files and directories in the project which match the given glob, and returns a list of the paths that were deleted. Deletes the file or directory (and the directory's contents, recursively) at the specified path in the project, and returns confirmation of the deletion.

View File

@@ -65,10 +65,10 @@ impl Tool for DiagnosticsTool {
}; };
let buffer = project.update(cx, |project, cx| project.open_buffer(project_path, cx)); let buffer = project.update(cx, |project, cx| project.open_buffer(project_path, cx));
cx.spawn(|cx| async move { cx.spawn(async move |cx| {
let mut output = String::new(); let mut output = String::new();
let buffer = buffer.await?; let buffer = buffer.await?;
let snapshot = buffer.read_with(&cx, |buffer, _cx| buffer.snapshot())?; let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
for (_, group) in snapshot.diagnostic_groups(None) { for (_, group) in snapshot.diagnostic_groups(None) {
let entry = &group.entries[group.primary_ix]; let entry = &group.entries[group.primary_ix];

View File

@@ -1,5 +1,6 @@
mod edit_action; mod edit_action;
pub mod log; pub mod log;
mod replace;
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use assistant_tool::{ActionLog, Tool}; use assistant_tool::{ActionLog, Tool};
@@ -11,12 +12,12 @@ use language_model::{
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
}; };
use log::{EditToolLog, EditToolRequestId}; use log::{EditToolLog, EditToolRequestId};
use project::{search::SearchQuery, Project}; use project::Project;
use replace::{replace_exact, replace_with_flexible_indent};
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt::Write; use std::fmt::Write;
use std::sync::Arc; use std::sync::Arc;
use util::paths::PathMatcher;
use util::ResultExt; use util::ResultExt;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
@@ -103,7 +104,7 @@ impl Tool for EditFilesTool {
cx, cx,
); );
cx.spawn(|mut cx| async move { cx.spawn(async move |cx| {
let result = task.await; let result = task.await;
let str_result = match &result { let str_result = match &result {
@@ -111,10 +112,8 @@ impl Tool for EditFilesTool {
Err(err) => Err(err.to_string()), Err(err) => Err(err.to_string()),
}; };
log.update(&mut cx, |log, cx| { log.update(cx, |log, cx| log.set_tool_output(req_id, str_result, cx))
log.set_tool_output(req_id, str_result, cx) .log_err();
})
.log_err();
result result
}) })
@@ -127,6 +126,7 @@ impl Tool for EditFilesTool {
struct EditToolRequest { struct EditToolRequest {
parser: EditActionParser, parser: EditActionParser,
output: String,
changed_buffers: HashSet<Entity<language::Buffer>>, changed_buffers: HashSet<Entity<language::Buffer>>,
bad_searches: Vec<BadSearch>, bad_searches: Vec<BadSearch>,
project: Entity<Project>, project: Entity<Project>,
@@ -187,7 +187,7 @@ impl EditToolRequest {
cache: false, cache: false,
}); });
cx.spawn(|mut cx| async move { cx.spawn(async move |cx| {
let llm_request = LanguageModelRequest { let llm_request = LanguageModelRequest {
messages, messages,
tools: vec![], tools: vec![],
@@ -200,6 +200,8 @@ impl EditToolRequest {
let mut request = Self { let mut request = Self {
parser: EditActionParser::new(), parser: EditActionParser::new(),
// we start with the success header so we don't need to shift the output in the common case
output: Self::SUCCESS_OUTPUT_HEADER.to_string(),
changed_buffers: HashSet::default(), changed_buffers: HashSet::default(),
bad_searches: Vec::new(), bad_searches: Vec::new(),
action_log, action_log,
@@ -208,10 +210,10 @@ impl EditToolRequest {
}; };
while let Some(chunk) = chunks.stream.next().await { while let Some(chunk) = chunks.stream.next().await {
request.process_response_chunk(&chunk?, &mut cx).await?; request.process_response_chunk(&chunk?, cx).await?;
} }
request.finalize(&mut cx).await request.finalize(cx).await
}) })
} }
@@ -232,7 +234,11 @@ impl EditToolRequest {
Ok(()) Ok(())
} }
async fn apply_action(&mut self, action: EditAction, cx: &mut AsyncApp) -> Result<()> { async fn apply_action(
&mut self,
(action, source): (EditAction, String),
cx: &mut AsyncApp,
) -> Result<()> {
let project_path = self.project.read_with(cx, |project, cx| { let project_path = self.project.read_with(cx, |project, cx| {
project project
.find_project_path(action.file_path(), cx) .find_project_path(action.file_path(), cx)
@@ -270,6 +276,7 @@ impl EditToolRequest {
DiffResult::Diff(diff) => { DiffResult::Diff(diff) => {
let _clock = buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?; let _clock = buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
write!(&mut self.output, "\n\n{}", source)?;
self.changed_buffers.insert(buffer); self.changed_buffers.insert(buffer);
} }
} }
@@ -283,121 +290,119 @@ impl EditToolRequest {
file_path: std::path::PathBuf, file_path: std::path::PathBuf,
snapshot: language::BufferSnapshot, snapshot: language::BufferSnapshot,
) -> Result<DiffResult> { ) -> Result<DiffResult> {
let query = SearchQuery::text( let result =
old.clone(), // Try to match exactly
false, replace_exact(&old, &new, &snapshot)
true, .await
true, // If that fails, try being flexible about indentation
PathMatcher::new(&[])?, .or_else(|| replace_with_flexible_indent(&old, &new, &snapshot));
PathMatcher::new(&[])?,
None,
)?;
let matches = query.search(&snapshot, None).await; let Some(diff) = result else {
return anyhow::Ok(DiffResult::BadSearch(BadSearch {
if matches.is_empty() { search: old,
return Ok(DiffResult::BadSearch(BadSearch {
search: new.clone(),
file_path: file_path.display().to_string(), file_path: file_path.display().to_string(),
})); }));
}
let edit_range = matches[0].clone();
let diff = language::text_diff(&old, &new);
let edits = diff
.into_iter()
.map(|(old_range, text)| {
let start = edit_range.start + old_range.start;
let end = edit_range.start + old_range.end;
(start..end, text)
})
.collect::<Vec<_>>();
let diff = language::Diff {
base_version: snapshot.version().clone(),
line_ending: snapshot.line_ending(),
edits,
}; };
anyhow::Ok(DiffResult::Diff(diff)) anyhow::Ok(DiffResult::Diff(diff))
} }
const SUCCESS_OUTPUT_HEADER: &str = "Successfully applied. Here's a list of changes:";
const ERROR_OUTPUT_HEADER_NO_EDITS: &str = "I couldn't apply any edits!";
const ERROR_OUTPUT_HEADER_WITH_EDITS: &str =
"Errors occurred. First, here's a list of the edits we managed to apply:";
async fn finalize(self, cx: &mut AsyncApp) -> Result<String> { async fn finalize(self, cx: &mut AsyncApp) -> Result<String> {
let mut answer = match self.changed_buffers.len() { let changed_buffer_count = self.changed_buffers.len();
0 => "No files were edited.".to_string(),
1 => "Successfully edited ".to_string(),
_ => "Successfully edited these files:\n\n".to_string(),
};
// Save each buffer once at the end // Save each buffer once at the end
for buffer in &self.changed_buffers { for buffer in &self.changed_buffers {
let (path, save_task) = self.project.update(cx, |project, cx| { self.project
let path = buffer .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
.read(cx) .await?;
.file()
.map(|file| file.path().display().to_string());
let task = project.save_buffer(buffer.clone(), cx);
(path, task)
})?;
save_task.await?;
if let Some(path) = path {
writeln!(&mut answer, "{}", path)?;
}
} }
self.action_log self.action_log
.update(cx, |log, cx| { .update(cx, |log, cx| log.buffer_edited(self.changed_buffers, cx))
log.notify_buffers_changed(self.changed_buffers, cx)
})
.log_err(); .log_err();
let errors = self.parser.errors(); let errors = self.parser.errors();
if errors.is_empty() && self.bad_searches.is_empty() { if errors.is_empty() && self.bad_searches.is_empty() {
let answer = answer.trim_end().to_string(); if changed_buffer_count == 0 {
Ok(answer) return Err(anyhow!(
"The instructions didn't lead to any changes. You might need to consult the file contents first."
));
}
Ok(self.output)
} else { } else {
let mut output = self.output;
if output.is_empty() {
output.replace_range(
0..Self::SUCCESS_OUTPUT_HEADER.len(),
Self::ERROR_OUTPUT_HEADER_NO_EDITS,
);
} else {
output.replace_range(
0..Self::SUCCESS_OUTPUT_HEADER.len(),
Self::ERROR_OUTPUT_HEADER_WITH_EDITS,
);
}
if !self.bad_searches.is_empty() { if !self.bad_searches.is_empty() {
writeln!( writeln!(
&mut answer, &mut output,
"\nThese searches failed because they didn't match any strings:" "\n\n# {} SEARCH/REPLACE block(s) failed to match:\n",
self.bad_searches.len()
)?; )?;
for replace in self.bad_searches { for replace in self.bad_searches {
writeln!( writeln!(
&mut answer, &mut output,
"- '{}' does not appear in `{}`", "## No exact match in: {}\n```\n{}\n```\n",
replace.search.replace("\r", "\\r").replace("\n", "\\n"), replace.file_path, replace.search,
replace.file_path
)?; )?;
} }
writeln!(&mut answer, "Make sure to use exact searches.")?; write!(&mut output,
"The SEARCH section must exactly match an existing block of lines including all white \
space, comments, indentation, docstrings, etc."
)?;
} }
if !errors.is_empty() { if !errors.is_empty() {
writeln!( writeln!(
&mut answer, &mut output,
"\nThese SEARCH/REPLACE blocks failed to parse:" "\n\n# {} SEARCH/REPLACE blocks failed to parse:",
errors.len()
)?; )?;
for error in errors { for error in errors {
writeln!(&mut answer, "- {}", error)?; writeln!(&mut output, "- {}", error)?;
} }
} }
if changed_buffer_count > 0 {
writeln!(
&mut output,
"\n\nThe other SEARCH/REPLACE blocks were applied successfully. Do not re-send them!",
)?;
}
writeln!( writeln!(
&mut answer, &mut output,
"\nYou can fix errors by running the tool again. You can include instructions,\ "{}You can fix errors by running the tool again. You can include instructions, \
but errors are part of the conversation so you don't need to repeat them." but errors are part of the conversation so you don't need to repeat them.",
if changed_buffer_count == 0 {
"\n\n"
} else {
""
}
)?; )?;
Err(anyhow!(answer.trim_end().to_string())) Err(anyhow!(output))
} }
} }
} }

View File

@@ -1,4 +1,8 @@
use std::path::{Path, PathBuf}; use std::{
mem::take,
ops::Range,
path::{Path, PathBuf},
};
use util::ResultExt; use util::ResultExt;
/// Represents an edit action to be performed on a file. /// Represents an edit action to be performed on a file.
@@ -28,12 +32,14 @@ impl EditAction {
#[derive(Debug)] #[derive(Debug)]
pub struct EditActionParser { pub struct EditActionParser {
state: State, state: State,
pre_fence_line: Vec<u8>,
marker_ix: usize,
line: usize, line: usize,
column: usize, column: usize,
old_bytes: Vec<u8>, marker_ix: usize,
new_bytes: Vec<u8>, action_source: Vec<u8>,
fence_start_offset: usize,
block_range: Range<usize>,
old_range: Range<usize>,
new_range: Range<usize>,
errors: Vec<ParseError>, errors: Vec<ParseError>,
} }
@@ -58,12 +64,14 @@ impl EditActionParser {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
state: State::Default, state: State::Default,
pre_fence_line: Vec::new(),
marker_ix: 0,
line: 1, line: 1,
column: 0, column: 0,
old_bytes: Vec::new(), action_source: Vec::new(),
new_bytes: Vec::new(), fence_start_offset: 0,
marker_ix: 0,
block_range: Range::default(),
old_range: Range::default(),
new_range: Range::default(),
errors: Vec::new(), errors: Vec::new(),
} }
} }
@@ -76,7 +84,7 @@ impl EditActionParser {
/// ///
/// If a block fails to parse, it will simply be skipped and an error will be recorded. /// If a block fails to parse, it will simply be skipped and an error will be recorded.
/// All errors can be accessed through the `EditActionsParser::errors` method. /// All errors can be accessed through the `EditActionsParser::errors` method.
pub fn parse_chunk(&mut self, input: &str) -> Vec<EditAction> { pub fn parse_chunk(&mut self, input: &str) -> Vec<(EditAction, String)> {
use State::*; use State::*;
const FENCE: &[u8] = b"```"; const FENCE: &[u8] = b"```";
@@ -97,20 +105,21 @@ impl EditActionParser {
self.column += 1; self.column += 1;
} }
let action_offset = self.action_source.len();
match &self.state { match &self.state {
Default => match match_marker(byte, FENCE, false, &mut self.marker_ix) { Default => match self.match_marker(byte, FENCE, false) {
MarkerMatch::Complete => { MarkerMatch::Complete => {
self.fence_start_offset = action_offset + 1 - FENCE.len();
self.to_state(OpenFence); self.to_state(OpenFence);
} }
MarkerMatch::Partial => {} MarkerMatch::Partial => {}
MarkerMatch::None => { MarkerMatch::None => {
if self.marker_ix > 0 { if self.marker_ix > 0 {
self.marker_ix = 0; self.marker_ix = 0;
} else if self.pre_fence_line.ends_with(b"\n") { } else if self.action_source.ends_with(b"\n") {
self.pre_fence_line.clear(); self.action_source.clear();
} }
self.pre_fence_line.push(byte);
} }
}, },
OpenFence => { OpenFence => {
@@ -125,39 +134,34 @@ impl EditActionParser {
} }
} }
SearchBlock => { SearchBlock => {
if collect_until_marker( if self.extend_block_range(byte, DIVIDER, NL_DIVIDER) {
byte, self.old_range = take(&mut self.block_range);
DIVIDER,
NL_DIVIDER,
true,
&mut self.marker_ix,
&mut self.old_bytes,
) {
self.to_state(ReplaceBlock); self.to_state(ReplaceBlock);
} }
} }
ReplaceBlock => { ReplaceBlock => {
if collect_until_marker( if self.extend_block_range(byte, REPLACE_MARKER, NL_REPLACE_MARKER) {
byte, self.new_range = take(&mut self.block_range);
REPLACE_MARKER,
NL_REPLACE_MARKER,
true,
&mut self.marker_ix,
&mut self.new_bytes,
) {
self.to_state(CloseFence); self.to_state(CloseFence);
} }
} }
CloseFence => { CloseFence => {
if self.expect_marker(byte, FENCE, false) { if self.expect_marker(byte, FENCE, false) {
self.action_source.push(byte);
if let Some(action) = self.action() { if let Some(action) = self.action() {
actions.push(action); actions.push(action);
} }
self.errors(); self.errors();
self.reset(); self.reset();
continue;
} }
} }
}; };
self.action_source.push(byte);
} }
actions actions
@@ -168,48 +172,44 @@ impl EditActionParser {
&self.errors &self.errors
} }
fn action(&mut self) -> Option<EditAction> { fn action(&mut self) -> Option<(EditAction, String)> {
if self.old_bytes.is_empty() && self.new_bytes.is_empty() { let old_range = take(&mut self.old_range);
self.push_error(ParseErrorKind::NoOp); let new_range = take(&mut self.new_range);
return None;
}
let mut pre_fence_line = std::mem::take(&mut self.pre_fence_line); let action_source = take(&mut self.action_source);
let action_source = String::from_utf8(action_source).log_err()?;
if pre_fence_line.ends_with(b"\n") { let mut file_path_bytes = action_source[..self.fence_start_offset].to_owned();
pre_fence_line.pop();
pop_carriage_return(&mut pre_fence_line);
}
let file_path = PathBuf::from(String::from_utf8(pre_fence_line).log_err()?); if file_path_bytes.ends_with("\n") {
let content = String::from_utf8(std::mem::take(&mut self.new_bytes)).log_err()?; file_path_bytes.pop();
if file_path_bytes.ends_with("\r") {
if self.old_bytes.is_empty() { file_path_bytes.pop();
Some(EditAction::Write { file_path, content })
} else {
let old = String::from_utf8(std::mem::take(&mut self.old_bytes)).log_err()?;
Some(EditAction::Replace {
file_path,
old,
new: content,
})
}
}
fn expect_marker(&mut self, byte: u8, marker: &'static [u8], trailing_newline: bool) -> bool {
match match_marker(byte, marker, trailing_newline, &mut self.marker_ix) {
MarkerMatch::Complete => true,
MarkerMatch::Partial => false,
MarkerMatch::None => {
self.push_error(ParseErrorKind::ExpectedMarker {
expected: marker,
found: byte,
});
self.reset();
false
} }
} }
let file_path = PathBuf::from(file_path_bytes);
if old_range.is_empty() {
return Some((
EditAction::Write {
file_path,
content: action_source[new_range].to_owned(),
},
action_source,
));
}
let old = action_source[old_range].to_owned();
let new = action_source[new_range].to_owned();
let action = EditAction::Replace {
file_path,
old,
new,
};
Some((action, action_source))
} }
fn to_state(&mut self, state: State) { fn to_state(&mut self, state: State) {
@@ -218,18 +218,95 @@ impl EditActionParser {
} }
fn reset(&mut self) { fn reset(&mut self) {
self.pre_fence_line.clear(); self.action_source.clear();
self.old_bytes.clear(); self.block_range = Range::default();
self.new_bytes.clear(); self.old_range = Range::default();
self.new_range = Range::default();
self.fence_start_offset = 0;
self.marker_ix = 0;
self.to_state(State::Default); self.to_state(State::Default);
} }
fn push_error(&mut self, kind: ParseErrorKind) { fn expect_marker(&mut self, byte: u8, marker: &'static [u8], trailing_newline: bool) -> bool {
self.errors.push(ParseError { match self.match_marker(byte, marker, trailing_newline) {
line: self.line, MarkerMatch::Complete => true,
column: self.column, MarkerMatch::Partial => false,
kind, MarkerMatch::None => {
}); self.errors.push(ParseError {
line: self.line,
column: self.column,
expected: marker,
found: byte,
});
self.reset();
false
}
}
}
fn extend_block_range(&mut self, byte: u8, marker: &[u8], nl_marker: &[u8]) -> bool {
let marker = if self.block_range.is_empty() {
// do not require another newline if block is empty
marker
} else {
nl_marker
};
let offset = self.action_source.len();
match self.match_marker(byte, marker, true) {
MarkerMatch::Complete => {
if self.action_source[self.block_range.clone()].ends_with(b"\r") {
self.block_range.end -= 1;
}
true
}
MarkerMatch::Partial => false,
MarkerMatch::None => {
if self.marker_ix > 0 {
self.marker_ix = 0;
self.block_range.end = offset;
// The beginning of marker might match current byte
match self.match_marker(byte, marker, true) {
MarkerMatch::Complete => return true,
MarkerMatch::Partial => return false,
MarkerMatch::None => { /* no match, keep collecting */ }
}
}
if self.block_range.is_empty() {
self.block_range.start = offset;
}
self.block_range.end = offset + 1;
false
}
}
}
fn match_marker(&mut self, byte: u8, marker: &[u8], trailing_newline: bool) -> MarkerMatch {
if trailing_newline && self.marker_ix >= marker.len() {
if byte == b'\n' {
MarkerMatch::Complete
} else if byte == b'\r' {
MarkerMatch::Partial
} else {
MarkerMatch::None
}
} else if byte == marker[self.marker_ix] {
self.marker_ix += 1;
if self.marker_ix < marker.len() || trailing_newline {
MarkerMatch::Partial
} else {
MarkerMatch::Complete
}
} else {
MarkerMatch::None
}
} }
} }
@@ -240,114 +317,24 @@ enum MarkerMatch {
Complete, Complete,
} }
fn match_marker(
byte: u8,
marker: &[u8],
trailing_newline: bool,
marker_ix: &mut usize,
) -> MarkerMatch {
if trailing_newline && *marker_ix >= marker.len() {
if byte == b'\n' {
MarkerMatch::Complete
} else if byte == b'\r' {
MarkerMatch::Partial
} else {
MarkerMatch::None
}
} else if byte == marker[*marker_ix] {
*marker_ix += 1;
if *marker_ix < marker.len() || trailing_newline {
MarkerMatch::Partial
} else {
MarkerMatch::Complete
}
} else {
MarkerMatch::None
}
}
fn collect_until_marker(
byte: u8,
marker: &[u8],
nl_marker: &[u8],
trailing_newline: bool,
marker_ix: &mut usize,
buf: &mut Vec<u8>,
) -> bool {
let marker = if buf.is_empty() {
// do not require another newline if block is empty
marker
} else {
nl_marker
};
match match_marker(byte, marker, trailing_newline, marker_ix) {
MarkerMatch::Complete => {
pop_carriage_return(buf);
true
}
MarkerMatch::Partial => false,
MarkerMatch::None => {
if *marker_ix > 0 {
buf.extend_from_slice(&marker[..*marker_ix]);
*marker_ix = 0;
// The beginning of marker might match current byte
match match_marker(byte, marker, trailing_newline, marker_ix) {
MarkerMatch::Complete => return true,
MarkerMatch::Partial => return false,
MarkerMatch::None => { /* no match, keep collecting */ }
}
}
buf.push(byte);
false
}
}
}
fn pop_carriage_return(buf: &mut Vec<u8>) {
if buf.ends_with(b"\r") {
buf.pop();
}
}
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub struct ParseError { pub struct ParseError {
line: usize, line: usize,
column: usize, column: usize,
kind: ParseErrorKind, expected: &'static [u8],
} found: u8,
#[derive(Debug, PartialEq, Eq)]
pub enum ParseErrorKind {
ExpectedMarker { expected: &'static [u8], found: u8 },
NoOp,
}
impl std::fmt::Display for ParseErrorKind {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ParseErrorKind::ExpectedMarker { expected, found } => {
write!(
f,
"Expected marker {:?}, found {:?}",
String::from_utf8_lossy(expected),
*found as char
)
}
ParseErrorKind::NoOp => {
write!(f, "No search or replace")
}
}
}
} }
impl std::fmt::Display for ParseError { impl std::fmt::Display for ParseError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "input:{}:{}: {}", self.line, self.column, self.kind) write!(
f,
"input:{}:{}: Expected marker {:?}, found {:?}",
self.line,
self.column,
String::from_utf8_lossy(self.expected),
self.found as char
)
} }
} }
@@ -372,16 +359,16 @@ fn replacement() {}
let mut parser = EditActionParser::new(); let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input); let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1); assert_eq!(actions.len(), 1);
assert_eq!( assert_eq!(
actions[0], actions[0].0,
EditAction::Replace { EditAction::Replace {
file_path: PathBuf::from("src/main.rs"), file_path: PathBuf::from("src/main.rs"),
old: "fn original() {}".to_string(), old: "fn original() {}".to_string(),
new: "fn replacement() {}".to_string(), new: "fn replacement() {}".to_string(),
} }
); );
assert_eq!(parser.errors().len(), 0);
} }
#[test] #[test]
@@ -399,16 +386,16 @@ fn replacement() {}
let mut parser = EditActionParser::new(); let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input); let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1); assert_eq!(actions.len(), 1);
assert_eq!( assert_eq!(
actions[0], actions[0].0,
EditAction::Replace { EditAction::Replace {
file_path: PathBuf::from("src/main.rs"), file_path: PathBuf::from("src/main.rs"),
old: "fn original() {}".to_string(), old: "fn original() {}".to_string(),
new: "fn replacement() {}".to_string(), new: "fn replacement() {}".to_string(),
} }
); );
assert_eq!(parser.errors().len(), 0);
} }
#[test] #[test]
@@ -430,16 +417,16 @@ This change makes the function better.
let mut parser = EditActionParser::new(); let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input); let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1); assert_eq!(actions.len(), 1);
assert_eq!( assert_eq!(
actions[0], actions[0].0,
EditAction::Replace { EditAction::Replace {
file_path: PathBuf::from("src/main.rs"), file_path: PathBuf::from("src/main.rs"),
old: "fn original() {}".to_string(), old: "fn original() {}".to_string(),
new: "fn replacement() {}".to_string(), new: "fn replacement() {}".to_string(),
} }
); );
assert_eq!(parser.errors().len(), 0);
} }
#[test] #[test]
@@ -468,24 +455,27 @@ fn new_util() -> bool { true }
let mut parser = EditActionParser::new(); let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input); let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 2); assert_eq!(actions.len(), 2);
let (action, _) = &actions[0];
assert_eq!( assert_eq!(
actions[0], action,
EditAction::Replace { &EditAction::Replace {
file_path: PathBuf::from("src/main.rs"), file_path: PathBuf::from("src/main.rs"),
old: "fn original() {}".to_string(), old: "fn original() {}".to_string(),
new: "fn replacement() {}".to_string(), new: "fn replacement() {}".to_string(),
} }
); );
let (action2, _) = &actions[1];
assert_eq!( assert_eq!(
actions[1], action2,
EditAction::Replace { &EditAction::Replace {
file_path: PathBuf::from("src/utils.rs"), file_path: PathBuf::from("src/utils.rs"),
old: "fn old_util() -> bool { false }".to_string(), old: "fn old_util() -> bool { false }".to_string(),
new: "fn new_util() -> bool { true }".to_string(), new: "fn new_util() -> bool { true }".to_string(),
} }
); );
assert_eq!(parser.errors().len(), 0);
} }
#[test] #[test]
@@ -517,16 +507,18 @@ fn replacement() {
let mut parser = EditActionParser::new(); let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input); let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1); assert_eq!(actions.len(), 1);
let (action, _) = &actions[0];
assert_eq!( assert_eq!(
actions[0], action,
EditAction::Replace { &EditAction::Replace {
file_path: PathBuf::from("src/main.rs"), file_path: PathBuf::from("src/main.rs"),
old: "fn original() {\n println!(\"This is the original function\");\n let x = 42;\n if x > 0 {\n println!(\"Positive number\");\n }\n}".to_string(), old: "fn original() {\n println!(\"This is the original function\");\n let x = 42;\n if x > 0 {\n println!(\"Positive number\");\n }\n}".to_string(),
new: "fn replacement() {\n println!(\"This is the replacement function\");\n let x = 100;\n if x > 50 {\n println!(\"Large number\");\n } else {\n println!(\"Small number\");\n }\n}".to_string(), new: "fn replacement() {\n println!(\"This is the replacement function\");\n let x = 100;\n if x > 50 {\n println!(\"Large number\");\n } else {\n println!(\"Small number\");\n }\n}".to_string(),
} }
); );
assert_eq!(parser.errors().len(), 0);
} }
#[test] #[test]
@@ -547,16 +539,16 @@ fn new_function() {
let mut parser = EditActionParser::new(); let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input); let actions = parser.parse_chunk(input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1); assert_eq!(actions.len(), 1);
assert_eq!( assert_eq!(
actions[0], actions[0].0,
EditAction::Write { EditAction::Write {
file_path: PathBuf::from("src/main.rs"), file_path: PathBuf::from("src/main.rs"),
content: "fn new_function() {\n println!(\"This function is being added\");\n}" content: "fn new_function() {\n println!(\"This function is being added\");\n}"
.to_string(), .to_string(),
} }
); );
assert_eq!(parser.errors().len(), 0);
} }
#[test] #[test]
@@ -574,9 +566,11 @@ fn this_will_be_deleted() {
let mut parser = EditActionParser::new(); let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(&input); let actions = parser.parse_chunk(&input);
assert_no_errors(&parser);
assert_eq!(actions.len(), 1); assert_eq!(actions.len(), 1);
assert_eq!( assert_eq!(
actions[0], actions[0].0,
EditAction::Replace { EditAction::Replace {
file_path: PathBuf::from("src/main.rs"), file_path: PathBuf::from("src/main.rs"),
old: "fn this_will_be_deleted() {\n println!(\"Deleting this function\");\n}" old: "fn this_will_be_deleted() {\n println!(\"Deleting this function\");\n}"
@@ -584,12 +578,13 @@ fn this_will_be_deleted() {
new: "".to_string(), new: "".to_string(),
} }
); );
assert_eq!(parser.errors().len(), 0);
let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(&input.replace("\n", "\r\n")); let actions = parser.parse_chunk(&input.replace("\n", "\r\n"));
assert_no_errors(&parser);
assert_eq!(actions.len(), 1); assert_eq!(actions.len(), 1);
assert_eq!( assert_eq!(
actions[0], actions[0].0,
EditAction::Replace { EditAction::Replace {
file_path: PathBuf::from("src/main.rs"), file_path: PathBuf::from("src/main.rs"),
old: old:
@@ -598,7 +593,6 @@ fn this_will_be_deleted() {
new: "".to_string(), new: "".to_string(),
} }
); );
assert_eq!(parser.errors().len(), 0);
} }
#[test] #[test]
@@ -614,15 +608,15 @@ fn this_will_be_deleted() {
let mut parser = EditActionParser::new(); let mut parser = EditActionParser::new();
let actions = parser.parse_chunk(input); let actions = parser.parse_chunk(input);
// Should not create an action when both sections are empty assert_eq!(actions.len(), 1);
assert_eq!(actions.len(), 0); assert_eq!(
actions[0].0,
// Check that the NoOp error was added EditAction::Write {
assert_eq!(parser.errors().len(), 1); file_path: PathBuf::from("src/main.rs"),
match parser.errors()[0].kind { content: String::new(),
ParseErrorKind::NoOp => {} }
_ => panic!("Expected NoOp error"), );
} assert_no_errors(&parser);
} }
#[test] #[test]
@@ -643,26 +637,27 @@ fn replacement() {}"#;
let mut parser = EditActionParser::new(); let mut parser = EditActionParser::new();
let actions1 = parser.parse_chunk(input_part1); let actions1 = parser.parse_chunk(input_part1);
assert_no_errors(&parser);
assert_eq!(actions1.len(), 0); assert_eq!(actions1.len(), 0);
assert_eq!(parser.errors().len(), 0);
let actions2 = parser.parse_chunk(input_part2); let actions2 = parser.parse_chunk(input_part2);
// No actions should be complete yet // No actions should be complete yet
assert_no_errors(&parser);
assert_eq!(actions2.len(), 0); assert_eq!(actions2.len(), 0);
assert_eq!(parser.errors().len(), 0);
let actions3 = parser.parse_chunk(input_part3); let actions3 = parser.parse_chunk(input_part3);
// The third chunk should complete the action // The third chunk should complete the action
assert_no_errors(&parser);
assert_eq!(actions3.len(), 1); assert_eq!(actions3.len(), 1);
let (action, _) = &actions3[0];
assert_eq!( assert_eq!(
actions3[0], action,
EditAction::Replace { &EditAction::Replace {
file_path: PathBuf::from("src/main.rs"), file_path: PathBuf::from("src/main.rs"),
old: "fn original() {}".to_string(), old: "fn original() {}".to_string(),
new: "fn replacement() {}".to_string(), new: "fn replacement() {}".to_string(),
} }
); );
assert_eq!(parser.errors().len(), 0);
} }
#[test] #[test]
@@ -671,28 +666,35 @@ fn replacement() {}"#;
let actions1 = parser.parse_chunk("src/main.rs\n```rust\n<<<<<<< SEARCH\n"); let actions1 = parser.parse_chunk("src/main.rs\n```rust\n<<<<<<< SEARCH\n");
// Check parser is in the correct state // Check parser is in the correct state
assert_no_errors(&parser);
assert_eq!(parser.state, State::SearchBlock); assert_eq!(parser.state, State::SearchBlock);
assert_eq!(parser.pre_fence_line, b"src/main.rs\n"); assert_eq!(
assert_eq!(parser.errors().len(), 0); parser.action_source,
b"src/main.rs\n```rust\n<<<<<<< SEARCH\n"
);
// Continue parsing // Continue parsing
let actions2 = parser.parse_chunk("original code\n=======\n"); let actions2 = parser.parse_chunk("original code\n=======\n");
assert_no_errors(&parser);
assert_eq!(parser.state, State::ReplaceBlock); assert_eq!(parser.state, State::ReplaceBlock);
assert_eq!(parser.old_bytes, b"original code"); assert_eq!(
assert_eq!(parser.errors().len(), 0); &parser.action_source[parser.old_range.clone()],
b"original code"
);
let actions3 = parser.parse_chunk("replacement code\n>>>>>>> REPLACE\n```\n"); let actions3 = parser.parse_chunk("replacement code\n>>>>>>> REPLACE\n```\n");
// After complete parsing, state should reset // After complete parsing, state should reset
assert_no_errors(&parser);
assert_eq!(parser.state, State::Default); assert_eq!(parser.state, State::Default);
assert_eq!(parser.pre_fence_line, b"\n"); assert_eq!(parser.action_source, b"\n");
assert!(parser.old_bytes.is_empty()); assert!(parser.old_range.is_empty());
assert!(parser.new_bytes.is_empty()); assert!(parser.new_range.is_empty());
assert_eq!(actions1.len(), 0); assert_eq!(actions1.len(), 0);
assert_eq!(actions2.len(), 0); assert_eq!(actions2.len(), 0);
assert_eq!(actions3.len(), 1); assert_eq!(actions3.len(), 1);
assert_eq!(parser.errors().len(), 0);
} }
#[test] #[test]
@@ -746,9 +748,10 @@ fn new_utils_func() {}
// Only the second block should be parsed // Only the second block should be parsed
assert_eq!(actions.len(), 1); assert_eq!(actions.len(), 1);
let (action, _) = &actions[0];
assert_eq!( assert_eq!(
actions[0], action,
EditAction::Replace { &EditAction::Replace {
file_path: PathBuf::from("src/utils.rs"), file_path: PathBuf::from("src/utils.rs"),
old: "fn utils_func() {}".to_string(), old: "fn utils_func() {}".to_string(),
new: "fn new_utils_func() {}".to_string(), new: "fn new_utils_func() {}".to_string(),
@@ -757,7 +760,7 @@ fn new_utils_func() {}
assert_eq!(parser.errors().len(), 1); assert_eq!(parser.errors().len(), 1);
assert_eq!( assert_eq!(
parser.errors()[0].to_string(), parser.errors()[0].to_string(),
"input:8:1: Expected marker \"```\", found '<'".to_string() "input:8:1: Expected marker \"```\", found '<'"
); );
// The parser should continue after an error // The parser should continue after an error
@@ -784,18 +787,19 @@ fn new_utils_func() {}
let (chunk, rest) = remaining.split_at(chunk_size); let (chunk, rest) = remaining.split_at(chunk_size);
actions.extend(parser.parse_chunk(chunk)); let chunk_actions = parser.parse_chunk(chunk);
actions.extend(chunk_actions);
remaining = rest; remaining = rest;
} }
assert_examples_in_system_prompt(&actions, parser.errors()); assert_examples_in_system_prompt(&actions, parser.errors());
} }
fn assert_examples_in_system_prompt(actions: &[EditAction], errors: &[ParseError]) { fn assert_examples_in_system_prompt(actions: &[(EditAction, String)], errors: &[ParseError]) {
assert_eq!(actions.len(), 5); assert_eq!(actions.len(), 5);
assert_eq!( assert_eq!(
actions[0], actions[0].0,
EditAction::Replace { EditAction::Replace {
file_path: PathBuf::from("mathweb/flask/app.py"), file_path: PathBuf::from("mathweb/flask/app.py"),
old: "from flask import Flask".to_string(), old: "from flask import Flask".to_string(),
@@ -804,7 +808,7 @@ fn new_utils_func() {}
); );
assert_eq!( assert_eq!(
actions[1], actions[1].0,
EditAction::Replace { EditAction::Replace {
file_path: PathBuf::from("mathweb/flask/app.py"), file_path: PathBuf::from("mathweb/flask/app.py"),
old: line_endings!("def factorial(n):\n \"compute factorial\"\n\n if n == 0:\n return 1\n else:\n return n * factorial(n-1)\n").to_string(), old: line_endings!("def factorial(n):\n \"compute factorial\"\n\n if n == 0:\n return 1\n else:\n return n * factorial(n-1)\n").to_string(),
@@ -813,7 +817,7 @@ fn new_utils_func() {}
); );
assert_eq!( assert_eq!(
actions[2], actions[2].0,
EditAction::Replace { EditAction::Replace {
file_path: PathBuf::from("mathweb/flask/app.py"), file_path: PathBuf::from("mathweb/flask/app.py"),
old: " return str(factorial(n))".to_string(), old: " return str(factorial(n))".to_string(),
@@ -822,7 +826,7 @@ fn new_utils_func() {}
); );
assert_eq!( assert_eq!(
actions[3], actions[3].0,
EditAction::Write { EditAction::Write {
file_path: PathBuf::from("hello.py"), file_path: PathBuf::from("hello.py"),
content: line_endings!( content: line_endings!(
@@ -833,7 +837,7 @@ fn new_utils_func() {}
); );
assert_eq!( assert_eq!(
actions[4], actions[4].0,
EditAction::Replace { EditAction::Replace {
file_path: PathBuf::from("main.py"), file_path: PathBuf::from("main.py"),
old: line_endings!( old: line_endings!(
@@ -882,4 +886,20 @@ fn replacement() {}
assert_eq!(format!("{}", error), expected_error); assert_eq!(format!("{}", error), expected_error);
} }
// helpers
fn assert_no_errors(parser: &EditActionParser) {
let errors = parser.errors();
assert!(
errors.is_empty(),
"Expected no errors, but found:\n\n{}",
errors
.iter()
.map(|e| e.to_string())
.collect::<Vec<String>>()
.join("\n")
);
}
} }

View File

@@ -80,7 +80,7 @@ impl EditToolLog {
&mut self, &mut self,
id: EditToolRequestId, id: EditToolRequestId,
chunk: &str, chunk: &str,
new_actions: &[EditAction], new_actions: &[(EditAction, String)],
cx: &mut Context<Self>, cx: &mut Context<Self>,
) { ) {
if let Some(request) = self.requests.get_mut(id.0 as usize) { if let Some(request) = self.requests.get_mut(id.0 as usize) {
@@ -92,7 +92,9 @@ impl EditToolLog {
response.push_str(chunk); response.push_str(chunk);
} }
} }
request.parsed_edits.extend(new_actions.iter().cloned()); request
.parsed_edits
.extend(new_actions.iter().cloned().map(|(action, _)| action));
cx.emit(EditToolLogEvent::Updated); cx.emit(EditToolLogEvent::Updated);
} }

View File

@@ -0,0 +1,525 @@
use language::{BufferSnapshot, Diff, Point, ToOffset};
use project::search::SearchQuery;
use util::{paths::PathMatcher, ResultExt as _};
/// Replaces the first exact occurrence of `old` with `new` in the buffer.
///
/// Builds a literal, case-sensitive search query for `old`, locates its first
/// match in `snapshot`, and translates the fine-grained text diff between
/// `old` and `new` into buffer-relative edits anchored at that match.
///
/// Returns `None` when the query cannot be constructed or when the buffer
/// contains no exact occurrence of `old`.
pub async fn replace_exact(old: &str, new: &str, snapshot: &BufferSnapshot) -> Option<Diff> {
    let query = SearchQuery::text(
        old,
        false,
        true,
        true,
        PathMatcher::new(&[]).ok()?,
        PathMatcher::new(&[]).ok()?,
        None,
    )
    .log_err()?;

    // Only the first occurrence is replaced; bail out if there is none.
    let found = query.search(&snapshot, None).await;
    let edit_range = found.first()?.clone();

    // Shift each diff hunk from `old`-relative offsets to buffer offsets.
    let edits = language::text_diff(&old, &new)
        .into_iter()
        .map(|(old_range, text)| {
            let range = edit_range.start + old_range.start..edit_range.start + old_range.end;
            (range, text)
        })
        .collect::<Vec<_>>();

    Some(language::Diff {
        base_version: snapshot.version().clone(),
        line_ending: snapshot.line_ending(),
        edits,
    })
}
/// Performs a replacement that's indentation-aware - matches text content ignoring leading
/// whitespace differences. When replacing, preserves the indentation level found in the buffer
/// at each matching line.
///
/// Returns `None` if `old` is empty, if no match is found, or if indentation is offset
/// inconsistently across matched lines.
pub fn replace_with_flexible_indent(old: &str, new: &str, buffer: &BufferSnapshot) -> Option<Diff> {
    let (old_lines, old_min_indent) = lines_with_min_indent(old);
    let (new_lines, new_min_indent) = lines_with_min_indent(new);

    // An empty pattern cannot anchor a match; bail out early instead of letting
    // the `len() as u32 - 1` window arithmetic below underflow (debug panic).
    if old_lines.is_empty() {
        return None;
    }

    // Normalize both snippets by their shared minimum indentation so only
    // *relative* indentation has to line up with the buffer.
    let min_indent = old_min_indent.min(new_min_indent);
    let old_lines = drop_lines_prefix(&old_lines, min_indent);
    let new_lines = drop_lines_prefix(&new_lines, min_indent);

    let max_row = buffer.max_point().row;

    // Slide a window of `old_lines.len()` rows over the buffer.
    'windows: for start_row in 0..max_row.saturating_sub(old_lines.len() as u32 - 1) {
        let mut common_leading = None;
        let end_row = start_row + old_lines.len() as u32 - 1;

        if end_row > max_row {
            // The buffer ends before fully matching the pattern
            return None;
        }

        let start_point = Point::new(start_row, 0);
        let end_point = Point::new(end_row, buffer.line_len(end_row));
        let range = start_point.to_offset(buffer)..end_point.to_offset(buffer);
        let window_text = buffer.text_for_range(range.clone());
        let mut window_lines = window_text.lines();
        let mut old_lines_iter = old_lines.iter();

        while let (Some(window_line), Some(old_line)) = (window_lines.next(), old_lines_iter.next())
        {
            let line_trimmed = window_line.trim_start();

            if line_trimmed != old_line.trim_start() {
                continue 'windows;
            }

            // Blank lines match regardless of indentation.
            if line_trimmed.is_empty() {
                continue;
            }

            // The buffer line should be the pattern line plus a common leading
            // prefix. If it is *shorter* than the pattern line (less indented
            // than the pattern's relative indent), it cannot match here; the
            // unchecked subtraction used to underflow and panic in debug builds.
            let Some(leading_len) = window_line.len().checked_sub(old_line.len()) else {
                continue 'windows;
            };
            let line_leading = &window_line[..leading_len];

            // Every non-empty matched line must share the same leading prefix.
            match &common_leading {
                Some(common_leading) if common_leading != line_leading => {
                    continue 'windows;
                }
                Some(_) => (),
                None => common_leading = Some(line_leading.to_string()),
            }
        }

        if let Some(common_leading) = common_leading {
            let line_ending = buffer.line_ending();

            // Re-indent the replacement with the prefix found in the buffer,
            // leaving blank lines unindented.
            let replacement = new_lines
                .iter()
                .map(|new_line| {
                    if new_line.trim().is_empty() {
                        new_line.to_string()
                    } else {
                        common_leading.to_string() + new_line
                    }
                })
                .collect::<Vec<_>>()
                .join(line_ending.as_str());

            let diff = Diff {
                base_version: buffer.version().clone(),
                line_ending,
                edits: vec![(range, replacement.into())],
            };

            return Some(diff);
        }
    }

    None
}
/// Strips the first `prefix_len` bytes from every line, yielding an empty
/// slice for any line shorter than the prefix (or where the cut would land
/// mid-character).
fn drop_lines_prefix<'a>(lines: &'a [&str], prefix_len: usize) -> Vec<&'a str> {
    let mut trimmed = Vec::with_capacity(lines.len());
    for line in lines {
        trimmed.push(line.get(prefix_len..).unwrap_or(""));
    }
    trimmed
}
/// Splits `input` into lines and computes the minimum leading-whitespace
/// width over the non-empty lines (0 when there is no non-empty line).
fn lines_with_min_indent(input: &str) -> (Vec<&str>, usize) {
    let lines: Vec<&str> = input.lines().collect();

    // Blank lines carry no indentation information, so they are excluded
    // from the minimum.
    let min_indent = lines
        .iter()
        .filter(|line| !line.trim().is_empty())
        .map(|line| line.len() - line.trim_start().len())
        .min()
        .unwrap_or(0);

    (lines, min_indent)
}
#[cfg(test)]
mod tests {
use super::*;
use gpui::prelude::*;
use gpui::TestAppContext;
use unindent::Unindent;
#[gpui::test]
fn test_replace_consistent_indentation(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
let x = 5;
println!("x = {}", x);
let y = 10;
}
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 42;
println!("New value: {}", x);
"#
.unindent();
let expected = r#"
fn test() {
let x = 42;
println!("New value: {}", x);
let y = 10;
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[gpui::test]
fn test_replace_inconsistent_indentation(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
if condition {
println!("{}", 43);
}
}
"#
.unindent();
let old = r#"
if condition {
println!("{}", 43);
"#
.unindent();
let new = r#"
if condition {
println!("{}", 42);
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
None
);
}
#[gpui::test]
fn test_replace_with_empty_lines(cx: &mut TestAppContext) {
// Test with empty lines
let whole = r#"
fn test() {
let x = 5;
println!("x = {}", x);
}
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 10;
println!("New x: {}", x);
"#
.unindent();
let expected = r#"
fn test() {
let x = 10;
println!("New x: {}", x);
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[gpui::test]
fn test_replace_no_match(cx: &mut TestAppContext) {
// Test with no match
let whole = r#"
fn test() {
let x = 5;
}
"#
.unindent();
let old = r#"
let y = 10;
"#
.unindent();
let new = r#"
let y = 20;
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
None
);
}
#[gpui::test]
fn test_replace_whole_ends_before_matching_old(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
let x = 5;
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 10;
println!("x = {}", x);
"#
.unindent();
// Should return None because whole doesn't fully contain the old text
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
None
);
}
#[test]
fn test_lines_with_min_indent() {
// Empty string
assert_eq!(lines_with_min_indent(""), (vec![], 0));
// Single line without indentation
assert_eq!(lines_with_min_indent("hello"), (vec!["hello"], 0));
// Multiple lines with no indentation
assert_eq!(
lines_with_min_indent("line1\nline2\nline3"),
(vec!["line1", "line2", "line3"], 0)
);
// Multiple lines with consistent indentation
assert_eq!(
lines_with_min_indent(" line1\n line2\n line3"),
(vec![" line1", " line2", " line3"], 2)
);
// Multiple lines with varying indentation
assert_eq!(
lines_with_min_indent(" line1\n line2\n line3"),
(vec![" line1", " line2", " line3"], 2)
);
// Lines with mixed indentation and empty lines
assert_eq!(
lines_with_min_indent(" line1\n\n line2"),
(vec![" line1", "", " line2"], 2)
);
}
#[gpui::test]
fn test_replace_with_missing_indent_uneven_match(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
if true {
let x = 5;
println!("x = {}", x);
}
}
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 42;
println!("x = {}", x);
"#
.unindent();
let expected = r#"
fn test() {
if true {
let x = 42;
println!("x = {}", x);
}
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
// Larger end-to-end example: appends a second test function to a `mod tests`
// block by replacing the single existing test with both tests.
// NOTE(review): the raw strings below rely on their internal indentation
// (consumed by `.unindent()`); leading whitespace appears to have been
// stripped in this copy — verify literals against the original file.
#[gpui::test]
fn test_replace_big_example(cx: &mut TestAppContext) {
// The full buffer contents to search in.
let whole = r#"
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
}
"#
.unindent();
// The existing test body to locate.
let old = r#"
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
"#
.unindent();
// The replacement: the original test followed by a new one.
let new = r#"
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
#[test]
fn test_group_people_by_age() {
let people = vec![
Person::new("Young One", 5, "young@example.com").unwrap(),
Person::new("Teen One", 15, "teen@example.com").unwrap(),
Person::new("Teen Two", 18, "teen2@example.com").unwrap(),
Person::new("Adult One", 25, "adult@example.com").unwrap(),
];
let groups = group_people_by_age(&people);
assert_eq!(groups.get(&0).unwrap().len(), 1); // One person in 0-9
assert_eq!(groups.get(&10).unwrap().len(), 2); // Two people in 10-19
assert_eq!(groups.get(&20).unwrap().len(), 1); // One person in 20-29
}
"#
.unindent();
// Expected buffer contents: both tests nested inside `mod tests`.
let expected = r#"
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
#[test]
fn test_group_people_by_age() {
let people = vec![
Person::new("Young One", 5, "young@example.com").unwrap(),
Person::new("Teen One", 15, "teen@example.com").unwrap(),
Person::new("Teen Two", 18, "teen2@example.com").unwrap(),
Person::new("Adult One", 25, "adult@example.com").unwrap(),
];
let groups = group_people_by_age(&people);
assert_eq!(groups.get(&0).unwrap().len(), 1); // One person in 0-9
assert_eq!(groups.get(&10).unwrap().len(), 2); // Two people in 10-19
assert_eq!(groups.get(&20).unwrap().len(), 1); // One person in 20-29
}
}
"#
.unindent();
// A successful replacement must yield exactly `expected`.
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[test]
fn test_drop_lines_prefix() {
    // Each case: (input lines, prefix length to drop, expected output).
    let cases: Vec<(Vec<&str>, usize, Vec<&str>)> = vec![
        // Empty array
        (vec![], 2, vec![]),
        // Zero prefix length
        (vec!["line1", "line2"], 0, vec!["line1", "line2"]),
        // Normal prefix drop
        (vec!["  line1", "  line2"], 2, vec!["line1", "line2"]),
        // Prefix longer than some lines
        (vec!["  line1", "a"], 2, vec!["line1", ""]),
        // Prefix longer than all lines
        (vec!["a", "b"], 5, vec!["", ""]),
        // Mixed length lines
        (
            vec!["    line1", "  line2", "   line3"],
            2,
            vec!["  line1", "line2", " line3"],
        ),
    ];
    for (input, prefix_len, expected) in cases {
        assert_eq!(drop_lines_prefix(&input, prefix_len), expected);
    }
}
/// Test helper: applies `replace_with_flexible_indent(old, new, …)` to a
/// local buffer containing `whole` and returns the resulting buffer text,
/// or `None` when no match was found.
fn test_replace_with_flexible_indent(
    cx: &mut TestAppContext,
    whole: &str,
    old: &str,
    new: &str,
) -> Option<String> {
    // Set up a local buffer holding the original text and grab a snapshot.
    let test_buffer = cx.new(|cx| language::Buffer::local(whole, cx));
    let snapshot = test_buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
    // Compute the flexible-indent diff; bail out early when nothing matched.
    let diff = replace_with_flexible_indent(old, new, &snapshot)?;
    // Apply the diff and hand back the resulting buffer contents.
    Some(test_buffer.update(cx, |buffer, cx| {
        let _ = buffer.apply_diff(diff, cx);
        buffer.text()
    }))
}
}

View File

@@ -0,0 +1,153 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
use anyhow::{anyhow, bail, Context as _, Result};
use assistant_tool::{ActionLog, Tool};
use futures::AsyncReadExt as _;
use gpui::{App, AppContext as _, Entity, Task};
use html_to_markdown::{convert_html_to_markdown, markdown, TagHandler};
use http_client::{AsyncBody, HttpClientWithUrl};
use language_model::LanguageModelRequestMessage;
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
/// The response `Content-Type`s that get dedicated conversion handling
/// in `FetchTool::build_message`.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
enum ContentType {
// Converted to Markdown via `html_to_markdown`; also the fallback for
// unrecognized content types.
Html,
// Returned as-is (UTF-8 text).
Plaintext,
// Pretty-printed and wrapped in a ```json fence.
Json,
}
/// Input payload for the `fetch` tool, deserialized from the model's
/// JSON arguments (schema generated via `schemars`).
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct FetchToolInput {
/// The URL to fetch.
url: String,
}
/// Tool that fetches a URL and returns its content converted to Markdown.
pub struct FetchTool {
// Shared HTTP client used for all fetches.
http_client: Arc<HttpClientWithUrl>,
}
impl FetchTool {
pub fn new(http_client: Arc<HttpClientWithUrl>) -> Self {
Self { http_client }
}
async fn build_message(http_client: Arc<HttpClientWithUrl>, url: &str) -> Result<String> {
let mut url = url.to_owned();
if !url.starts_with("https://") && !url.starts_with("http://") {
url = format!("https://{url}");
}
let mut response = http_client.get(&url, AsyncBody::default(), true).await?;
let mut body = Vec::new();
response
.body_mut()
.read_to_end(&mut body)
.await
.context("error reading response body")?;
if response.status().is_client_error() {
let text = String::from_utf8_lossy(body.as_slice());
bail!(
"status error {}, response: {text:?}",
response.status().as_u16()
);
}
let Some(content_type) = response.headers().get("content-type") else {
bail!("missing Content-Type header");
};
let content_type = content_type
.to_str()
.context("invalid Content-Type header")?;
let content_type = match content_type {
"text/html" => ContentType::Html,
"text/plain" => ContentType::Plaintext,
"application/json" => ContentType::Json,
_ => ContentType::Html,
};
match content_type {
ContentType::Html => {
let mut handlers: Vec<TagHandler> = vec![
Rc::new(RefCell::new(markdown::WebpageChromeRemover)),
Rc::new(RefCell::new(markdown::ParagraphHandler)),
Rc::new(RefCell::new(markdown::HeadingHandler)),
Rc::new(RefCell::new(markdown::ListHandler)),
Rc::new(RefCell::new(markdown::TableHandler::new())),
Rc::new(RefCell::new(markdown::StyledTextHandler)),
];
if url.contains("wikipedia.org") {
use html_to_markdown::structure::wikipedia;
handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaChromeRemover)));
handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaInfoboxHandler)));
handlers.push(Rc::new(
RefCell::new(wikipedia::WikipediaCodeHandler::new()),
));
} else {
handlers.push(Rc::new(RefCell::new(markdown::CodeHandler)));
}
convert_html_to_markdown(&body[..], &mut handlers)
}
ContentType::Plaintext => Ok(std::str::from_utf8(&body)?.to_owned()),
ContentType::Json => {
let json: serde_json::Value = serde_json::from_slice(&body)?;
Ok(format!(
"```json\n{}\n```",
serde_json::to_string_pretty(&json)?
))
}
}
}
}
impl Tool for FetchTool {
// Tool identifier the model uses to invoke this tool.
fn name(&self) -> String {
"fetch".to_string()
}
// Human/model-facing description, kept in a sibling markdown file.
fn description(&self) -> String {
include_str!("./fetch_tool/description.md").to_string()
}
// JSON schema for the tool's input, generated from `FetchToolInput`.
fn input_schema(&self) -> serde_json::Value {
let schema = schemars::schema_for!(FetchToolInput);
serde_json::to_value(&schema).unwrap()
}
// Deserializes the input, fetches the URL on the background executor,
// and returns the converted Markdown. Errors if the input is malformed
// or the fetched page yields no textual content.
fn run(
self: Arc<Self>,
input: serde_json::Value,
_messages: &[LanguageModelRequestMessage],
_project: Entity<Project>,
_action_log: Entity<ActionLog>,
cx: &mut App,
) -> Task<Result<String>> {
let input = match serde_json::from_value::<FetchToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))),
};
// Do the network fetch and HTML→Markdown conversion off the main thread.
let text = cx.background_spawn({
let http_client = self.http_client.clone();
let url = input.url.clone();
async move { Self::build_message(http_client, &url).await }
});
// Await the background work and reject empty results.
cx.foreground_executor().spawn(async move {
let text = text.await?;
if text.trim().is_empty() {
bail!("no textual content found");
}
Ok(text)
})
}
}

View File

@@ -0,0 +1 @@
Fetches a URL and returns the content as Markdown.

View File

@@ -1,12 +1,13 @@
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use assistant_tool::{ActionLog, Tool}; use assistant_tool::{ActionLog, Tool};
use gpui::{App, Entity, Task}; use gpui::{App, AppContext, Entity, Task};
use language_model::LanguageModelRequestMessage; use language_model::LanguageModelRequestMessage;
use project::Project; use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{path::PathBuf, sync::Arc}; use std::{path::PathBuf, sync::Arc};
use util::paths::PathMatcher; use util::paths::PathMatcher;
use worktree::Snapshot;
#[derive(Debug, Serialize, Deserialize, JsonSchema)] #[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct PathSearchToolInput { pub struct PathSearchToolInput {
@@ -22,8 +23,15 @@ pub struct PathSearchToolInput {
/// You can get back the first two paths by providing a glob of "*thing*.txt" /// You can get back the first two paths by providing a glob of "*thing*.txt"
/// </example> /// </example>
pub glob: String, pub glob: String,
/// Optional starting position for paginated results (0-based).
/// When not provided, starts from the beginning.
#[serde(default)]
pub offset: Option<usize>,
} }
const RESULTS_PER_PAGE: usize = 50;
pub struct PathSearchTool; pub struct PathSearchTool;
impl Tool for PathSearchTool { impl Tool for PathSearchTool {
@@ -48,42 +56,66 @@ impl Tool for PathSearchTool {
_action_log: Entity<ActionLog>, _action_log: Entity<ActionLog>,
cx: &mut App, cx: &mut App,
) -> Task<Result<String>> { ) -> Task<Result<String>> {
let glob = match serde_json::from_value::<PathSearchToolInput>(input) { let (offset, glob) = match serde_json::from_value::<PathSearchToolInput>(input) {
Ok(input) => input.glob, Ok(input) => (input.offset.unwrap_or(0), input.glob),
Err(err) => return Task::ready(Err(anyhow!(err))), Err(err) => return Task::ready(Err(anyhow!(err))),
}; };
let path_matcher = match PathMatcher::new(&[glob.clone()]) { let path_matcher = match PathMatcher::new(&[glob.clone()]) {
Ok(matcher) => matcher, Ok(matcher) => matcher,
Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {}", err))), Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {}", err))),
}; };
let snapshots: Vec<Snapshot> = project
.read(cx)
.worktrees(cx)
.map(|worktree| worktree.read(cx).snapshot())
.collect();
let mut matches = Vec::new(); cx.background_spawn(async move {
let mut matches = Vec::new();
for worktree_handle in project.read(cx).worktrees(cx) { for worktree in snapshots {
let worktree = worktree_handle.read(cx); let root_name = worktree.root_name();
let root_name = worktree.root_name();
// Don't consider ignored entries. // Don't consider ignored entries.
for entry in worktree.entries(false, 0) { for entry in worktree.entries(false, 0) {
if path_matcher.is_match(&entry.path) { if path_matcher.is_match(&entry.path) {
matches.push( matches.push(
PathBuf::from(root_name) PathBuf::from(root_name)
.join(&entry.path) .join(&entry.path)
.to_string_lossy() .to_string_lossy()
.to_string(), .to_string(),
); );
}
} }
} }
}
if matches.is_empty() { if matches.is_empty() {
Task::ready(Ok(format!( Ok(format!("No paths in the project matched the glob {glob:?}"))
"No paths in the project matched the glob {glob:?}" } else {
))) // Sort to group entries in the same directory together.
} else { matches.sort();
// Sort to group entries in the same directory together.
matches.sort(); let total_matches = matches.len();
Task::ready(Ok(matches.join("\n"))) let response = if total_matches > offset + RESULTS_PER_PAGE {
} let paginated_matches: Vec<_> = matches
.into_iter()
.skip(offset)
.take(RESULTS_PER_PAGE)
.collect();
format!(
"Found {} total matches. Showing results {}-{} (provide 'offset' parameter for more results):\n\n{}",
total_matches,
offset + 1,
offset + paginated_matches.len(),
paginated_matches.join("\n")
)
} else {
matches.join("\n")
};
Ok(response)
}
})
} }
} }

View File

@@ -1 +1,3 @@
Returns all the paths in the project which match the given glob. Returns paths in the project which match the given glob.
Results are paginated with 50 matches per page. Use the optional 'offset' parameter to request subsequent pages.

View File

@@ -4,6 +4,7 @@ use std::sync::Arc;
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use assistant_tool::{ActionLog, Tool}; use assistant_tool::{ActionLog, Tool};
use gpui::{App, Entity, Task}; use gpui::{App, Entity, Task};
use itertools::Itertools;
use language_model::LanguageModelRequestMessage; use language_model::LanguageModelRequestMessage;
use project::Project; use project::Project;
use schemars::JsonSchema; use schemars::JsonSchema;
@@ -26,6 +27,14 @@ pub struct ReadFileToolInput {
/// If you wanna access `file.txt` in `directory2`, you should use the path `directory2/file.txt`. /// If you wanna access `file.txt` in `directory2`, you should use the path `directory2/file.txt`.
/// </example> /// </example>
pub path: Arc<Path>, pub path: Arc<Path>,
/// Optional line number to start reading on (1-based index)
#[serde(default)]
pub start_line: Option<usize>,
/// Optional line number to end reading on (1-based index)
#[serde(default)]
pub end_line: Option<usize>,
} }
pub struct ReadFileTool; pub struct ReadFileTool;
@@ -49,7 +58,7 @@ impl Tool for ReadFileTool {
input: serde_json::Value, input: serde_json::Value,
_messages: &[LanguageModelRequestMessage], _messages: &[LanguageModelRequestMessage],
project: Entity<Project>, project: Entity<Project>,
_action_log: Entity<ActionLog>, action_log: Entity<ActionLog>,
cx: &mut App, cx: &mut App,
) -> Task<Result<String>> { ) -> Task<Result<String>> {
let input = match serde_json::from_value::<ReadFileToolInput>(input) { let input = match serde_json::from_value::<ReadFileToolInput>(input) {
@@ -60,23 +69,35 @@ impl Tool for ReadFileTool {
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else { let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
return Task::ready(Err(anyhow!("Path not found in project"))); return Task::ready(Err(anyhow!("Path not found in project")));
}; };
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
let buffer = cx let buffer = cx
.update(|cx| { .update(|cx| {
project.update(cx, |project, cx| project.open_buffer(project_path, cx)) project.update(cx, |project, cx| project.open_buffer(project_path, cx))
})? })?
.await?; .await?;
buffer.read_with(&cx, |buffer, _cx| { let result = buffer.read_with(cx, |buffer, _cx| {
if buffer let text = buffer.text();
.file() if input.start_line.is_some() || input.end_line.is_some() {
.map_or(false, |file| file.disk_state().exists()) let start = input.start_line.unwrap_or(1);
{ let lines = text.split('\n').skip(start - 1);
Ok(buffer.text()) if let Some(end) = input.end_line {
let count = end.saturating_sub(start);
Itertools::intersperse(lines.take(count), "\n").collect()
} else {
Itertools::intersperse(lines, "\n").collect()
}
} else { } else {
Err(anyhow!("File does not exist")) text
} }
})? })?;
action_log.update(cx, |log, cx| {
log.buffer_read(buffer, cx);
})?;
anyhow::Ok(result)
}) })
} }
} }

View File

@@ -4,7 +4,10 @@ use futures::StreamExt;
use gpui::{App, Entity, Task}; use gpui::{App, Entity, Task};
use language::OffsetRangeExt; use language::OffsetRangeExt;
use language_model::LanguageModelRequestMessage; use language_model::LanguageModelRequestMessage;
use project::{search::SearchQuery, Project}; use project::{
search::{SearchQuery, SearchResult},
Project,
};
use schemars::JsonSchema; use schemars::JsonSchema;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{cmp, fmt::Write, sync::Arc}; use std::{cmp, fmt::Write, sync::Arc};
@@ -15,8 +18,15 @@ pub struct RegexSearchToolInput {
/// A regex pattern to search for in the entire project. Note that the regex /// A regex pattern to search for in the entire project. Note that the regex
/// will be parsed by the Rust `regex` crate. /// will be parsed by the Rust `regex` crate.
pub regex: String, pub regex: String,
/// Optional starting position for paginated results (0-based).
/// When not provided, starts from the beginning.
#[serde(default)]
pub offset: Option<usize>,
} }
const RESULTS_PER_PAGE: usize = 20;
pub struct RegexSearchTool; pub struct RegexSearchTool;
impl Tool for RegexSearchTool { impl Tool for RegexSearchTool {
@@ -43,13 +53,13 @@ impl Tool for RegexSearchTool {
) -> Task<Result<String>> { ) -> Task<Result<String>> {
const CONTEXT_LINES: u32 = 2; const CONTEXT_LINES: u32 = 2;
let input = match serde_json::from_value::<RegexSearchToolInput>(input) { let (offset, regex) = match serde_json::from_value::<RegexSearchToolInput>(input) {
Ok(input) => input, Ok(input) => (input.offset.unwrap_or(0), input.regex),
Err(err) => return Task::ready(Err(anyhow!(err))), Err(err) => return Task::ready(Err(anyhow!(err))),
}; };
let query = match SearchQuery::regex( let query = match SearchQuery::regex(
&input.regex, &regex,
false, false,
false, false,
false, false,
@@ -62,20 +72,23 @@ impl Tool for RegexSearchTool {
}; };
let results = project.update(cx, |project, cx| project.search(query, cx)); let results = project.update(cx, |project, cx| project.search(query, cx));
cx.spawn(|cx| async move {
cx.spawn(async move|cx| {
futures::pin_mut!(results); futures::pin_mut!(results);
let mut output = String::new(); let mut output = String::new();
while let Some(project::search::SearchResult::Buffer { buffer, ranges }) = let mut skips_remaining = offset;
results.next().await let mut matches_found = 0;
{ let mut has_more_matches = false;
while let Some(SearchResult::Buffer { buffer, ranges }) = results.next().await {
if ranges.is_empty() { if ranges.is_empty() {
continue; continue;
} }
buffer.read_with(&cx, |buffer, cx| { buffer.read_with(cx, |buffer, cx| -> Result<(), anyhow::Error> {
if let Some(path) = buffer.file().map(|file| file.full_path(cx)) { if let Some(path) = buffer.file().map(|file| file.full_path(cx)) {
writeln!(output, "### Found matches in {}:\n", path.display()).unwrap(); let mut file_header_written = false;
let mut ranges = ranges let mut ranges = ranges
.into_iter() .into_iter()
.map(|range| { .map(|range| {
@@ -93,6 +106,17 @@ impl Tool for RegexSearchTool {
.peekable(); .peekable();
while let Some(mut range) = ranges.next() { while let Some(mut range) = ranges.next() {
if skips_remaining > 0 {
skips_remaining -= 1;
continue;
}
// We'd already found a full page of matches, and we just found one more.
if matches_found >= RESULTS_PER_PAGE {
has_more_matches = true;
return Ok(());
}
while let Some(next_range) = ranges.peek() { while let Some(next_range) = ranges.peek() {
if range.end.row >= next_range.start.row { if range.end.row >= next_range.start.row {
range.end = next_range.end; range.end = next_range.end;
@@ -102,18 +126,36 @@ impl Tool for RegexSearchTool {
} }
} }
writeln!(output, "```").unwrap(); if !file_header_written {
writeln!(output, "\n## Matches in {}", path.display())?;
file_header_written = true;
}
let start_line = range.start.row + 1;
let end_line = range.end.row + 1;
writeln!(output, "\n### Lines {start_line}-{end_line}\n```")?;
output.extend(buffer.text_for_range(range)); output.extend(buffer.text_for_range(range));
writeln!(output, "\n```\n").unwrap(); output.push_str("\n```\n");
matches_found += 1;
} }
} }
})?;
Ok(())
})??;
} }
if output.is_empty() { if matches_found == 0 {
Ok("No matches found".to_string()) Ok("No matches found".to_string())
} else { } else if has_more_matches {
Ok(output) Ok(format!(
"Showing matches {}-{} (there were more matches found; use offset: {} to see next page):\n{output}",
offset + 1,
offset + matches_found,
offset + RESULTS_PER_PAGE,
))
} else {
Ok(format!("Found {matches_found} matches:\n{output}"))
} }
}) })
} }

View File

@@ -1,3 +1,5 @@
Searches the entire project for the given regular expression. Searches the entire project for the given regular expression.
Returns a list of paths that matched the query. For each path, it returns a list of excerpts of the matched text. Returns a list of paths that matched the query. For each path, it returns a list of excerpts of the matched text.
Results are paginated with 20 matches per page. Use the optional 'offset' parameter to request subsequent pages.

View File

@@ -252,11 +252,9 @@ impl AutoUpdater {
} }
pub fn start_polling(&self, cx: &mut Context<Self>) -> Task<Result<()>> { pub fn start_polling(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| loop {
loop { this.update(cx, |this, cx| this.poll(cx))?;
this.update(&mut cx, |this, cx| this.poll(cx))?; cx.background_executor().timer(POLL_INTERVAL).await;
cx.background_executor().timer(POLL_INTERVAL).await;
}
}) })
} }
@@ -267,9 +265,9 @@ impl AutoUpdater {
cx.notify(); cx.notify();
self.pending_poll = Some(cx.spawn(|this, mut cx| async move { self.pending_poll = Some(cx.spawn(async move |this, cx| {
let result = Self::update(this.upgrade()?, cx.clone()).await; let result = Self::update(this.upgrade()?, cx.clone()).await;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.pending_poll = None; this.pending_poll = None;
if let Err(error) = result { if let Err(error) = result {
log::error!("auto-update failed: error:{:?}", error); log::error!("auto-update failed: error:{:?}", error);

View File

@@ -64,7 +64,7 @@ fn view_release_notes_locally(
workspace workspace
.with_local_workspace(window, cx, move |_, window, cx| { .with_local_workspace(window, cx, move |_, window, cx| {
cx.spawn_in(window, |workspace, mut cx| async move { cx.spawn_in(window, async move |workspace, cx| {
let markdown = markdown.await.log_err(); let markdown = markdown.await.log_err();
let response = client.get(&url, Default::default(), true).await; let response = client.get(&url, Default::default(), true).await;
let Some(mut response) = response.log_err() else { let Some(mut response) = response.log_err() else {
@@ -79,7 +79,7 @@ fn view_release_notes_locally(
if let Ok(body) = body { if let Ok(body) = body {
workspace workspace
.update_in(&mut cx, |workspace, window, cx| { .update_in(cx, |workspace, window, cx| {
let project = workspace.project().clone(); let project = workspace.project().clone();
let buffer = project.update(cx, |project, cx| { let buffer = project.update(cx, |project, cx| {
project.create_local_buffer("", markdown, cx) project.create_local_buffer("", markdown, cx)
@@ -130,7 +130,7 @@ pub fn notify_if_app_was_updated(cx: &mut App) {
return; return;
}; };
let should_show_notification = updater.read(cx).should_show_update_notification(cx); let should_show_notification = updater.read(cx).should_show_update_notification(cx);
cx.spawn(|cx| async move { cx.spawn(async move |cx| {
let should_show_notification = should_show_notification.await?; let should_show_notification = should_show_notification.await?;
if should_show_notification { if should_show_notification {
cx.update(|cx| { cx.update(|cx| {

View File

@@ -1080,12 +1080,12 @@ impl BufferDiff {
let complete_on_drop = util::defer(|| { let complete_on_drop = util::defer(|| {
tx.send(()).ok(); tx.send(()).ok();
}); });
cx.spawn(|_, mut cx| async move { cx.spawn(async move |_, cx| {
let snapshot = snapshot.await; let snapshot = snapshot.await;
let Some(this) = this.upgrade() else { let Some(this) = this.upgrade() else {
return; return;
}; };
this.update(&mut cx, |this, _| { this.update(cx, |this, _| {
this.set_state(snapshot, &buffer); this.set_state(snapshot, &buffer);
}) })
.log_err(); .log_err();

View File

@@ -54,10 +54,10 @@ impl OneAtATime {
{ {
let (tx, rx) = oneshot::channel(); let (tx, rx) = oneshot::channel();
self.cancel.replace(tx); self.cancel.replace(tx);
cx.spawn(|cx| async move { cx.spawn(async move |cx| {
futures::select_biased! { futures::select_biased! {
_ = rx.fuse() => Ok(None), _ = rx.fuse() => Ok(None),
result = f(cx).fuse() => result.map(Some), result = f(cx.clone()).fuse() => result.map(Some),
} }
}) })
} }
@@ -192,19 +192,19 @@ impl ActiveCall {
}; };
let invite = if let Some(room) = room { let invite = if let Some(room) = room {
cx.spawn(move |_, mut cx| async move { cx.spawn(async move |_, cx| {
let room = room.await.map_err(|err| anyhow!("{:?}", err))?; let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
let initial_project_id = if let Some(initial_project) = initial_project { let initial_project_id = if let Some(initial_project) = initial_project {
Some( Some(
room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))? room.update(cx, |room, cx| room.share_project(initial_project, cx))?
.await?, .await?,
) )
} else { } else {
None None
}; };
room.update(&mut cx, move |room, cx| { room.update(cx, move |room, cx| {
room.call(called_user_id, initial_project_id, cx) room.call(called_user_id, initial_project_id, cx)
})? })?
.await?; .await?;
@@ -215,7 +215,7 @@ impl ActiveCall {
let client = self.client.clone(); let client = self.client.clone();
let user_store = self.user_store.clone(); let user_store = self.user_store.clone();
let room = cx let room = cx
.spawn(move |this, mut cx| async move { .spawn(async move |this, cx| {
let create_room = async { let create_room = async {
let room = cx let room = cx
.update(|cx| { .update(|cx| {
@@ -229,14 +229,14 @@ impl ActiveCall {
})? })?
.await?; .await?;
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))? this.update(cx, |this, cx| this.set_room(Some(room.clone()), cx))?
.await?; .await?;
anyhow::Ok(room) anyhow::Ok(room)
}; };
let room = create_room.await; let room = create_room.await;
this.update(&mut cx, |this, _| this.pending_room_creation = None)?; this.update(cx, |this, _| this.pending_room_creation = None)?;
room.map_err(Arc::new) room.map_err(Arc::new)
}) })
.shared(); .shared();
@@ -247,10 +247,10 @@ impl ActiveCall {
}) })
}; };
cx.spawn(move |this, mut cx| async move { cx.spawn(async move |this, cx| {
let result = invite.await; let result = invite.await;
if result.is_ok() { if result.is_ok() {
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.report_call_event("Participant Invited", cx) this.report_call_event("Participant Invited", cx)
})?; })?;
} else { } else {
@@ -258,7 +258,7 @@ impl ActiveCall {
log::error!("invite failed: {:?}", result); log::error!("invite failed: {:?}", result);
} }
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.pending_invites.remove(&called_user_id); this.pending_invites.remove(&called_user_id);
cx.notify(); cx.notify();
})?; })?;
@@ -315,11 +315,11 @@ impl ActiveCall {
._join_debouncer ._join_debouncer
.spawn(cx, move |cx| Room::join(room_id, client, user_store, cx)); .spawn(cx, move |cx| Room::join(room_id, client, user_store, cx));
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let room = join.await?; let room = join.await?;
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))? this.update(cx, |this, cx| this.set_room(room.clone(), cx))?
.await?; .await?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| {
this.report_call_event("Incoming Call Accepted", cx) this.report_call_event("Incoming Call Accepted", cx)
})?; })?;
Ok(()) Ok(())
@@ -363,13 +363,11 @@ impl ActiveCall {
Room::join_channel(channel_id, client, user_store, cx).await Room::join_channel(channel_id, client, user_store, cx).await
}); });
cx.spawn(|this, mut cx| async move { cx.spawn(async move |this, cx| {
let room = join.await?; let room = join.await?;
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))? this.update(cx, |this, cx| this.set_room(room.clone(), cx))?
.await?; .await?;
this.update(&mut cx, |this, cx| { this.update(cx, |this, cx| this.report_call_event("Channel Joined", cx))?;
this.report_call_event("Channel Joined", cx)
})?;
Ok(room) Ok(room)
}) })
} }

Some files were not shown because too many files have changed in this diff Show More