Compare commits

..

67 Commits

Author SHA1 Message Date
Richard Feldman
1ff8521612 wip 2025-03-20 09:36:42 -04:00
Richard Feldman
ca22d5d4a3 Add shell_parser crate 2025-03-19 22:19:16 -04:00
renovate[bot]
1cf252f8eb Update Rust crate semver to v1.0.26 (#27143)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [semver](https://redirect.github.com/dtolnay/semver) | workspace.dependencies | patch | `1.0.25` -> `1.0.26` |

---

### Release Notes

<details>
<summary>dtolnay/semver (semver)</summary>

### [`v1.0.26`](https://redirect.github.com/dtolnay/semver/releases/tag/1.0.26)

[Compare
Source](https://redirect.github.com/dtolnay/semver/compare/1.0.25...1.0.26)

-   Documentation improvements

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-20 01:20:45 +00:00
Julia Ryan
e46c72f4a8 nix: Add nightly build job with cachix (#27014)
I'll be using this to `nix run github:zed-industries/zed/nightly` and
get an up-to-date and cached nightly build.

It'll also serve as a way to warn me when the nix build is broken,
rather than having to wait for users to report it.

Eventually and depending on the build time of the nix builds, we may
want to consider putting a nix build in CI (#17458) to prevent
breakages, but for now a best-effort nightly build that doesn't block
the job if it fails is a good start.

Resolve #19937

Release Notes:

- N/A
2025-03-20 00:16:06 +00:00
renovate[bot]
63f656faae Update Rust crate async-compression to v0.4.21 (#27122)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [async-compression](https://redirect.github.com/Nullus157/async-compression) | workspace.dependencies | patch | `0.4.20` -> `0.4.21` |

---

### Release Notes

<details>
<summary>Nullus157/async-compression (async-compression)</summary>

### [`v0.4.21`](https://redirect.github.com/Nullus157/async-compression/blob/HEAD/CHANGELOG.md#0421---2025-03-15)

[Compare
Source](https://redirect.github.com/Nullus157/async-compression/compare/v0.4.20...v0.4.21)

##### Fixed

- When flate encoding, do not mark internal state as flushed if it ran
out of buffer space.
- Add debug assertion in `produce` method to check buffer capacity in
implementations for `BufWriter`.

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 18:36:27 -04:00
renovate[bot]
31b8c36479 Update Rust crate async-std to v1.13.1 (#27127)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [async-std](https://async.rs) ([source](https://redirect.github.com/async-rs/async-std)) | dependencies | patch | `1.13.0` -> `1.13.1` |

---

### Release Notes

<details>
<summary>async-rs/async-std (async-std)</summary>

### [`v1.13.1`](https://redirect.github.com/async-rs/async-std/blob/HEAD/CHANGELOG.md#1131---2025-02-21)

[Compare
Source](https://redirect.github.com/async-rs/async-std/compare/v1.13.0...v1.13.1)

`async-std` has officially been discontinued. We recommend that all
users and
libraries migrate to the excellent
[`smol`](https://redirect.github.com/smol-rs/smol/)
project.

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 18:35:56 -04:00
Agus Zubiaga
dfdca540ec assistant2: Handle empty tool results by providing placeholder text (#27130)
This is surprising, but the Anthropic API returns a 400 if a tool output
is an empty string: it assumes we're attaching a `tool_use` without a
corresponding `tool_result`, when in fact the result is just empty (which
seems totally reasonable) 🙃
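A minimal sketch of the workaround, assuming a hypothetical `ToolResult` type
with a plain-string `content` field (illustrative names, not Zed's actual
types):

```rust
/// Hypothetical tool result payload; Zed's real type differs.
struct ToolResult {
    tool_use_id: String,
    content: String,
}

/// Substitute placeholder text so the API never sees an empty tool result.
fn normalize_tool_result(mut result: ToolResult) -> ToolResult {
    if result.content.trim().is_empty() {
        result.content = "Tool returned an empty string".to_string();
    }
    result
}

fn main() {
    let result = normalize_tool_result(ToolResult {
        tool_use_id: "toolu_123".into(),
        content: String::new(),
    });
    assert_eq!(result.tool_use_id, "toolu_123");
    assert_eq!(result.content, "Tool returned an empty string");
}
```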

Release Notes:

- N/A
2025-03-19 22:30:49 +00:00
renovate[bot]
14c036931d Update Rust crate async-trait to v0.1.88 (#27128)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [async-trait](https://redirect.github.com/dtolnay/async-trait) | workspace.dependencies | patch | `0.1.87` -> `0.1.88` |

---

### Release Notes

<details>
<summary>dtolnay/async-trait (async-trait)</summary>

### [`v0.1.88`](https://redirect.github.com/dtolnay/async-trait/releases/tag/0.1.88)

[Compare
Source](https://redirect.github.com/dtolnay/async-trait/compare/0.1.87...0.1.88)

- Fix lifetime bounding on generic parameters that have cfg
([#&#8203;289](https://redirect.github.com/dtolnay/async-trait/issues/289))

</details>

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 22:17:00 +00:00
Mikayla Maki
5387ae9ed8 Add documentation for secondary modifier (#27129)
Follow up to: https://github.com/zed-industries/zed/pull/26390

Release Notes:

- N/A
2025-03-19 22:05:33 +00:00
Angelo Verlain Shema
c30fb5f1ec Use shell script language for APKBUILD files (#27099)
`APKBUILD` files are similar to `PKGBUILD` used by arch linux, but are
used to build alpine linux packages:
https://wiki.alpinelinux.org/wiki/APKBUILD_Reference

Release Notes:

- Added recognition for `APKBUILD` files as "Shell Script".
2025-03-19 22:00:44 +00:00
renovate[bot]
f7e2b7b679 Update actions/upload-artifact digest to ea165f8 (#27115)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [actions/upload-artifact](https://redirect.github.com/actions/upload-artifact) | action | digest | `4cec3d8` -> `ea165f8` |

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 17:55:06 -04:00
renovate[bot]
b3bf3e2d53 Update cloudflare/wrangler-action digest to da0e0df (#27116)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [cloudflare/wrangler-action](https://redirect.github.com/cloudflare/wrangler-action) | action | digest | `392082e` -> `da0e0df` |

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 17:54:25 -04:00
renovate[bot]
1cc59b317c Update actions/setup-node digest to cdca736 (#27108)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [actions/setup-node](https://redirect.github.com/actions/setup-node) | action | digest | `1d0ff46` -> `cdca736` |

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 17:52:19 -04:00
Marshall Bowers
efd3f8a8f1 assistant2: Add initial concept of profiles (#27123)
This PR adds the initial concept of agent profiles to Assistant 2.

Right now these are just collections of tools that can quickly be
enabled together:


https://github.com/user-attachments/assets/7c7f9cc8-a5e5-492f-96f7-79697bbf3d72

There are currently two profiles:

- `Read-only` - Consists only of tools that do not perform writes.
- `Code Writer` - Consists of all tools for writing code, with the
exception of the `lua-interpreter`.
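A rough sketch of the idea, treating a profile as a named set of enabled
tools; the tool names and types below are purely illustrative, not
Assistant 2's real ones:

```rust
use std::collections::{BTreeMap, BTreeSet};

/// Hypothetical built-in profiles: a profile is just a named set of tools
/// that get enabled together when the profile is selected.
fn builtin_profiles() -> BTreeMap<&'static str, BTreeSet<&'static str>> {
    BTreeMap::from([
        ("Read-only", BTreeSet::from(["read-file", "list-directory"])),
        ("Code Writer", BTreeSet::from(["read-file", "list-directory", "edit-file"])),
    ])
}

fn main() {
    let profiles = builtin_profiles();
    // Switching profiles swaps which set of tools is enabled in one step.
    assert!(profiles["Read-only"].iter().all(|tool| !tool.contains("edit")));
    assert!(profiles["Code Writer"].contains("edit-file"));
}
```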

Release Notes:

- N/A
2025-03-19 21:48:14 +00:00
Marshall Bowers
930dba4a7f Upgrade thiserror to v2.0 (#27117)
This PR upgrades `thiserror` to v2.0.

We were still on v1.0, but a number of our dependencies have already
moved to v2.0.

Release Notes:

- N/A
2025-03-19 20:47:38 +00:00
renovate[bot]
7cfd919523 Pin actions/checkout action to 11bd719 (#27107)
This PR contains the following updates:

| Package | Type | Update | Change |
|---|---|---|---|
| [actions/checkout](https://redirect.github.com/actions/checkout) | action | pinDigest | -> `11bd719` |

---

### Configuration

📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone
America/New_York, Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you
are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the
rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update
again.

---

- [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check
this box

---

Release Notes:

- N/A

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-03-19 16:46:51 -04:00
Peter Tripp
edd1b48e7c ci: Send emails for weekly release (#27102)
Release Notes:

- N/A
2025-03-19 16:16:34 -04:00
Michael Sloan
3ec69a5bc0 Make getting keybinding for display more efficient (#27046)
No longer iterates all the matching bindings, and no longer clones the
result.

Release Notes:

- N/A
2025-03-19 20:15:33 +00:00
Antonio Scandurra
33faa66e35 Start on a Git-based review flow (#27103)
Release Notes:

- N/A

---------

Co-authored-by: Nathan Sobo <nathan@zed.dev>
2025-03-19 19:00:21 +00:00
Marshall Bowers
68262fe7e4 theme: Add fallback colors for version_control.<variant> properties (#27104)
This PR adds fallback colors for the `version_control.<variant>` theme
properties.

This fixes the colors when themes do not provide the properties.

Related to  https://github.com/zed-industries/zed/pull/26951.

Release Notes:

- Added fallback colors for the `version_control.<variant>` theme
properties.
2025-03-19 18:48:35 +00:00
Joseph T. Lyons
2491426be7 Fix release notes API call with heredoc syntax (#27096)
Release Notes:

- N/A
2025-03-19 13:50:46 -04:00
Marshall Bowers
4487dc1064 assistant2: Add a button to open the extensions view to install more context servers (#27095)
This PR adds a new button in the Assistant configuration view to open
the extensions view pre-filtered to extensions that provide context
servers.


https://github.com/user-attachments/assets/3bc77507-c8b8-4bc6-8a17-ab5d8b3b7c8a

Release Notes:

- N/A
2025-03-19 17:48:42 +00:00
Piotr Osiewicz
e03edc2a76 debugger: Do not allow setting breakpoints in buffers without file storage (#27094)
Closes #ISSUE

Release Notes:

- N/A
2025-03-19 18:40:31 +01:00
Marshall Bowers
d722067000 extensions_ui: Add ability to open the extensions view with a pre-selected filter (#27093)
This PR adds the ability to open the extensions view via the `zed:
extensions` action with a pre-selected filter.

The "Install Themes" and "Install Icon Themes" buttons in their
respective selectors take advantage of this to set the filter when
opening the view:


https://github.com/user-attachments/assets/2e345c0f-418a-47b6-811e-cabae6c616d1

Release Notes:

- N/A
2025-03-19 17:26:46 +00:00
Kirill Bulatov
d51cd15e4d Remove an unused field in Diagnostic from zed.proto (#27091)
Release Notes:

- N/A
2025-03-19 17:15:43 +00:00
loczek
ef14bc8e76 docs: Add better snippets documentation (#26853)
Improved snippets docs

Release Notes:

- N/A
2025-03-19 18:05:05 +01:00
Marshall Bowers
9fe243efa5 gpui: Update doc comment for App::new (#27089)
This PR updates the doc comment for the `App::new` method.

Release Notes:

- N/A
2025-03-19 16:51:19 +00:00
Max Brunsfeld
74a39c7263 Make FakeGitRepository behave more like a real git repository (#26961)
This PR reworks the `FakeGitRepository` type that we use for testing git
interactions, to make it more realistic. In particular, the `status`
method now derives the Git status from the differences between HEAD, the
index, and the working copy. This way, if you modify a file in the
`FakeFs`, the Git repository's `status` method will reflect that
modification.
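A simplified sketch of deriving a status by comparing the file's contents in
HEAD, the index, and the working copy; the hypothetical `derive_status` below
only distinguishes a few cases and is not Zed's actual `FakeGitRepository`
code:

```rust
/// Simplified status derivation: compare the file's contents across HEAD,
/// the index, and the working copy. The real fake repository models more
/// states (untracked vs. added, conflicts, etc.).
#[derive(Debug, PartialEq)]
enum FileStatus {
    Unmodified,
    Added,
    Deleted,
    ModifiedStaged,
    ModifiedUnstaged,
}

fn derive_status(head: Option<&str>, index: Option<&str>, working: Option<&str>) -> FileStatus {
    match (head, index, working) {
        (None, None, None) => FileStatus::Unmodified, // file exists nowhere
        (None, _, Some(_)) => FileStatus::Added,
        (Some(_), _, None) => FileStatus::Deleted,
        (Some(h), Some(i), Some(w)) if h == i && i == w => FileStatus::Unmodified,
        (Some(h), Some(i), Some(_)) if h != i => FileStatus::ModifiedStaged,
        _ => FileStatus::ModifiedUnstaged,
    }
}

fn main() {
    // Editing the working copy without staging now shows up as a modification.
    assert_eq!(
        derive_status(Some("a\n"), Some("a\n"), Some("b\n")),
        FileStatus::ModifiedUnstaged
    );
    assert_eq!(derive_status(None, None, Some("new\n")), FileStatus::Added);
    assert_eq!(derive_status(Some("a\n"), Some("a\n"), None), FileStatus::Deleted);
    assert_eq!(
        derive_status(Some("a\n"), Some("b\n"), Some("b\n")),
        FileStatus::ModifiedStaged
    );
}
```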

Release Notes:

- N/A

---------

Co-authored-by: Junkui Zhang <364772080@qq.com>
2025-03-19 16:04:27 +00:00
Agus Zubiaga
5f398071b2 assistant2: Skip tool uses without a matching tool result (#27082)
Anthropic API doesn't allow `tool_use` messages without a corresponding
`tool_result`, so we'll skip those when building a request. I'll
separately investigate why we are sending a request before the tool result
arrives, as that might lead to separate issues, but that might take a
while and this is currently very frustrating.
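A self-contained sketch of the filtering step, using hypothetical message
types (not Zed's real request structs):

```rust
use std::collections::HashSet;

/// Hypothetical message shape; Zed's real request types differ.
#[derive(Debug)]
enum Message {
    Text(String),
    ToolUse { id: String },
    ToolResult { tool_use_id: String },
}

/// Drop any `ToolUse` whose id has no matching `ToolResult` in the thread.
fn strip_unmatched_tool_uses(messages: Vec<Message>) -> Vec<Message> {
    let resolved: HashSet<String> = messages
        .iter()
        .filter_map(|m| match m {
            Message::ToolResult { tool_use_id } => Some(tool_use_id.clone()),
            _ => None,
        })
        .collect();

    messages
        .into_iter()
        .filter(|m| match m {
            Message::ToolUse { id } => resolved.contains(id),
            _ => true,
        })
        .collect()
}

fn main() {
    let thread = vec![
        Message::Text("hello".into()),
        Message::ToolUse { id: "a".into() },
        Message::ToolUse { id: "b".into() },
        Message::ToolResult { tool_use_id: "a".into() },
    ];
    let request = strip_unmatched_tool_uses(thread);
    // The dangling tool use "b" is dropped; "a" keeps its result.
    assert_eq!(request.len(), 3);
}
```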

Release Notes:

- N/A
2025-03-19 15:54:57 +00:00
Marshall Bowers
410a942d57 assistant2: Add ability to start and stop context servers (#27080)
This PR adds the ability to start and stop context servers from within
the configuration view in the Assistant panel:


https://github.com/user-attachments/assets/93c3a7cb-d799-4286-88ba-c13cc26e959a

Release Notes:

- N/A
2025-03-19 15:37:48 +00:00
Joseph T. Lyons
06ffdc6791 Bump Zed to v0.180 (#27083)
Release Notes:

- N/A
2025-03-19 11:33:30 -04:00
Marshall Bowers
394215599a assistant2: Fix broken merge (#27081)
This PR fixes a broken merge caused by
https://github.com/zed-industries/zed/pull/26987 landing after
https://github.com/zed-industries/zed/pull/26758.

Release Notes:

- N/A
2025-03-19 15:26:19 +00:00
Richard Feldman
e8a40085de Allow tools to read unsaved buffers (#26987)
If the tool asks to read a path, we don't need to verify whether that
path exists on disk; an unsaved buffer with that path is fine.

Release Notes:

- N/A
2025-03-19 14:59:10 +00:00
Richard Feldman
6303751325 Record token usage telemetry (#26962)
<img width="1103" alt="Screenshot 2025-03-17 at 9 47 32 PM"
src="https://github.com/user-attachments/assets/947cf33d-4464-4305-8ff0-3630529d2f81"
/>


Release Notes:

- N/A
2025-03-19 10:47:46 -04:00
Antonio Scandurra
3edf930007 Revert "Start tracking edits performed by the agent" (#27077)
Reverts zed-industries/zed#27064
2025-03-19 15:33:08 +01:00
Jakub Čermák
584a70ca5e Refactor Git panel styling & status colors for consistency (#26951)
Closes #26847

Release Notes:

- Updated Git panel background to use panel_background instead of
ElevationIndex::Surface.bg(cx) for consistency with other panels.
- Removed redundant GitStatusColors struct from status.rs and refactored
to use existing theme colors.
- Adjusted Color enum mappings in color.rs to reference
version_control_* colors instead of status() for better alignment with
the theme system.
- Cleaned up unused or redundant code.
2025-03-19 10:26:36 -04:00
Smit Barmase
2230f3b09d editor: Preserve expand excerpt down button position (#27058)
When you press the "Expand Excerpt Down" button, the editor will scroll
up by the same amount to keep the button in same place. This allows you
to expand the excerpt rapidly without moving your mouse.

Before:


https://github.com/user-attachments/assets/376350ac-6f21-4ce0-a383-b2c9ca4f45bb

After:


https://github.com/user-attachments/assets/4fba4173-5f01-4220-990a-65820ac40cf5

Release Notes:

- Improved "Expand Excerpt Down" so the button stays in place, allowing
rapid expansion without moving the mouse.
2025-03-19 19:54:52 +05:30
5brian
84a8d48178 vim: Fix space not handling non-ascii characters (#27053)
Closes #26806

Changes: Clips the new point with `Bias::Right` like in
`saturating_right`
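A simplified illustration of the clipping idea on a plain string; Zed clips a
`DisplayPoint` with `Bias::Right`, but the principle is the same:

```rust
/// After stepping a cursor forward by one byte, clip it rightwards to the
/// next character boundary so multi-byte (non-ASCII) characters are never
/// split. This sketch does on a string what the fix does on display points.
fn clip_right(text: &str, mut offset: usize) -> usize {
    while offset < text.len() && !text.is_char_boundary(offset) {
        offset += 1;
    }
    offset.min(text.len())
}

fn main() {
    let text = "héllo"; // 'é' is two bytes in UTF-8
    let after_h = 1;
    // Naively adding 1 would land in the middle of 'é'; clipping fixes it.
    assert_eq!(clip_right(text, after_h + 1), 3);
}
```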

Release Notes:

- vim: Fixed `space` not handling non-ascii characters
2025-03-19 07:28:50 -06:00
Antonio Scandurra
ac5dafc6b2 Start tracking edits performed by the agent (#27064)
Release Notes:

- N/A

---------

Co-authored-by: Danilo Leal <daniloleal09@gmail.com>
Co-authored-by: Agus Zubiaga <hi@aguz.me>
2025-03-19 13:07:25 +00:00
Piotr Osiewicz
23686aa394 debugger: Do not use Disclosure for attach button (#27068)
Closes #ISSUE

Release Notes:

- N/A
2025-03-19 14:01:33 +01:00
Danilo Leal
3874d315ec assistant2: Adjust text and padding alignment between messages (#27067)
Ensuring that text in the "you" messages aligns with text in the
assistant response. This also creates a nice subtle hierarchy effect
where the "you" message card is wider than the message, making it
slightly easier to tell them apart.

<img
src="https://github.com/user-attachments/assets/616c1776-ca51-454e-9d52-e480bf26c843"
width="600px" />

Release Notes:

- N/A
2025-03-19 09:43:33 -03:00
Agus Zubiaga
1d33bfde37 assistant edit tool: Replace with flexible indentation (#27039)
Sometimes the model produces SEARCH queries that don't match the
indentation of the source file exactly.

When we can't find an exact match, we'll now attempt to match the lines
while being more flexible about the leading whitespace as long as all
lines are consistently offset from the source, and extend the leading
whitespace in the REPLACE string accordingly.
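A simplified sketch of the indentation-offset idea, under the assumption that
the offset must be pure whitespace and identical on every non-blank line (the
hypothetical `uniform_indent_offset` below is illustrative, not the actual
implementation):

```rust
/// Given a window of source lines and the model's SEARCH lines, check whether
/// they match modulo a uniform leading-whitespace offset, and return that
/// offset so it can be prepended to every REPLACE line.
fn uniform_indent_offset(source: &[&str], search: &[&str]) -> Option<String> {
    if source.len() != search.len() {
        return None;
    }
    let mut offset: Option<String> = None;
    for (src, pat) in source.iter().zip(search) {
        if src.trim().is_empty() && pat.trim().is_empty() {
            continue; // blank lines don't constrain the offset
        }
        // The source line must end with the search line, and the extra
        // prefix must be whitespace only.
        let prefix = src.strip_suffix(pat).map(|p| p.to_string())?;
        if !prefix.chars().all(char::is_whitespace) {
            return None;
        }
        if let Some(existing) = &offset {
            if *existing != prefix {
                return None; // inconsistent offsets: give up on this window
            }
        } else {
            offset = Some(prefix);
        }
    }
    Some(offset.unwrap_or_default())
}

fn main() {
    let source = ["    fn add(a: i32, b: i32) -> i32 {", "        a + b", "    }"];
    let search = ["fn add(a: i32, b: i32) -> i32 {", "    a + b", "}"];
    let offset = uniform_indent_offset(&source, &search).unwrap();
    assert_eq!(offset, "    ");
    // The same offset would then be prepended to each REPLACE line.
}
```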

Release Notes:

- N/A
2025-03-19 09:39:00 -03:00
Piotr Osiewicz
9377ef9817 feature_flags: Do not enable feature flags by default in dev builds (#27065)
Closes #ISSUE

Release Notes:

- N/A
2025-03-19 12:20:26 +00:00
Piotr Osiewicz
c3b5046347 editor: Do not use breakpoint color for run indicators (#27063)
Closes #ISSUE

Release Notes:

- N/A
2025-03-19 11:54:14 +00:00
Piotr Osiewicz
44fff08ed6 util: Include path to asset in panic message from asset_str (#27059)
Somebody on Discord ran into issues running the debugger, which come down
to an unwrap in `asset_str`. Let's print the path that was accessed.
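A hedged sketch of the same idea on a toy asset table; the real `asset_str`
reads from GPUI's asset source, but the point is the panic message carrying
the path:

```rust
use std::collections::HashMap;

/// Stand-in for an embedded-asset lookup; Zed's real `asset_str` differs,
/// but the failure-message change is the same idea.
fn asset_str<'a>(assets: &HashMap<&str, &'a str>, path: &str) -> &'a str {
    match assets.get(path) {
        Some(contents) => *contents,
        // Before: a bare `.unwrap()` panicked with no hint about *which*
        // asset was missing. Including the path makes the report actionable.
        None => panic!("missing embedded asset at path: {path}"),
    }
}

fn main() {
    let mut assets = HashMap::new();
    assets.insert("settings/default.json", "{}");
    assert_eq!(asset_str(&assets, "settings/default.json"), "{}");
}
```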

Release Notes:

- N/A
2025-03-19 11:09:51 +00:00
Anthony Eid
d4daa0a3a2 Show debug console evaluation response (#27050)
We weren't incrementing the output token when getting responses from the
debug evaluation request, which caused some output not to be displayed
(usually the evaluation response, but that could cascade into other
output events not showing).


Release Notes:
- N/A

Co-authored-by: Remco Smits <djsmits12@gmail.com>
Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
2025-03-19 05:37:32 +00:00
Conrad Irwin
81582cd7f3 Don't render breakpoint indicators on top of expand arrows (#27048)
Closes #ISSUE

cc @Anthony-Eid. One thing I noticed while doing this is that we do an
invalid cast here from DisplayPoint.row to MultiBufferRow. These are not
the same if you have soft-wrap enabled (or anything else in the display
map that's not in the editor).

Release Notes:

- N/A
2025-03-19 05:00:41 +00:00
Ryan Hawkins
0f5a3afe94 Support built-in Zed prompts for all platforms (#26201)
This pull request does two things:

1. Adds a setting to force Zed to use the built-in prompts, instead of
the system provided ones. I've personally found the system prompts on
macOS often fail to respond to keyboard input, are slow to render
initially, and don't match Zed's style.
2. Makes the previously Linux-only Zed provided prompts available to
everybody using the above setting.

Release Notes:
- Added support for a built-in prompting system, regardless of platform.
Use the new `use_system_prompts` setting to control whether to use the
system provided prompts or Zed's built-in system. Note that on Linux,
this setting has no effect, as Linux doesn't have a system prompting
mechanism.
2025-03-18 22:27:09 -06:00
CharlesChen0823
382f9f6151 language_tools: Fix buffer search keeping focus when pressing enter in vim mode (#26266)
Closes #25643 

Release Notes:

- Fixed buffer search keeping focus when pressing enter in vim mode

Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
2025-03-19 04:25:29 +00:00
5brian
15d2420031 workspace::Open: Fix trapped cursor/selection on update (#25402)
Closes #ISSUE

Issue: Selection index does not reset when the matches update, which can
lead to the selection getting trapped when that index does not exist in
the next matches.


https://github.com/user-attachments/assets/d3fab23f-750c-47fb-bd3b-a0c42f214c83

This is in `workspace::Open` with `"use_system_path_prompts": false`.
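A sketch of the fix's core idea with a hypothetical picker type (not Zed's
real `Picker`): clamp or reset the selected index whenever the match list
changes:

```rust
/// Hypothetical picker: whenever the match list is replaced, clamp the
/// selected index so it always points at an existing entry instead of
/// staying "trapped" out of range.
struct Picker {
    matches: Vec<String>,
    selected: usize,
}

impl Picker {
    fn update_matches(&mut self, matches: Vec<String>) {
        self.matches = matches;
        // Reset/clamp the selection so it is valid for the new match set.
        self.selected = self.selected.min(self.matches.len().saturating_sub(1));
    }
}

fn main() {
    let mut picker = Picker {
        matches: vec!["a".into(), "b".into(), "c".into()],
        selected: 2,
    };
    picker.update_matches(vec!["a".into()]);
    assert_eq!(picker.selected, 0);
}
```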

Release Notes:

- N/A

Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
2025-03-18 22:19:11 -06:00
CharlesChen0823
026c7274d9 workspace: Add function to save new file in directory nearest tab (#22563)
Closes #15685

Release Notes:

- Save new files in the directory of the nearest tab

---------

Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
2025-03-19 03:41:04 +00:00
Mikayla Maki
1aefa5178b Move "async move" a few characters to the left in cx.spawn() (#26758)
This is the core change:
https://github.com/zed-industries/zed/pull/26758/files#diff-044302c0d57147af17e68a0009fee3e8dcdfb4f32c27a915e70cfa80e987f765R1052

TODO:
- [x] Use AsyncFn instead of Fn() -> Future in GPUI spawn methods (see the sketch after this list)
- [x] Implement it in the whole app
- [x] Implement it in the debugger 
- [x] Glance at the RPC crate, and see if those box future methods can
be switched over. Answer: It can't directly, as you can't make an
AsyncFn* into a trait object. There's ways around that, but they're all
more complex than just keeping the code as is.
- [ ] Fix platform specific code
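A self-contained sketch of the signature shape this enables (not GPUI's
actual API); it assumes Rust 1.85+ async closures and uses
`futures::executor::block_on` from the `futures` crate just to drive the
futures:

```rust
use std::future::Future;
use std::ops::AsyncFnOnce;

struct Ctx;

// Before: the caller passes a closure that returns a future, so the body has
// to be wrapped in `async move { ... }` inside the closure.
fn spawn_old<F, Fut>(f: F)
where
    F: FnOnce(Ctx) -> Fut,
    Fut: Future<Output = ()>,
{
    futures::executor::block_on(f(Ctx));
}

// After: the caller passes an async closure directly, so `async move` sits on
// the closure itself ("a few characters to the left").
fn spawn_new<F>(f: F)
where
    F: AsyncFnOnce(Ctx),
{
    futures::executor::block_on(f(Ctx));
}

fn main() {
    spawn_old(|cx| async move {
        let _cx = cx;
    });
    spawn_new(async move |cx| {
        let _cx = cx;
    });
}
```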

Release Notes:

- N/A
2025-03-19 02:09:02 +00:00
João Marcos
7f2e3fb5bd Fix git stage race condition with delayed fs events (#27036)
This PR adds a failing test `test_staging_hunks_with_delayed_fs_event`
and makes it pass.

It also skips a queued read for git diff states if another read was
requested (less work).

This still doesn't catch all race conditions, but the PR is getting long,
so I'll yield this and start another branch.
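A sketch of the skip-stale-read idea using a generation counter; the names
and structure are illustrative, not the actual git diff state code:

```rust
use std::sync::atomic::{AtomicU64, Ordering};

/// Every read request bumps a generation counter; a queued read only runs if
/// it is still the newest one by the time it is dequeued.
struct DiffReadQueue {
    latest: AtomicU64,
}

impl DiffReadQueue {
    fn new() -> Self {
        Self { latest: AtomicU64::new(0) }
    }

    /// Called when a filesystem event requests a re-read; returns a ticket.
    fn request(&self) -> u64 {
        self.latest.fetch_add(1, Ordering::SeqCst) + 1
    }

    /// Called when a queued read is about to run; stale tickets are skipped.
    fn should_run(&self, ticket: u64) -> bool {
        self.latest.load(Ordering::SeqCst) == ticket
    }
}

fn main() {
    let queue = DiffReadQueue::new();
    let first = queue.request();
    let second = queue.request(); // a newer read superseded the first one
    assert!(!queue.should_run(first));
    assert!(queue.should_run(second));
}
```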

Release Notes:

- N/A
2025-03-18 22:44:36 -03:00
Agus Zubiaga
68a572873b assistant edit tool: Improve bad search output (#27012)
When we failed to match a search string, we were reporting the replace
string as not found. This confuses the model and can make it go into a
doom loop. This PR fixes that and improves the error output in general to
help the model recover faster.
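A hypothetical sketch of the reporting fix, with an illustrative
`edit_error` helper (not the actual error text Zed produces):

```rust
/// When the SEARCH block can't be found, the error must quote the *search*
/// text (what the model asked us to find), not the REPLACE text, so the
/// model can correct its query.
fn edit_error(search: &str, path: &str) -> String {
    format!(
        "Failed to match the SEARCH block in `{path}`. \
         No lines in the file matched:\n{search}\n\
         Re-read the file and try a smaller, exact excerpt."
    )
}

fn main() {
    let message = edit_error("fn mane() {", "src/main.rs");
    assert!(message.contains("fn mane() {"));
}
```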

Release Notes:

- N/A
2025-03-18 21:53:20 -03:00
Piotr Osiewicz
c042a02cf4 debugger: First slight pass at UI (#27034)
- Collapse Launch and Attach into a single split button
- Fix code actions indicator being colored red.

Release Notes:

- N/A
2025-03-19 00:15:48 +00:00
Julia Ryan
73ac3d9a99 nix: Fix LDFLAGS rpath (#26912)
By default stdenv strips all unused rpaths, but we use a few libraries
that are `dlopen`'d so we need to stop it from removing those. The
[`dontPatchELF`
flag](https://ryantm.github.io/nixpkgs/stdenv/stdenv/#var-stdenv-dontPatchELF)
disables that and makes the nix build work on wayland again.

Fix #26905
Close #26864

Release Notes:

- N/A
2025-03-18 17:04:27 -07:00
Peter Tripp
2269f996f7 Add more shortcuts for delete/restore in Git Panel (#27004)
Release Notes:

- N/A
2025-03-18 18:52:28 -04:00
Marshall Bowers
e9033a75ac assistant2: Remove unneeded debug logging (#27030)
This PR removes the debug logging added in
https://github.com/zed-industries/zed/pull/23722, as we no longer need
it.

Release Notes:

- N/A
2025-03-18 22:12:04 +00:00
Marshall Bowers
a2ae6a1c77 assistant2: Add tool lists for each context server (#27029)
This PR updates the list of context servers with the ability to view the
tools provided by the context server:

<img width="1394" alt="Screenshot 2025-03-18 at 5 53 05 PM"
src="https://github.com/user-attachments/assets/4ffe93dd-f9e9-44e7-877f-656ebf45a326"
/>

Release Notes:

- N/A
2025-03-18 22:04:47 +00:00
Jason Lee
985ac4e5f2 gpui: Reduce window.refresh to improve cache hit of the cached views (#25009)
Release Notes:

- Improved performance when using the scroll wheel and some other mouse
interactions.

Based on some details about GPUI's `AnyView::cached` caching that I found
in the discussion at
https://github.com/zed-industries/zed/discussions/24260#discussioncomment-12135749,
combined with optimization opportunities found in real applications.

This change may miss some scenarios that I have not considered, so I'm
opening it as a draft to put my ideas forward for discussion first.

From my analysis, `AnyView::cached` is always invalidated by Div's mouse
events, because they call `window.refresh`. I understand that, for
mouse-move events, this is because hover and mouse_move can change an
element's style, so `window.refresh` is required. Since Div does not have
the `entity_id` of a View, it is impossible to know which View should be
refreshed, so the entire window can only be refreshed.

With this change, we can avoid many `render` method calls on scroll-wheel
and other mouse events.
2025-03-18 14:52:20 -07:00
Kirill Bulatov
89ae4ca9a3 Fix debugger docs a bit (#27026)
Tried adding a custom debugging task and discovered two more required
properties missing from the docs.

Release Notes:

- N/A
2025-03-18 21:46:11 +00:00
Marshall Bowers
1d4afe6daa assistant2: Add context server list to configuration view (#27028)
This PR adds a context server list to the configuration view in
Assistant2:

<img width="1394" alt="Screenshot 2025-03-18 at 5 26 23 PM"
src="https://github.com/user-attachments/assets/58bf3920-1e35-4cb8-a32a-5ae9f98ce387"
/>

Release Notes:

- N/A
2025-03-18 21:41:39 +00:00
Joseph T. Lyons
777c88bcea Clean up community_release_actions file (#27027)
Release Notes:

- N/A
2025-03-18 21:29:22 +00:00
Kirill Bulatov
959a024861 Omit json-language-server from the scope_opt_in_language_servers (#27023)
Follow-up of https://github.com/zed-industries/zed/pull/26574/files

After that PR, settings.json stopped giving completions when `"` was
typed as a key:

https://github.com/user-attachments/assets/5ff03863-024c-4c28-a7cd-8ef48a1695d8

This goes down to fb12863999/crates/language/src/language.rs (L1736-L1748),
which was empty before the PR, hence taking the lower `true` branch. Now,
when typing `"`, there's no scope according to the return result of
fb12863999/crates/project/src/lsp_store.rs (L4529-L4532).

Removing `json-language-server` from `scope_opt_in_language_servers`
seems to preserve the `:` fix and restore the completions behavior.


Release Notes:

- N/A
2025-03-18 21:08:43 +00:00
Joseph T. Lyons
ed510b5e93 Remove unused AssistantThreadFeedback event (#27021)
It looks like:

- https://github.com/zed-industries/zed/pull/26780

accidentally added a new event type, `AssistantThreadFeedback`, using
the old event system, but never ended up actually using it, since the code
relies on the newer (preferred) `telemetry::event!()`.

Release Notes:

- N/A
2025-03-18 20:39:54 +00:00
Peter Tripp
674c572a28 ci: Run stalebot checks multiple times to ensure completion (#27017)
Stalebot has a maximum operations-per-run which is set at 1000. As a
result it may require multiple runs to successfully complete.

This morning it took [three
runs](https://github.com/zed-industries/zed/actions/runs/13921563707/attempts/1)
so set it to run three times two hours apart to avoid hitting github API
limits.

Release Notes:

- N/A
2025-03-18 16:34:24 -04:00
Martin Fischer
4a39fc2644 gpui: Provide workaround for AMD Linux driver bug (#26890)
There apparently is some amdgpu/radv bug where rendering with
multisample anti-aliasing (MSAA) results in a crash when the bounds
of a triangle list exceed 1024px, which in Zed happens with the default
buffer font size when you select a line with more than 144 characters.

This crash has been reported as #26143.

This commit introduces a workaround: you can set the
`ZED_PATH_SAMPLE_COUNT=0` environment variable to disable MSAA. The error
message we print when a GPU crash is encountered with radv now suggests
trying this environment variable as a workaround and links the respective
issue.
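An illustrative sketch (not GPUI's actual renderer code) of how such an
environment-variable override can gate the MSAA sample count:

```rust
use std::env;

/// `ZED_PATH_SAMPLE_COUNT=0` disables MSAA: 0 or 1 samples means "no
/// multisampling", anything else is used as the sample count. The parsing
/// here is only a sketch of that behavior.
fn path_sample_count(default: u32) -> u32 {
    match env::var("ZED_PATH_SAMPLE_COUNT") {
        Ok(value) => value.parse().unwrap_or(default),
        Err(_) => default,
    }
}

fn main() {
    let samples = path_sample_count(4);
    if samples <= 1 {
        println!("MSAA disabled (workaround for the amdgpu/radv crash)");
    } else {
        println!("rendering paths with {samples}x MSAA");
    }
}
```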

Sidenote: MSAA was introduced in
f08b1d78ec
so you didn't run into this driver bug with versions < 0.173.8.

Release Notes:

- Added a workaround for an AMD Linux driver bug that causes Zed to
crash when selecting long lines.
2025-03-18 20:11:09 +00:00
330 changed files with 7605 additions and 4987 deletions


@@ -10,7 +10,7 @@ runs:
cargo install cargo-nextest --locked
- name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "18"


@@ -16,7 +16,7 @@ runs:
run: cargo install cargo-nextest --locked
- name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "18"


@@ -482,7 +482,7 @@ jobs:
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "18"
@@ -526,14 +526,14 @@ jobs:
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
@@ -586,7 +586,7 @@ jobs:
run: script/bundle-linux
- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -595,7 +595,7 @@ jobs:
path: target/release/zed-*.tar.gz
- name: Upload Linux remote server to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -647,7 +647,7 @@ jobs:
run: script/bundle-linux
- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -656,7 +656,7 @@ jobs:
path: target/release/zed-*.tar.gz
- name: Upload Linux remote server to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')


@@ -1,7 +1,7 @@
name: "Close Stale Issues"
on:
schedule:
- cron: "0 11 * * 2"
- cron: "0 7,9,11 * * 2"
workflow_dispatch:
jobs:


@@ -13,10 +13,11 @@ jobs:
id: get-release-url
run: |
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
URL="https://zed.dev/releases/preview/latest"
URL="https://zed.dev/releases/preview/latest"
else
URL="https://zed.dev/releases/stable/latest"
URL="https://zed.dev/releases/stable/latest"
fi
echo "URL=$URL" >> $GITHUB_OUTPUT
- name: Get content
uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757 # v1.4.1
@@ -38,28 +39,30 @@ jobs:
if: github.repository_owner == 'zed-industries' && !github.event.release.prerelease
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0
- name: Check if release was promoted from preview
id: check-promotion-from-preview
run: |
VERSION="${{ github.event.release.tag_name }}"
PREVIEW_TAG="${VERSION}-pre"
if git rev-parse "$PREVIEW_TAG" >/dev/null 2>&1; then
echo "was_preview=true" >> $GITHUB_OUTPUT
if git rev-parse "$PREVIEW_TAG" > /dev/null 2>&1; then
echo "was_promoted_from_preview=true" >> $GITHUB_OUTPUT
else
echo "was_preview=false" >> $GITHUB_OUTPUT
echo "was_promoted_from_preview=false" >> $GITHUB_OUTPUT
fi
- name: Send release notes email
if: steps.check-promotion-from-preview.outputs.was_preview == 'true'
if: steps.check-promotion-from-preview.outputs.was_promoted_from_preview == 'true'
run: |
TAG="${{ github.event.release.tag_name }}"
echo \"${{ toJSON(github.event.release.body) }}\" > release_body.txt
jq -n --arg tag "$TAG" --rawfile body release_body.txt '{version: $tag, markdown_body: $body}' \
> release_data.json
curl -X POST "https://zed.dev/api/send_release_notes_email" \
-H "Authorization: Bearer ${{ secrets.RELEASE_NOTES_API_TOKEN }}" \
-H "Content-Type: application/json" \
-d '{
"version": "${{ github.event.release.tag_name }}",
"markdown_body": ${{ toJSON(github.event.release.body) }}
}'
-d @release_data.json


@@ -22,7 +22,7 @@ jobs:
version: 9
- name: Setup Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "20"
cache: "pnpm"


@@ -37,35 +37,35 @@ jobs:
mdbook build ./docs --dest-dir=../target/deploy/docs/
- name: Deploy Docs
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: pages deploy target/deploy --project-name=docs
- name: Deploy Install
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh
- name: Deploy Docs Workers
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js
- name: Deploy Install Workers
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js
- name: Preserve Wrangler logs
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: always()
with:
name: wrangler_logs


@@ -18,7 +18,7 @@ jobs:
version: 9
- name: Setup Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "20"
cache: "pnpm"


@@ -23,7 +23,7 @@ jobs:
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "18"


@@ -5,10 +5,8 @@ on:
# Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
- cron: "0 7 * * *"
push:
# tags:
# - "nightly"
branches:
- nix
tags:
- "nightly"
env:
CARGO_TERM_COLOR: always
@@ -30,147 +28,147 @@ jobs:
clean: false
fetch-depth: 0
# - name: Run style checks
# uses: ./.github/actions/check_style
- name: Run style checks
uses: ./.github/actions/check_style
# - name: Run clippy
# run: ./script/clippy
- name: Run clippy
run: ./script/clippy
# tests:
# timeout-minutes: 60
# name: Run tests
# if: github.repository_owner == 'zed-industries'
# runs-on:
# - self-hosted
# - test
# needs: style
# steps:
# - name: Checkout repo
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# with:
# clean: false
tests:
timeout-minutes: 60
name: Run tests
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- test
needs: style
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# - name: Run tests
# uses: ./.github/actions/run_tests
- name: Run tests
uses: ./.github/actions/run_tests
# bundle-mac:
# timeout-minutes: 60
# name: Create a macOS bundle
# if: github.repository_owner == 'zed-industries'
# runs-on:
# - self-hosted
# - bundle
# needs: tests
# env:
# MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
# MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
# APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
# APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
# APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
# DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
# DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
# ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
# ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
# steps:
# - name: Install Node
# uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
# with:
# node-version: "18"
bundle-mac:
timeout-minutes: 60
name: Create a macOS bundle
if: github.repository_owner == 'zed-industries'
runs-on:
- self-hosted
- bundle
needs: tests
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Install Node
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "18"
# - name: Checkout repo
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# with:
# clean: false
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# - name: Set release channel to nightly
# run: |
# set -eu
# version=$(git rev-parse --short HEAD)
# echo "Publishing version: ${version} on release channel nightly"
# echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: Set release channel to nightly
run: |
set -eu
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
# - name: Create macOS app bundle
# run: script/bundle-mac
- name: Create macOS app bundle
run: script/bundle-mac
# - name: Upload Zed Nightly
# run: script/upload-nightly macos
- name: Upload Zed Nightly
run: script/upload-nightly macos
# bundle-linux-x86:
# timeout-minutes: 60
# name: Create a Linux *.tar.gz bundle for x86
# if: github.repository_owner == 'zed-industries'
# runs-on:
# - buildjet-16vcpu-ubuntu-2004
# needs: tests
# env:
# DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
# DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
# ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
# ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
# steps:
# - name: Checkout repo
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# with:
# clean: false
bundle-linux-x86:
timeout-minutes: 60
name: Create a Linux *.tar.gz bundle for x86
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2004
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# - name: Add Rust to the PATH
# run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
- name: Add Rust to the PATH
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
# - name: Install Linux dependencies
# run: ./script/linux && ./script/install-mold 2.34.0
- name: Install Linux dependencies
run: ./script/linux && ./script/install-mold 2.34.0
# - name: Limit target directory size
# run: script/clear-target-dir-if-larger-than 100
- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100
# - name: Set release channel to nightly
# run: |
# set -euo pipefail
# version=$(git rev-parse --short HEAD)
# echo "Publishing version: ${version} on release channel nightly"
# echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: Set release channel to nightly
run: |
set -euo pipefail
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
# - name: Create Linux .tar.gz bundle
# run: script/bundle-linux
- name: Create Linux .tar.gz bundle
run: script/bundle-linux
# - name: Upload Zed Nightly
# run: script/upload-nightly linux-targz
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz
# bundle-linux-arm:
# timeout-minutes: 60
# name: Create a Linux *.tar.gz bundle for ARM
# if: github.repository_owner == 'zed-industries'
# runs-on:
# - buildjet-16vcpu-ubuntu-2204-arm
# needs: tests
# env:
# DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
# DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
# ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
# ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
# steps:
# - name: Checkout repo
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# with:
# clean: false
bundle-linux-arm:
timeout-minutes: 60
name: Create a Linux *.tar.gz bundle for ARM
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-16vcpu-ubuntu-2204-arm
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# - name: Install Linux dependencies
# run: ./script/linux
- name: Install Linux dependencies
run: ./script/linux
# - name: Limit target directory size
# run: script/clear-target-dir-if-larger-than 100
- name: Limit target directory size
run: script/clear-target-dir-if-larger-than 100
# - name: Set release channel to nightly
# run: |
# set -euo pipefail
# version=$(git rev-parse --short HEAD)
# echo "Publishing version: ${version} on release channel nightly"
# echo "nightly" > crates/zed/RELEASE_CHANNEL
- name: Set release channel to nightly
run: |
set -euo pipefail
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
# - name: Create Linux .tar.gz bundle
# run: script/bundle-linux
- name: Create Linux .tar.gz bundle
run: script/bundle-linux
# - name: Upload Zed Nightly
# run: script/upload-nightly linux-targz
- name: Upload Zed Nightly
run: script/upload-nightly linux-targz
bundle-nix:
timeout-minutes: 60
@@ -182,16 +180,17 @@ jobs:
system:
- os: x86 Linux
runner: buildjet-16vcpu-ubuntu-2204
install: true
install_nix: true
- os: arm Mac
# TODO: once other macs are provisioned for nix, remove that constraint from the runner
runner: [macOS, ARM64, nix]
install: false
install_nix: false
- os: arm Linux
runner: buildjet-16vcpu-ubuntu-2204-arm
install: true
install_nix: true
if: github.repository_owner == 'zed-industries'
runs-on: ${{ matrix.system.runner }}
# needs: tests
needs: tests
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
@@ -201,43 +200,49 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
# on our macs we manually install nix. for some reason the cachix action is running
# under a non-login /bin/bash shell which doesn't source the proper script to add the
# nix profile to PATH, so we manually add them here
- name: Set path
if: ${{ ! matrix.system.install }}
if: ${{ ! matrix.system.install_nix }}
run: |
echo "/nix/var/nix/profiles/default/bin" >> $GITHUB_PATH
echo "/Users/administrator/.nix-profile/bin" >> $GITHUB_PATH
- uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
if: ${{ matrix.system.install }}
if: ${{ matrix.system.install_nix }}
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
with:
name: zed-industries
authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
- run: nix build
- run: nix-collect-garbage -d
# update-nightly-tag:
# name: Update nightly tag
# if: github.repository_owner == 'zed-industries'
# runs-on: ubuntu-latest
# needs:
# - bundle-mac
# - bundle-linux-x86
# - bundle-linux-arm
# steps:
# - name: Checkout repo
# uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
# with:
# fetch-depth: 0
update-nightly-tag:
name: Update nightly tag
if: github.repository_owner == 'zed-industries'
runs-on: ubuntu-latest
needs:
- bundle-mac
- bundle-linux-x86
- bundle-linux-arm
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0
# - name: Update nightly tag
# run: |
# if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
# echo "Nightly tag already points to current commit. Skipping tagging."
# exit 0
# fi
# git config user.name github-actions
# git config user.email github-actions@github.com
# git tag -f nightly
# git push origin nightly --force
- name: Update nightly tag
run: |
if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
echo "Nightly tag already points to current commit. Skipping tagging."
exit 0
fi
git config user.name github-actions
git config user.email github-actions@github.com
git tag -f nightly
git push origin nightly --force

Cargo.lock (generated)

@@ -244,7 +244,7 @@ dependencies = [
"serde",
"serde_json",
"strum",
"thiserror 1.0.69",
"thiserror 2.0.12",
"util",
]
@@ -467,6 +467,7 @@ dependencies = [
"futures 0.3.31",
"fuzzy",
"git",
"git_ui",
"gpui",
"heed",
"html_to_markdown",
@@ -728,6 +729,7 @@ dependencies = [
"settings",
"theme",
"ui",
"unindent",
"util",
"workspace",
"worktree",
@@ -793,9 +795,9 @@ dependencies = [
[[package]]
name = "async-compression"
version = "0.4.20"
version = "0.4.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "310c9bcae737a48ef5cdee3174184e6d548b292739ede61a1f955ef76a738861"
checksum = "c0cf008e5e1a9e9e22a7d3c9a4992e21a350290069e36d8fb72304ed17e8f2d2"
dependencies = [
"deflate64",
"flate2",
@@ -977,9 +979,9 @@ dependencies = [
[[package]]
name = "async-std"
version = "1.13.0"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615"
checksum = "730294c1c08c2e0f85759590518f6333f0d5a0a766a27d519c1b244c3dfd8a24"
dependencies = [
"async-attributes",
"async-channel 1.9.0",
@@ -1079,9 +1081,9 @@ dependencies = [
[[package]]
name = "async-trait"
version = "0.1.87"
version = "0.1.88"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d556ec1359574147ec0c4fc5eb525f3f23263a592b1a9c07e0a75b427de55c97"
checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
dependencies = [
"proc-macro2",
"quote",
@@ -1841,7 +1843,7 @@ dependencies = [
"serde",
"serde_json",
"strum",
"thiserror 1.0.69",
"thiserror 2.0.12",
"tokio",
]
@@ -2354,7 +2356,7 @@ dependencies = [
"cap-primitives",
"cap-std",
"io-lifetimes",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -2382,7 +2384,7 @@ dependencies = [
"ipnet",
"maybe-owned",
"rustix",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
"winx",
]
@@ -2442,7 +2444,7 @@ dependencies = [
"semver",
"serde",
"serde_json",
"thiserror 2.0.6",
"thiserror 2.0.12",
]
[[package]]
@@ -2766,7 +2768,7 @@ dependencies = [
"telemetry",
"telemetry_events",
"text",
"thiserror 1.0.69",
"thiserror 2.0.12",
"time",
"tiny_http",
"tokio-socks",
@@ -2967,7 +2969,7 @@ dependencies = [
"telemetry_events",
"text",
"theme",
"thiserror 1.0.69",
"thiserror 2.0.12",
"time",
"tokio",
"toml 0.8.20",
@@ -4590,7 +4592,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
dependencies = [
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -5237,8 +5239,8 @@ dependencies = [
"fsevent",
"futures 0.3.31",
"git",
"git2",
"gpui",
"ignore",
"libc",
"log",
"notify 6.1.1",
@@ -5265,7 +5267,7 @@ checksum = "5e2e6123af26f0f2c51cc66869137080199406754903cc926a7690401ce09cb4"
dependencies = [
"io-lifetimes",
"rustix",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -5600,11 +5602,13 @@ dependencies = [
"serde_json",
"smol",
"sum_tree",
"tempfile",
"text",
"time",
"unindent",
"url",
"util",
"uuid",
]
[[package]]
@@ -5680,7 +5684,6 @@ dependencies = [
"serde_derive",
"serde_json",
"settings",
"smallvec",
"strum",
"telemetry",
"theme",
@@ -5878,7 +5881,7 @@ dependencies = [
"strum",
"sum_tree",
"taffy",
"thiserror 1.0.69",
"thiserror 2.0.12",
"unicode-segmentation",
"usvg",
"util",
@@ -6923,7 +6926,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2285ddfe3054097ef4b2fe909ef8c3bcd1ea52a8f0d274416caebeef39f04a65"
dependencies = [
"io-lifetimes",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -7326,7 +7329,7 @@ dependencies = [
"smol",
"strum",
"telemetry_events",
"thiserror 1.0.69",
"thiserror 2.0.12",
"ui",
"util",
]
@@ -7569,7 +7572,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [
"cfg-if",
"windows-targets 0.52.6",
"windows-targets 0.48.5",
]
[[package]]
@@ -9688,7 +9691,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc"
dependencies = [
"memchr",
"thiserror 2.0.6",
"thiserror 2.0.12",
"ucd-trie",
]
@@ -10960,7 +10963,7 @@ dependencies = [
"rustc-hash 2.1.1",
"rustls 0.23.23",
"socket2",
"thiserror 2.0.6",
"thiserror 2.0.12",
"tokio",
"tracing",
]
@@ -10979,7 +10982,7 @@ dependencies = [
"rustls 0.23.23",
"rustls-pki-types",
"slab",
"thiserror 2.0.6",
"thiserror 2.0.12",
"tinyvec",
"tracing",
"web-time",
@@ -10996,7 +10999,7 @@ dependencies = [
"once_cell",
"socket2",
"tracing",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -11388,7 +11391,7 @@ dependencies = [
"shlex",
"smol",
"tempfile",
"thiserror 1.0.69",
"thiserror 2.0.12",
"urlencoding",
"util",
]
@@ -11921,7 +11924,7 @@ dependencies = [
"libc",
"linux-raw-sys",
"once_cell",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -12468,9 +12471,9 @@ dependencies = [
[[package]]
name = "semver"
version = "1.0.25"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03"
checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
dependencies = [
"serde",
]
@@ -12703,6 +12706,13 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
[[package]]
name = "shell_parser"
version = "0.1.0"
dependencies = [
"shlex",
]
[[package]]
name = "shellexpand"
version = "2.1.2"
@@ -13096,7 +13106,7 @@ dependencies = [
"serde_json",
"sha2",
"smallvec",
"thiserror 2.0.6",
"thiserror 2.0.12",
"time",
"tokio",
"tokio-stream",
@@ -13185,7 +13195,7 @@ dependencies = [
"smallvec",
"sqlx-core",
"stringprep",
"thiserror 2.0.6",
"thiserror 2.0.12",
"time",
"tracing",
"uuid",
@@ -13228,7 +13238,7 @@ dependencies = [
"smallvec",
"sqlx-core",
"stringprep",
"thiserror 2.0.6",
"thiserror 2.0.12",
"time",
"tracing",
"uuid",
@@ -13713,7 +13723,7 @@ dependencies = [
"fd-lock",
"io-lifetimes",
"rustix",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
"winx",
]
@@ -13857,7 +13867,7 @@ dependencies = [
"getrandom 0.3.1",
"once_cell",
"rustix",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -13902,7 +13912,7 @@ dependencies = [
"sysinfo",
"task",
"theme",
"thiserror 1.0.69",
"thiserror 2.0.12",
"util",
"windows 0.60.0",
]
@@ -13997,7 +14007,7 @@ dependencies = [
"serde_repr",
"settings",
"strum",
"thiserror 1.0.69",
"thiserror 2.0.12",
"util",
"uuid",
]
@@ -14063,11 +14073,11 @@ dependencies = [
[[package]]
name = "thiserror"
version = "2.0.6"
version = "2.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47"
checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
dependencies = [
"thiserror-impl 2.0.6",
"thiserror-impl 2.0.12",
]
[[package]]
@@ -14083,9 +14093,9 @@ dependencies = [
[[package]]
name = "thiserror-impl"
version = "2.0.6"
version = "2.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312"
checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
dependencies = [
"proc-macro2",
"quote",
@@ -15023,6 +15033,19 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "ui_prompt"
version = "0.1.0"
dependencies = [
"gpui",
"markdown",
"menu",
"settings",
"theme",
"ui",
"workspace",
]
[[package]]
name = "unicase"
version = "2.8.1"
@@ -16238,7 +16261,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.48.0",
]
[[package]]
@@ -16798,7 +16821,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d"
dependencies = [
"bitflags 2.8.0",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -17319,7 +17342,7 @@ dependencies = [
[[package]]
name = "zed"
version = "0.179.0"
version = "0.180.0"
dependencies = [
"activity_indicator",
"anyhow",
@@ -17382,7 +17405,6 @@ dependencies = [
"languages",
"libc",
"log",
"markdown",
"markdown_preview",
"menu",
"migrator",
@@ -17434,6 +17456,7 @@ dependencies = [
"tree-sitter-md",
"tree-sitter-rust",
"ui",
"ui_prompt",
"url",
"urlencoding",
"util",
@@ -17720,7 +17743,7 @@ dependencies = [
"telemetry",
"telemetry_events",
"theme",
"thiserror 1.0.69",
"thiserror 2.0.12",
"tree-sitter-go",
"tree-sitter-rust",
"ui",

View File

@@ -131,6 +131,7 @@ members = [
"crates/session",
"crates/settings",
"crates/settings_ui",
"crates/shell_parser",
"crates/snippet",
"crates/snippet_provider",
"crates/snippets_ui",
@@ -160,6 +161,7 @@ members = [
"crates/ui",
"crates/ui_input",
"crates/ui_macros",
"crates/ui_prompt",
"crates/util",
"crates/util_macros",
"crates/vim",
@@ -362,6 +364,7 @@ toolchain_selector = { path = "crates/toolchain_selector" }
ui = { path = "crates/ui" }
ui_input = { path = "crates/ui_input" }
ui_macros = { path = "crates/ui_macros" }
ui_prompt = { path = "crates/ui_prompt" }
util = { path = "crates/util" }
util_macros = { path = "crates/util_macros" }
vim = { path = "crates/vim" }
@@ -533,7 +536,7 @@ sys-locale = "0.3.1"
sysinfo = "0.31.0"
take-until = "0.2.0"
tempfile = "3.9.0"
thiserror = "1.0.29"
thiserror = "2.0.12"
tiktoken-rs = "0.6.0"
time = { version = "0.3", features = [
"macros",

View File

@@ -53,7 +53,9 @@
"context": "Prompt",
"bindings": {
"left": "menu::SelectPrevious",
"right": "menu::SelectNext"
"right": "menu::SelectNext",
"h": "menu::SelectPrevious",
"l": "menu::SelectNext"
}
},
{
@@ -752,6 +754,8 @@
"escape": "git_panel::ToggleFocus",
"ctrl-enter": "git::Commit",
"alt-enter": "menu::SecondaryConfirm",
"delete": "git::RestoreFile",
"shift-delete": "git::RestoreFile",
"backspace": "git::RestoreFile"
}
},

View File

@@ -705,6 +705,16 @@
"ctrl-]": "assistant::CycleNextInlineAssist"
}
},
{
"context": "Prompt",
"use_key_equivalents": true,
"bindings": {
"left": "menu::SelectPrevious",
"right": "menu::SelectNext",
"h": "menu::SelectPrevious",
"l": "menu::SelectNext"
}
},
{
"context": "ProjectSearchBar && !in_replace",
"use_key_equivalents": true,
@@ -791,6 +801,8 @@
"shift-tab": "git_panel::FocusEditor",
"escape": "git_panel::ToggleFocus",
"cmd-enter": "git::Commit",
"delete": "git::RestoreFile",
"cmd-backspace": "git::RestoreFile",
"backspace": "git::RestoreFile"
}
},

View File

@@ -136,6 +136,11 @@
// Whether to use the system provided dialogs for Open and Save As.
// When set to false, Zed will use the built-in keyboard-first pickers.
"use_system_path_prompts": true,
// Whether to use the system provided dialogs for prompts, such as confirmation
// prompts.
// When set to false, Zed will use its built-in prompts. Note that on Linux,
// this option is ignored and Zed will always use the built-in prompts.
"use_system_prompts": true,
// Whether the cursor blinks in the editor.
"cursor_blink": true,
// Cursor shape for the default editor.

View File

@@ -63,9 +63,9 @@ impl ActivityIndicator {
let auto_updater = AutoUpdater::get(cx);
let this = cx.new(|cx| {
let mut status_events = languages.language_server_binary_statuses();
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
while let Some((name, status)) = status_events.next().await {
this.update(&mut cx, |this: &mut ActivityIndicator, cx| {
this.update(cx, |this: &mut ActivityIndicator, cx| {
this.statuses.retain(|s| s.name != name);
this.statuses.push(ServerStatus { name, status });
cx.notify();
@@ -76,9 +76,9 @@ impl ActivityIndicator {
.detach();
let mut status_events = languages.dap_server_binary_statuses();
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
while let Some((name, status)) = status_events.next().await {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.statuses.retain(|s| s.name != name);
this.statuses.push(ServerStatus { name, status });
cx.notify();
@@ -123,9 +123,9 @@ impl ActivityIndicator {
let project = project.clone();
let error = error.clone();
let server_name = server_name.clone();
cx.spawn_in(window, |workspace, mut cx| async move {
cx.spawn_in(window, async move |workspace, cx| {
let buffer = create_buffer.await?;
buffer.update(&mut cx, |buffer, cx| {
buffer.update(cx, |buffer, cx| {
buffer.edit(
[(
0..0,
@@ -136,7 +136,7 @@ impl ActivityIndicator {
);
buffer.set_capability(language::Capability::ReadOnly, cx);
})?;
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.update_in(cx, |workspace, window, cx| {
workspace.add_item_to_active_pane(
Box::new(cx.new(|cx| {
Editor::for_buffer(buffer, Some(project.clone()), window, cx)
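This hunk, and most of the Rust hunks that follow, apply the same mechanical migration: `cx.spawn(|this, mut cx| async move { … })` and `cx.spawn_in(window, |this, mut cx| async move { … })` become `cx.spawn(async move |this, cx| { … })` / `cx.spawn_in(window, async move |this, cx| { … })`, and the `&mut cx` borrows inside become plain `cx`. The new form passes an async closure. A self-contained sketch of that shape, with no gpui types and invented names (it relies on the async closure traits stabilized in Rust 1.85 / edition 2024):

```rust
// Standalone sketch of the async-closure form the diff migrates to.
async fn drive(mut step: impl AsyncFnMut(&mut u32)) {
    let mut counter = 0;
    // The closure borrows `counter` on each call instead of capturing a
    // `mut cx`-style handle by value up front.
    step(&mut counter).await;
    step(&mut counter).await;
    assert_eq!(counter, 2);
}

async fn example() {
    // Usage shape, analogous to `cx.spawn(async move |this, cx| { .. })`.
    drive(async |counter: &mut u32| {
        *counter += 1;
    })
    .await;
}
```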

View File

@@ -34,9 +34,9 @@ impl AskPassDelegate {
password_prompt: impl Fn(String, oneshot::Sender<String>, &mut AsyncApp) + Send + Sync + 'static,
) -> Self {
let (tx, mut rx) = mpsc::unbounded::<(String, oneshot::Sender<String>)>();
let task = cx.spawn(|mut cx| async move {
let task = cx.spawn(async move |cx: &mut AsyncApp| {
while let Some((prompt, channel)) = rx.next().await {
password_prompt(prompt, channel, &mut cx);
password_prompt(prompt, channel, cx);
}
});
Self { tx, _task: task }

View File

@@ -98,9 +98,9 @@ pub fn init(
AssistantSettings::register(cx);
SlashCommandSettings::register(cx);
cx.spawn(|mut cx| {
cx.spawn({
let client = client.clone();
async move {
async move |cx| {
let is_search_slash_command_enabled = cx
.update(|cx| cx.wait_for_flag::<SearchSlashCommandFeatureFlag>())?
.await;
@@ -116,7 +116,7 @@ pub fn init(
let semantic_index = SemanticDb::new(
paths::embeddings_dir().join("semantic-index-db.0.mdb"),
Arc::new(embedding_provider),
&mut cx,
cx,
)
.await?;

View File

@@ -98,16 +98,16 @@ impl AssistantPanel {
prompt_builder: Arc<PromptBuilder>,
cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
let slash_commands = Arc::new(SlashCommandWorkingSet::default());
let context_store = workspace
.update(&mut cx, |workspace, cx| {
.update(cx, |workspace, cx| {
let project = workspace.project().clone();
ContextStore::new(project, prompt_builder.clone(), slash_commands, cx)
})?
.await?;
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.update_in(cx, |workspace, window, cx| {
// TODO: deserialize state.
cx.new(|cx| Self::new(workspace, context_store, window, cx))
})
@@ -357,9 +357,9 @@ impl AssistantPanel {
) -> Task<()> {
let mut status_rx = client.status();
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
while let Some(status) = status_rx.next().await {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if this.client_status.is_none()
|| this
.client_status
@@ -371,7 +371,7 @@ impl AssistantPanel {
})
.log_err();
}
this.update(&mut cx, |this, _cx| this.watch_client_status = None)
this.update(cx, |this, _cx| this.watch_client_status = None)
.log_err();
})
}
@@ -576,11 +576,11 @@ impl AssistantPanel {
if self.authenticate_provider_task.is_none() {
self.authenticate_provider_task = Some((
provider.id(),
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
if let Some(future) = load_credentials {
let _ = future.await;
}
this.update(&mut cx, |this, _cx| {
this.update(cx, |this, _cx| {
this.authenticate_provider_task = None;
})
.log_err();
@@ -641,9 +641,9 @@ impl AssistantPanel {
}
} else {
let assistant_panel = assistant_panel.downgrade();
cx.spawn_in(window, |workspace, mut cx| async move {
cx.spawn_in(window, async move |workspace, cx| {
let Some(task) =
assistant_panel.update(&mut cx, |assistant, cx| assistant.authenticate(cx))?
assistant_panel.update(cx, |assistant, cx| assistant.authenticate(cx))?
else {
let answer = cx
.prompt(
@@ -665,7 +665,7 @@ impl AssistantPanel {
return Ok(());
};
task.await?;
if assistant_panel.update(&mut cx, |panel, cx| panel.is_authenticated(cx))? {
if assistant_panel.update(cx, |panel, cx| panel.is_authenticated(cx))? {
cx.update(|window, cx| match inline_assist_target {
InlineAssistTarget::Editor(active_editor, include_context) => {
let assistant_panel = if include_context {
@@ -698,7 +698,7 @@ impl AssistantPanel {
}
})?
} else {
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.update_in(cx, |workspace, window, cx| {
workspace.focus_panel::<AssistantPanel>(window, cx)
})?;
}
@@ -791,10 +791,10 @@ impl AssistantPanel {
.context_store
.update(cx, |store, cx| store.create_remote_context(cx));
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let context = task.await?;
this.update_in(&mut cx, |this, window, cx| {
this.update_in(cx, |this, window, cx| {
let workspace = this.workspace.clone();
let project = this.project.clone();
let lsp_adapter_delegate =
@@ -847,9 +847,9 @@ impl AssistantPanel {
self.show_context(editor.clone(), window, cx);
let workspace = self.workspace.clone();
cx.spawn_in(window, move |_, mut cx| async move {
cx.spawn_in(window, async move |_, cx| {
workspace
.update_in(&mut cx, |workspace, window, cx| {
.update_in(cx, |workspace, window, cx| {
workspace.focus_panel::<AssistantPanel>(window, cx);
})
.ok();
@@ -1069,8 +1069,8 @@ impl AssistantPanel {
.filter(|editor| editor.read(cx).context().read(cx).path() == Some(&path))
});
if let Some(existing_context) = existing_context {
return cx.spawn_in(window, |this, mut cx| async move {
this.update_in(&mut cx, |this, window, cx| {
return cx.spawn_in(window, async move |this, cx| {
this.update_in(cx, |this, window, cx| {
this.show_context(existing_context, window, cx)
})
});
@@ -1085,9 +1085,9 @@ impl AssistantPanel {
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let context = context.await?;
this.update_in(&mut cx, |this, window, cx| {
this.update_in(cx, |this, window, cx| {
let editor = cx.new(|cx| {
ContextEditor::for_context(
context,
@@ -1117,8 +1117,8 @@ impl AssistantPanel {
.filter(|editor| *editor.read(cx).context().read(cx).id() == id)
});
if let Some(existing_context) = existing_context {
return cx.spawn_in(window, |this, mut cx| async move {
this.update_in(&mut cx, |this, window, cx| {
return cx.spawn_in(window, async move |this, cx| {
this.update_in(cx, |this, window, cx| {
this.show_context(existing_context.clone(), window, cx)
})?;
Ok(existing_context)
@@ -1134,9 +1134,9 @@ impl AssistantPanel {
.log_err()
.flatten();
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let context = context.await?;
this.update_in(&mut cx, |this, window, cx| {
this.update_in(cx, |this, window, cx| {
let editor = cx.new(|cx| {
ContextEditor::for_context(
context,

View File

@@ -1311,9 +1311,9 @@ impl EditorInlineAssists {
assist_ids: Vec::new(),
scroll_lock: None,
highlight_updates: highlight_updates_tx,
_update_highlights: cx.spawn(|cx| {
_update_highlights: cx.spawn({
let editor = editor.downgrade();
async move {
async move |cx| {
while let Ok(()) = highlight_updates_rx.changed().await {
let editor = editor.upgrade().context("editor was dropped")?;
cx.update_global(|assistant: &mut InlineAssistant, cx| {
@@ -1850,7 +1850,7 @@ impl PromptEditor {
fn count_tokens(&mut self, cx: &mut Context<Self>) {
let assist_id = self.id;
self.pending_token_count = cx.spawn(|this, mut cx| async move {
self.pending_token_count = cx.spawn(async move |this, cx| {
cx.background_executor().timer(Duration::from_secs(1)).await;
let token_count = cx
.update_global(|inline_assistant: &mut InlineAssistant, cx| {
@@ -1862,7 +1862,7 @@ impl PromptEditor {
})??
.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.token_counts = Some(token_count);
cx.notify();
})
@@ -2882,7 +2882,7 @@ impl CodegenAlternative {
let request = self.build_request(user_prompt, assistant_panel_context, cx)?;
self.request = Some(request.clone());
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await })
cx.spawn(async move |_, cx| model.stream_completion_text(request, &cx).await)
.boxed_local()
};
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
@@ -2999,213 +2999,207 @@ impl CodegenAlternative {
let completion = Arc::new(Mutex::new(String::new()));
let completion_clone = completion.clone();
self.generation = cx.spawn(|codegen, mut cx| {
async move {
let stream = stream.await;
let message_id = stream
.as_ref()
.ok()
.and_then(|stream| stream.message_id.clone());
let generate = async {
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let executor = cx.background_executor().clone();
let message_id = message_id.clone();
let line_based_stream_diff: Task<anyhow::Result<()>> =
cx.background_spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
let diff = async {
let chunks = StripInvalidSpans::new(stream?.stream);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut line_diff = LineDiff::default();
self.generation = cx.spawn(async move |codegen, cx| {
let stream = stream.await;
let message_id = stream
.as_ref()
.ok()
.and_then(|stream| stream.message_id.clone());
let generate = async {
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let executor = cx.background_executor().clone();
let message_id = message_id.clone();
let line_based_stream_diff: Task<anyhow::Result<()>> =
cx.background_spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
let diff = async {
let chunks = StripInvalidSpans::new(stream?.stream);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut line_diff = LineDiff::default();
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
completion_clone.lock().push_str(&chunk);
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
completion_clone.lock().push_str(&chunk);
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta =
line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(
selection_start.column as usize,
);
}
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta =
line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(
selection_start.column as usize,
);
}
}
if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
}
}
if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
new_text.clear();
}
if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear();
}
if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear();
}
line_indent = None;
first_line = false;
}
line_indent = None;
first_line = false;
}
}
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
anyhow::Ok(())
};
let result = diff.await;
let error_message =
result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
result?;
Ok(())
});
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(&mut cx, |codegen, cx| {
codegen.last_equal_ranges.clear();
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
}
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
cx.notify();
})?;
}
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) =
join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};
anyhow::Ok(())
};
let result = diff.await;
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
let error_message = result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
codegen
.update(&mut cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
result?;
Ok(())
});
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(cx, |codegen, cx| {
codegen.last_equal_ranges.clear();
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
cx.notify();
})
.ok();
}
})?;
}
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
codegen
.update(cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
});
cx.notify();
}
@@ -3323,7 +3317,7 @@ impl CodegenAlternative {
let new_snapshot = self.buffer.read(cx).snapshot(cx);
let new_range = self.range.to_point(&new_snapshot);
cx.spawn(|codegen, mut cx| async move {
cx.spawn(async move |codegen, cx| {
let (deleted_row_ranges, inserted_row_ranges) = cx
.background_spawn(async move {
let old_text = old_snapshot
@@ -3373,7 +3367,7 @@ impl CodegenAlternative {
.await;
codegen
.update(&mut cx, |codegen, cx| {
.update(cx, |codegen, cx| {
codegen.diff.deleted_row_ranges = deleted_row_ranges;
codegen.diff.inserted_row_ranges = inserted_row_ranges;
cx.notify();
@@ -3587,10 +3581,10 @@ impl CodeActionProvider for AssistantCodeActionProvider {
) -> Task<Result<ProjectTransaction>> {
let editor = self.editor.clone();
let workspace = self.workspace.clone();
window.spawn(cx, |mut cx| async move {
window.spawn(cx, async move |cx| {
let editor = editor.upgrade().context("editor was released")?;
let range = editor
.update(&mut cx, |editor, cx| {
.update(cx, |editor, cx| {
editor.buffer().update(cx, |multibuffer, cx| {
let buffer = buffer.read(cx);
let multibuffer_snapshot = multibuffer.read(cx);
@@ -3625,7 +3619,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
})
})?
.context("invalid range")?;
let assistant_panel = workspace.update(&mut cx, |workspace, cx| {
let assistant_panel = workspace.update(cx, |workspace, cx| {
workspace
.panel::<AssistantPanel>(cx)
.context("assistant panel was released")

View File

@@ -825,7 +825,7 @@ impl PromptEditor {
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
return;
};
self.pending_token_count = cx.spawn(|this, mut cx| async move {
self.pending_token_count = cx.spawn(async move |this, cx| {
cx.background_executor().timer(Duration::from_secs(1)).await;
let request =
cx.update_global(|inline_assistant: &mut TerminalInlineAssistant, cx| {
@@ -833,7 +833,7 @@ impl PromptEditor {
})??;
let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.token_count = Some(token_count);
cx.notify();
})
@@ -1140,7 +1140,7 @@ impl Codegen {
let telemetry = self.telemetry.clone();
self.status = CodegenStatus::Pending;
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
self.generation = cx.spawn(|this, mut cx| async move {
self.generation = cx.spawn(async move |this, cx| {
let model_telemetry_id = model.telemetry_id();
let model_provider_id = model.provider_id();
let response = model.stream_completion_text(prompt, &cx).await;
@@ -1197,12 +1197,12 @@ impl Codegen {
}
});
this.update(&mut cx, |this, _| {
this.update(cx, |this, _| {
this.message_id = message_id;
})?;
while let Some(hunk) = hunks_rx.next().await {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if let Some(transaction) = &mut this.transaction {
transaction.push(hunk, cx);
cx.notify();
@@ -1216,7 +1216,7 @@ impl Codegen {
let result = generate.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {

View File

@@ -39,6 +39,7 @@ fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
git.workspace = true
git_ui.workspace = true
gpui.workspace = true
heed.workspace = true
html_to_markdown.workspace = true

View File

@@ -372,10 +372,10 @@ impl ActiveThread {
cx,
);
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let updated_context_ids = refresh_task.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.context_store.read_with(cx, |context_store, cx| {
context_store
.context()
@@ -394,10 +394,10 @@ impl ActiveThread {
let model_registry = LanguageModelRegistry::read_global(cx);
if let Some(model) = model_registry.active_model() {
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let updated_context = context_update_task.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.thread.update(cx, |thread, cx| {
thread.attach_tool_results(updated_context, cx);
if !canceled {
@@ -418,9 +418,9 @@ impl ActiveThread {
/// Only one task to save the thread will be in flight at a time.
fn save_thread(&mut self, cx: &mut Context<Self>) {
let thread = self.thread.clone();
self.save_thread_task = Some(cx.spawn(|this, mut cx| async move {
self.save_thread_task = Some(cx.spawn(async move |this, cx| {
let task = this
.update(&mut cx, |this, cx| {
.update(cx, |this, cx| {
this.thread_store
.update(cx, |thread_store, cx| thread_store.save_thread(&thread, cx))
})
@@ -550,6 +550,7 @@ impl ActiveThread {
let thread = self.thread.read(cx);
// Get all the data we need from thread before we start using it in closures
let checkpoint = thread.checkpoint_for_message(message_id);
let context = thread.context_for_message(message_id);
let tool_uses = thread.tool_uses_for_message(message_id);
let scripting_tool_uses = thread.scripting_tool_uses_for_message(message_id);
@@ -583,7 +584,7 @@ impl ActiveThread {
.p_2p5()
.child(edit_message_editor)
} else {
div().p_2p5().text_ui(cx).child(markdown.clone())
div().text_ui(cx).child(markdown.clone())
},
)
.when_some(context, |parent, context| {
@@ -603,15 +604,16 @@ impl ActiveThread {
let styled_message = match message.role {
Role::User => v_flex()
.id(("message-container", ix))
.pt_2p5()
.px_2p5()
.pt_2()
.pl_2()
.pr_2p5()
.child(
v_flex()
.bg(colors.editor_background)
.rounded_lg()
.border_1()
.border_color(colors.border)
.shadow_sm()
.shadow_md()
.child(
h_flex()
.py_1()
@@ -702,12 +704,12 @@ impl ActiveThread {
},
),
)
.child(message_content),
.child(div().p_2().child(message_content)),
),
Role::Assistant => {
v_flex()
.id(("message-container", ix))
.child(message_content)
.child(div().py_3().px_4().child(message_content))
.when(
!tool_uses.is_empty() || !scripting_tool_uses.is_empty(),
|parent| {
@@ -729,11 +731,29 @@ impl ActiveThread {
v_flex()
.bg(colors.editor_background)
.rounded_sm()
.child(message_content),
.child(div().p_4().child(message_content)),
),
};
styled_message.into_any()
v_flex()
.when_some(checkpoint, |parent, checkpoint| {
parent.child(
h_flex().pl_2().child(
Button::new("restore-checkpoint", "Restore Checkpoint")
.icon(IconName::Undo)
.size(ButtonSize::Compact)
.on_click(cx.listener(move |this, _, _window, cx| {
this.thread.update(cx, |thread, cx| {
thread
.restore_checkpoint(checkpoint.clone(), cx)
.detach_and_log_err(cx);
});
})),
),
)
})
.child(styled_message)
.into_any()
}
fn render_tool_use(&self, tool_use: ToolUse, cx: &mut Context<Self>) -> impl IntoElement {
@@ -745,7 +765,7 @@ impl ActiveThread {
let lighter_border = cx.theme().colors().border.opacity(0.5);
div().px_2p5().child(
div().px_4().child(
v_flex()
.rounded_lg()
.border_1()

View File

@@ -0,0 +1,59 @@
use std::sync::Arc;
use collections::HashMap;
use gpui::SharedString;
/// A profile for the Zed Agent that controls its behavior.
#[derive(Debug, Clone)]
pub struct AgentProfile {
/// The name of the profile.
pub name: SharedString,
pub tools: HashMap<Arc<str>, bool>,
#[allow(dead_code)]
pub context_servers: HashMap<Arc<str>, ContextServerPreset>,
}
#[derive(Debug, Clone)]
pub struct ContextServerPreset {
#[allow(dead_code)]
pub tools: HashMap<Arc<str>, bool>,
}
impl AgentProfile {
pub fn read_only() -> Self {
Self {
name: "Read-only".into(),
tools: HashMap::from_iter([
("diagnostics".into(), true),
("fetch".into(), true),
("list-directory".into(), true),
("now".into(), true),
("path-search".into(), true),
("read-file".into(), true),
("regex-search".into(), true),
("thinking".into(), true),
]),
context_servers: HashMap::default(),
}
}
pub fn code_writer() -> Self {
Self {
name: "Code Writer".into(),
tools: HashMap::from_iter([
("bash".into(), true),
("delete-path".into(), true),
("diagnostics".into(), true),
("edit-files".into(), true),
("fetch".into(), true),
("list-directory".into(), true),
("now".into(), true),
("path-search".into(), true),
("read-file".into(), true),
("regex-search".into(), true),
("thinking".into(), true),
]),
context_servers: HashMap::default(),
}
}
}
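The new `agent_profile` module only defines the two built-in profiles; this diff does not show how they are consumed. As a hedged usage sketch, with the helper below invented for illustration, a caller could treat tools absent from the map as disabled:

```rust
// Hypothetical helper, not part of the diff: absent tools count as disabled.
fn tool_enabled(profile: &AgentProfile, tool: &str) -> bool {
    profile.tools.get(tool).copied().unwrap_or(false)
}

fn example() {
    let read_only = AgentProfile::read_only();
    assert!(tool_enabled(&read_only, "read-file"));
    // Only the Code Writer profile enables "bash" and "edit-files".
    assert!(!tool_enabled(&read_only, "bash"));

    let writer = AgentProfile::code_writer();
    assert!(tool_enabled(&writer, "edit-files"));
}
```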

View File

@@ -1,4 +1,5 @@
mod active_thread;
mod agent_profile;
mod assistant_configuration;
mod assistant_model_selector;
mod assistant_panel;

View File

@@ -1,19 +1,33 @@
use std::sync::Arc;
use assistant_tool::{ToolSource, ToolWorkingSet};
use collections::HashMap;
use gpui::{Action, AnyView, App, EventEmitter, FocusHandle, Focusable, Subscription};
use context_server::manager::ContextServerManager;
use gpui::{Action, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
use ui::{prelude::*, Divider, DividerColor, ElevationIndex};
use ui::{
prelude::*, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Switch, Tooltip,
};
use util::ResultExt as _;
use zed_actions::assistant::DeployPromptLibrary;
use zed_actions::ExtensionCategoryFilter;
pub struct AssistantConfiguration {
focus_handle: FocusHandle,
configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>,
context_server_manager: Entity<ContextServerManager>,
expanded_context_server_tools: HashMap<Arc<str>, bool>,
tools: Arc<ToolWorkingSet>,
_registry_subscription: Subscription,
}
impl AssistantConfiguration {
pub fn new(window: &mut Window, cx: &mut Context<Self>) -> Self {
pub fn new(
context_server_manager: Entity<ContextServerManager>,
tools: Arc<ToolWorkingSet>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let focus_handle = cx.focus_handle();
let registry_subscription = cx.subscribe_in(
@@ -36,6 +50,9 @@ impl AssistantConfiguration {
let mut this = Self {
focus_handle,
configuration_views_by_provider: HashMap::default(),
context_server_manager,
expanded_context_server_tools: HashMap::default(),
tools,
_registry_subscription: registry_subscription,
};
this.build_provider_configuration_views(window, cx);
@@ -143,6 +160,185 @@ impl AssistantConfiguration {
}),
)
}
fn render_context_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let context_servers = self.context_server_manager.read(cx).all_servers().clone();
let tools_by_source = self.tools.tools_by_source(cx);
let empty = Vec::new();
const SUBHEADING: &str = "Connect to context servers via the Model Context Protocol either via Zed extensions or directly.";
v_flex()
.p(DynamicSpacing::Base16.rems(cx))
.mt_1()
.gap_2()
.flex_1()
.child(
v_flex()
.gap_0p5()
.child(Headline::new("Context Servers (MCP)").size(HeadlineSize::Small))
.child(Label::new(SUBHEADING).color(Color::Muted)),
)
.children(context_servers.into_iter().map(|context_server| {
let is_running = context_server.client().is_some();
let are_tools_expanded = self
.expanded_context_server_tools
.get(&context_server.id())
.copied()
.unwrap_or_default();
let tools = tools_by_source
.get(&ToolSource::ContextServer {
id: context_server.id().into(),
})
.unwrap_or_else(|| &empty);
let tool_count = tools.len();
v_flex()
.border_1()
.rounded_sm()
.border_color(cx.theme().colors().border)
.bg(cx.theme().colors().editor_background)
.child(
h_flex()
.justify_between()
.px_2()
.py_1()
.when(are_tools_expanded, |element| {
element
.border_b_1()
.border_color(cx.theme().colors().border)
})
.child(
h_flex()
.gap_2()
.child(
Disclosure::new("tool-list-disclosure", are_tools_expanded)
.on_click(cx.listener({
let context_server_id = context_server.id();
move |this, _event, _window, _cx| {
let is_open = this
.expanded_context_server_tools
.entry(context_server_id.clone())
.or_insert(false);
*is_open = !*is_open;
}
})),
)
.child(Indicator::dot().color(if is_running {
Color::Success
} else {
Color::Error
}))
.child(Label::new(context_server.id()))
.child(
Label::new(format!("{tool_count} tools"))
.color(Color::Muted),
),
)
.child(h_flex().child(
Switch::new("context-server-switch", is_running.into()).on_click({
let context_server_manager =
self.context_server_manager.clone();
let context_server = context_server.clone();
move |state, _window, cx| match state {
ToggleState::Unselected | ToggleState::Indeterminate => {
context_server_manager.update(cx, |this, cx| {
this.stop_server(context_server.clone(), cx)
.log_err();
});
}
ToggleState::Selected => {
cx.spawn({
let context_server_manager =
context_server_manager.clone();
let context_server = context_server.clone();
async move |cx| {
if let Some(start_server_task) =
context_server_manager
.update(cx, |this, cx| {
this.start_server(
context_server,
cx,
)
})
.log_err()
{
start_server_task.await.log_err();
}
}
})
.detach();
}
}
}),
)),
)
.map(|parent| {
if !are_tools_expanded {
return parent;
}
parent.child(v_flex().children(tools.into_iter().enumerate().map(
|(ix, tool)| {
h_flex()
.px_2()
.py_1()
.when(ix < tool_count - 1, |element| {
element
.border_b_1()
.border_color(cx.theme().colors().border)
})
.child(Label::new(tool.name()))
},
)))
})
}))
.child(
h_flex()
.justify_between()
.gap_2()
.child(
h_flex().w_full().child(
Button::new("add-context-server", "Add Context Server")
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.full_width()
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.disabled(true)
.tooltip(Tooltip::text("Not yet implemented")),
),
)
.child(
h_flex().w_full().child(
Button::new(
"install-context-server-extensions",
"Install Context Server Extensions",
)
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.full_width()
.icon(IconName::DatabaseZap)
.icon_size(IconSize::Small)
.icon_position(IconPosition::Start)
.on_click(|_event, window, cx| {
window.dispatch_action(
zed_actions::Extensions {
category_filter: Some(
ExtensionCategoryFilter::ContextServers,
),
}
.boxed_clone(),
cx,
)
}),
),
),
)
}
}
impl Render for AssistantConfiguration {
@@ -182,6 +378,8 @@ impl Render for AssistantConfiguration {
),
)
.child(Divider::horizontal().color(DividerColor::Border))
.child(self.render_context_servers_section(cx))
.child(Divider::horizontal().color(DividerColor::Border))
.child(
v_flex()
.p(DynamicSpacing::Base16.rems(cx))

View File

@@ -110,19 +110,16 @@ impl AssistantPanel {
prompt_builder: Arc<PromptBuilder>,
cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
let tools = Arc::new(ToolWorkingSet::default());
log::info!("[assistant2-debug] initializing ThreadStore");
let thread_store = workspace.update(&mut cx, |workspace, cx| {
let thread_store = workspace.update(cx, |workspace, cx| {
let project = workspace.project().clone();
ThreadStore::new(project, tools.clone(), prompt_builder.clone(), cx)
})??;
log::info!("[assistant2-debug] finished initializing ThreadStore");
let slash_commands = Arc::new(SlashCommandWorkingSet::default());
log::info!("[assistant2-debug] initializing ContextStore");
let context_store = workspace
.update(&mut cx, |workspace, cx| {
.update(cx, |workspace, cx| {
let project = workspace.project().clone();
assistant_context_editor::ContextStore::new(
project,
@@ -132,9 +129,8 @@ impl AssistantPanel {
)
})?
.await?;
log::info!("[assistant2-debug] finished initializing ContextStore");
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.update_in(cx, |workspace, window, cx| {
cx.new(|cx| Self::new(workspace, thread_store, context_store, window, cx))
})
})
@@ -147,7 +143,6 @@ impl AssistantPanel {
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
log::info!("[assistant2-debug] AssistantPanel::new");
let thread = thread_store.update(cx, |this, cx| this.create_thread(cx));
let fs = workspace.app_state().fs.clone();
let project = workspace.project().clone();
@@ -349,9 +344,9 @@ impl AssistantPanel {
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let context = context.await?;
this.update_in(&mut cx, |this, window, cx| {
this.update_in(cx, |this, window, cx| {
let editor = cx.new(|cx| {
ContextEditor::for_context(
context,
@@ -382,9 +377,9 @@ impl AssistantPanel {
.thread_store
.update(cx, |this, cx| this.open_thread(thread_id, cx));
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let thread = open_thread_task.await?;
this.update_in(&mut cx, |this, window, cx| {
this.update_in(cx, |this, window, cx| {
this.active_view = ActiveView::Thread;
let message_editor_context_store =
cx.new(|_cx| crate::context_store::ContextStore::new(this.workspace.clone()));
@@ -415,8 +410,13 @@ impl AssistantPanel {
}
pub(crate) fn open_configuration(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let context_server_manager = self.thread_store.read(cx).context_server_manager();
let tools = self.thread_store.read(cx).tools();
self.active_view = ActiveView::Configuration;
self.configuration = Some(cx.new(|cx| AssistantConfiguration::new(window, cx)));
self.configuration = Some(
cx.new(|cx| AssistantConfiguration::new(context_server_manager, tools, window, cx)),
);
if let Some(configuration) = self.configuration.as_ref() {
self.configuration_subscription = Some(cx.subscribe_in(
@@ -450,10 +450,10 @@ impl AssistantPanel {
.languages
.language_for_name("Markdown");
let thread = self.active_thread(cx);
cx.spawn_in(window, |_this, mut cx| async move {
cx.spawn_in(window, async move |_this, cx| {
let markdown_language = markdown_language_task.await?;
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.update_in(cx, |workspace, window, cx| {
let thread = thread.read(cx);
let markdown = thread.to_markdown()?;
let thread_summary = thread

View File

@@ -367,7 +367,7 @@ impl CodegenAlternative {
let request = self.build_request(user_prompt, cx)?;
self.request = Some(request.clone());
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await })
cx.spawn(async move |_, cx| model.stream_completion_text(request, &cx).await)
.boxed_local()
};
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
@@ -480,213 +480,207 @@ impl CodegenAlternative {
let completion = Arc::new(Mutex::new(String::new()));
let completion_clone = completion.clone();
self.generation = cx.spawn(|codegen, mut cx| {
async move {
let stream = stream.await;
let message_id = stream
.as_ref()
.ok()
.and_then(|stream| stream.message_id.clone());
let generate = async {
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let executor = cx.background_executor().clone();
let message_id = message_id.clone();
let line_based_stream_diff: Task<anyhow::Result<()>> =
cx.background_spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
let diff = async {
let chunks = StripInvalidSpans::new(stream?.stream);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut line_diff = LineDiff::default();
self.generation = cx.spawn(async move |codegen, cx| {
let stream = stream.await;
let message_id = stream
.as_ref()
.ok()
.and_then(|stream| stream.message_id.clone());
let generate = async {
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let executor = cx.background_executor().clone();
let message_id = message_id.clone();
let line_based_stream_diff: Task<anyhow::Result<()>> =
cx.background_spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
let diff = async {
let chunks = StripInvalidSpans::new(stream?.stream);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut line_diff = LineDiff::default();
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
completion_clone.lock().push_str(&chunk);
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
completion_clone.lock().push_str(&chunk);
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta =
line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(
selection_start.column as usize,
);
}
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta =
line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(
selection_start.column as usize,
);
}
}
if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
}
}
if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
new_text.clear();
}
if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear();
}
if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear();
}
line_indent = None;
first_line = false;
}
line_indent = None;
first_line = false;
}
}
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
anyhow::Ok(())
};
let result = diff.await;
let error_message =
result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
result?;
Ok(())
});
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(&mut cx, |codegen, cx| {
codegen.last_equal_ranges.clear();
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
}
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
cx.notify();
})?;
}
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) =
join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};
anyhow::Ok(())
};
let result = diff.await;
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
let error_message = result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);
codegen
.update(&mut cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
result?;
Ok(())
});
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(cx, |codegen, cx| {
codegen.last_equal_ranges.clear();
let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();
if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
cx.notify();
})
.ok();
}
})?;
}
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};
let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
codegen
.update(cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
});
cx.notify();
}
@@ -804,7 +798,7 @@ impl CodegenAlternative {
let new_snapshot = self.buffer.read(cx).snapshot(cx);
let new_range = self.range.to_point(&new_snapshot);
cx.spawn(|codegen, mut cx| async move {
cx.spawn(async move |codegen, cx| {
let (deleted_row_ranges, inserted_row_ranges) = cx
.background_spawn(async move {
let old_text = old_snapshot
@@ -854,7 +848,7 @@ impl CodegenAlternative {
.await;
codegen
.update(&mut cx, |codegen, cx| {
.update(cx, |codegen, cx| {
codegen.diff.deleted_row_ranges = deleted_row_ranges;
codegen.diff.inserted_row_ranges = inserted_row_ranges;
cx.notify();

View File

@@ -281,10 +281,8 @@ impl ContextPicker {
context_store.add_file_from_path(project_path.clone(), cx)
});
cx.spawn_in(window, |_, mut cx| async move {
task.await.notify_async_err(&mut cx)
})
.detach();
cx.spawn_in(window, async move |_, cx| task.await.notify_async_err(cx))
.detach();
cx.notify();
}
@@ -307,13 +305,13 @@ impl ContextPicker {
};
let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&thread.id, cx));
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let thread = open_thread_task.await?;
context_store.update(&mut cx, |context_store, cx| {
context_store.update(cx, |context_store, cx| {
context_store.add_thread(thread, cx);
})?;
this.update(&mut cx, |_this, cx| cx.notify())
this.update(cx, |_this, cx| cx.notify())
})
}

View File

@@ -206,12 +206,12 @@ impl PickerDelegate for FetchContextPickerDelegate {
let http_client = workspace.read(cx).client().http_client().clone();
let url = self.url.clone();
let confirm_behavior = self.confirm_behavior;
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let text = cx
.background_spawn(Self::build_message(http_client, url.clone()))
.await?;
this.update_in(&mut cx, |this, window, cx| {
this.update_in(cx, |this, window, cx| {
this.delegate
.context_store
.update(cx, |context_store, _cx| {

View File

@@ -206,11 +206,11 @@ impl PickerDelegate for FileContextPickerDelegate {
let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx);
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
// TODO: This should be probably be run in the background.
let paths = search_task.await;
this.update(&mut cx, |this, _cx| {
this.update(cx, |this, _cx| {
this.delegate.matches = paths;
})
.log_err();
@@ -345,10 +345,10 @@ impl PickerDelegate for FileContextPickerDelegate {
};
let confirm_behavior = self.confirm_behavior;
cx.spawn_in(window, |this, mut cx| async move {
match task.await.notify_async_err(&mut cx) {
cx.spawn_in(window, async move |this, cx| {
match task.await.notify_async_err(cx) {
None => anyhow::Ok(()),
Some(()) => this.update_in(&mut cx, |this, window, cx| match confirm_behavior {
Some(()) => this.update_in(cx, |this, window, cx| match confirm_behavior {
ConfirmBehavior::KeepOpen => {}
ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
}),

View File

@@ -149,9 +149,9 @@ impl PickerDelegate for ThreadContextPickerDelegate {
}
});
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let matches = search_task.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.delegate.matches = matches;
this.delegate.selected_index = 0;
cx.notify();
@@ -171,9 +171,9 @@ impl PickerDelegate for ThreadContextPickerDelegate {
let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&entry.id, cx));
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let thread = open_thread_task.await?;
this.update_in(&mut cx, |this, window, cx| {
this.update_in(cx, |this, window, cx| {
this.delegate
.context_store
.update(cx, |context_store, cx| context_store.add_thread(thread, cx))

View File

@@ -75,15 +75,15 @@ impl ContextStore {
return Task::ready(Err(anyhow!("failed to read project")));
};
cx.spawn(|this, mut cx| async move {
let open_buffer_task = project.update(&mut cx, |project, cx| {
cx.spawn(async move |this, cx| {
let open_buffer_task = project.update(cx, |project, cx| {
project.open_buffer(project_path.clone(), cx)
})?;
let buffer_entity = open_buffer_task.await?;
let buffer_id = this.update(&mut cx, |_, cx| buffer_entity.read(cx).remote_id())?;
let buffer_id = this.update(cx, |_, cx| buffer_entity.read(cx).remote_id())?;
let already_included = this.update(&mut cx, |this, _cx| {
let already_included = this.update(cx, |this, _cx| {
match this.will_include_buffer(buffer_id, &project_path.path) {
Some(FileInclusion::Direct(context_id)) => {
this.remove_context(context_id);
@@ -98,7 +98,7 @@ impl ContextStore {
return anyhow::Ok(());
}
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| {
let (buffer_info, text_task) = this.update(cx, |_, cx| {
let buffer = buffer_entity.read(cx);
collect_buffer_info_and_text(
project_path.path.clone(),
@@ -110,7 +110,7 @@ impl ContextStore {
let text = text_task.await;
this.update(&mut cx, |this, _cx| {
this.update(cx, |this, _cx| {
this.insert_file(make_context_buffer(buffer_info, text));
})?;
@@ -123,8 +123,8 @@ impl ContextStore {
buffer_entity: Entity<Buffer>,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
cx.spawn(|this, mut cx| async move {
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| {
cx.spawn(async move |this, cx| {
let (buffer_info, text_task) = this.update(cx, |_, cx| {
let buffer = buffer_entity.read(cx);
let Some(file) = buffer.file() else {
return Err(anyhow!("Buffer has no path."));
@@ -139,7 +139,7 @@ impl ContextStore {
let text = text_task.await;
this.update(&mut cx, |this, _cx| {
this.update(cx, |this, _cx| {
this.insert_file(make_context_buffer(buffer_info, text))
})?;
@@ -179,18 +179,18 @@ impl ContextStore {
}
let worktree_id = project_path.worktree_id;
cx.spawn(|this, mut cx| async move {
let worktree = project.update(&mut cx, |project, cx| {
cx.spawn(async move |this, cx| {
let worktree = project.update(cx, |project, cx| {
project
.worktree_for_id(worktree_id, cx)
.ok_or_else(|| anyhow!("no worktree found for {worktree_id:?}"))
})??;
let files = worktree.update(&mut cx, |worktree, _cx| {
let files = worktree.update(cx, |worktree, _cx| {
collect_files_in_path(worktree, &project_path.path)
})?;
let open_buffers_task = project.update(&mut cx, |project, cx| {
let open_buffers_task = project.update(cx, |project, cx| {
let tasks = files.iter().map(|file_path| {
project.open_buffer(
ProjectPath {
@@ -207,7 +207,7 @@ impl ContextStore {
let mut buffer_infos = Vec::new();
let mut text_tasks = Vec::new();
this.update(&mut cx, |_, cx| {
this.update(cx, |_, cx| {
for (path, buffer_entity) in files.into_iter().zip(buffers) {
// Skip all binary files and other non-UTF8 files
if let Ok(buffer_entity) = buffer_entity {
@@ -236,7 +236,7 @@ impl ContextStore {
bail!("No text files found in {}", &project_path.path.display());
}
this.update(&mut cx, |this, _| {
this.update(cx, |this, _| {
this.insert_directory(&project_path.path, context_buffers);
})?;
@@ -595,10 +595,10 @@ fn refresh_file_text(
let id = file_context.id;
let task = refresh_context_buffer(&file_context.context_buffer, cx);
if let Some(task) = task {
Some(cx.spawn(|mut cx| async move {
Some(cx.spawn(async move |cx| {
let context_buffer = task.await;
context_store
.update(&mut cx, |context_store, _| {
.update(cx, |context_store, _| {
let new_file_context = FileContext { id, context_buffer };
context_store.replace_context(AssistantContext::File(new_file_context));
})
@@ -636,10 +636,10 @@ fn refresh_directory_text(
let id = directory_context.snapshot.id;
let path = directory_context.path.clone();
Some(cx.spawn(|mut cx| async move {
Some(cx.spawn(async move |cx| {
let context_buffers = context_buffers.await;
context_store
.update(&mut cx, |context_store, _| {
.update(cx, |context_store, _| {
let new_directory_context = DirectoryContext::new(id, &path, context_buffers);
context_store.replace_context(AssistantContext::Directory(new_directory_context));
})
@@ -654,9 +654,9 @@ fn refresh_thread_text(
) -> Task<()> {
let id = thread_context.id;
let thread = thread_context.thread.clone();
cx.spawn(move |mut cx| async move {
cx.spawn(async move |cx| {
context_store
.update(&mut cx, |context_store, cx| {
.update(cx, |context_store, cx| {
let text = thread.read(cx).text().into();
context_store.replace_context(AssistantContext::Thread(ThreadContext {
id,

View File

@@ -335,12 +335,12 @@ impl ContextStrip {
context_store.accept_suggested_context(&suggested, cx)
});
cx.spawn_in(window, |this, mut cx| async move {
match task.await.notify_async_err(&mut cx) {
cx.spawn_in(window, async move |this, cx| {
match task.await.notify_async_err(cx) {
None => {}
Some(()) => {
if let Some(this) = this.upgrade() {
this.update(&mut cx, |_, cx| cx.notify())?;
this.update(cx, |_, cx| cx.notify())?;
}
}
}

View File

@@ -276,7 +276,7 @@ impl InlineAssistant {
if is_authenticated() {
handle_assist(window, cx);
} else {
cx.spawn_in(window, |_workspace, mut cx| async move {
cx.spawn_in(window, async move |_workspace, cx| {
let Some(task) = cx.update(|_, cx| {
LanguageModelRegistry::read_global(cx)
.active_provider()
@@ -1456,9 +1456,9 @@ impl EditorInlineAssists {
assist_ids: Vec::new(),
scroll_lock: None,
highlight_updates: highlight_updates_tx,
_update_highlights: cx.spawn(|cx| {
_update_highlights: cx.spawn({
let editor = editor.downgrade();
async move {
async move |cx| {
while let Ok(()) = highlight_updates_rx.changed().await {
let editor = editor.upgrade().context("editor was dropped")?;
cx.update_global(|assistant: &mut InlineAssistant, cx| {
@@ -1748,10 +1748,10 @@ impl CodeActionProvider for AssistantCodeActionProvider {
let editor = self.editor.clone();
let workspace = self.workspace.clone();
let thread_store = self.thread_store.clone();
window.spawn(cx, |mut cx| async move {
window.spawn(cx, async move |cx| {
let editor = editor.upgrade().context("editor was released")?;
let range = editor
.update(&mut cx, |editor, cx| {
.update(cx, |editor, cx| {
editor.buffer().update(cx, |multibuffer, cx| {
let buffer = buffer.read(cx);
let multibuffer_snapshot = multibuffer.read(cx);

View File

@@ -3,23 +3,25 @@ use std::sync::Arc;
use collections::HashSet;
use editor::actions::MoveUp;
use editor::{Editor, EditorElement, EditorEvent, EditorStyle};
use file_icons::FileIcons;
use fs::Fs;
use git::ExpandCommitEditor;
use git_ui::git_panel;
use gpui::{
Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle,
WeakEntity,
};
use language_model::LanguageModelRegistry;
use language_model_selector::ToggleModelSelector;
use project::Project;
use rope::Point;
use settings::Settings;
use std::time::Duration;
use text::Bias;
use theme::ThemeSettings;
use ui::{
prelude::*, ButtonLike, Disclosure, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle,
Tooltip,
prelude::*, ButtonLike, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, Tooltip,
};
use util::ResultExt;
use vim_mode_setting::VimModeSetting;
use workspace::notifications::{NotificationId, NotifyTaskExt};
use workspace::{Toast, Workspace};
@@ -37,6 +39,7 @@ pub struct MessageEditor {
thread: Entity<Thread>,
editor: Entity<Editor>,
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
context_store: Entity<ContextStore>,
context_strip: Entity<ContextStrip>,
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
@@ -44,7 +47,6 @@ pub struct MessageEditor {
inline_context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
model_selector: Entity<AssistantModelSelector>,
tool_selector: Entity<ToolSelector>,
edits_expanded: bool,
_subscriptions: Vec<Subscription>,
}
@@ -107,8 +109,9 @@ impl MessageEditor {
];
Self {
thread,
editor: editor.clone(),
project: thread.read(cx).project().clone(),
thread,
workspace,
context_store,
context_strip,
@@ -125,7 +128,6 @@ impl MessageEditor {
)
}),
tool_selector: cx.new(|cx| ToolSelector::new(tools, cx)),
edits_expanded: false,
_subscriptions: subscriptions,
}
}
@@ -206,12 +208,15 @@ impl MessageEditor {
let thread = self.thread.clone();
let context_store = self.context_store.clone();
cx.spawn(move |_, mut cx| async move {
let git_store = self.project.read(cx).git_store();
let checkpoint = git_store.read(cx).checkpoint(cx);
cx.spawn(async move |_, cx| {
refresh_task.await;
let checkpoint = checkpoint.await.log_err();
thread
.update(&mut cx, |thread, cx| {
.update(cx, |thread, cx| {
let context = context_store.read(cx).snapshot(cx).collect::<Vec<_>>();
thread.insert_user_message(user_message, context, cx);
thread.insert_user_message(user_message, context, checkpoint, cx);
thread.send_to_model(model, request_kind, cx);
})
.ok();
@@ -297,9 +302,9 @@ impl MessageEditor {
.thread
.update(cx, |thread, cx| thread.report_feedback(is_positive, cx));
cx.spawn(|_, mut cx| async move {
cx.spawn(async move |_, cx| {
report.await?;
workspace.update(&mut cx, |workspace, cx| {
workspace.update(cx, |workspace, cx| {
let message = if is_positive {
"Positive feedback recorded. Thank you!"
} else {
@@ -347,8 +352,12 @@ impl Render for MessageEditor {
px(64.)
};
let changed_buffers = self.thread.read(cx).scripting_changed_buffers(cx);
let changed_buffers_count = changed_buffers.len();
let project = self.thread.read(cx).project();
let changed_files = if let Some(repository) = project.read(cx).active_repository(cx) {
repository.read(cx).status().count()
} else {
0
};
v_flex()
.size_full()
@@ -410,7 +419,7 @@ impl Render for MessageEditor {
),
)
})
.when(changed_buffers_count > 0, |parent| {
.when(changed_files > 0, |parent| {
parent.child(
v_flex()
.mx_2()
@@ -421,96 +430,60 @@ impl Render for MessageEditor {
.rounded_t_md()
.child(
h_flex()
.gap_2()
.justify_between()
.p_2()
.child(
Disclosure::new("edits-disclosure", self.edits_expanded)
.on_click(cx.listener(|this, _ev, _window, cx| {
this.edits_expanded = !this.edits_expanded;
cx.notify();
})),
h_flex()
.gap_2()
.child(
IconButton::new(
"edits-disclosure",
IconName::GitBranchSmall,
)
.icon_size(IconSize::Small)
.on_click(
|_ev, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(&git_panel::ToggleFocus)
});
},
),
)
.child(
Label::new(format!(
"{} {} changed",
changed_files,
if changed_files == 1 { "file" } else { "files" }
))
.size(LabelSize::XSmall)
.color(Color::Muted),
),
)
.child(
Label::new("Edits")
.size(LabelSize::XSmall)
.color(Color::Muted),
)
.child(Label::new("").size(LabelSize::XSmall).color(Color::Muted))
.child(
Label::new(format!(
"{} {}",
changed_buffers_count,
if changed_buffers_count == 1 {
"file"
} else {
"files"
}
))
.size(LabelSize::XSmall)
.color(Color::Muted),
h_flex()
.gap_2()
.child(
Button::new("review", "Review")
.label_size(LabelSize::XSmall)
.on_click(|_event, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(
&git_ui::project_diff::Diff,
);
});
}),
)
.child(
Button::new("commit", "Commit")
.label_size(LabelSize::XSmall)
.on_click(|_event, _window, cx| {
cx.defer(|cx| {
cx.dispatch_action(&ExpandCommitEditor)
});
}),
),
),
)
.when(self.edits_expanded, |parent| {
parent.child(
v_flex().bg(cx.theme().colors().editor_background).children(
changed_buffers.enumerate().flat_map(|(index, buffer)| {
let file = buffer.read(cx).file()?;
let path = file.path();
let parent_label = path.parent().and_then(|parent| {
let parent_str = parent.to_string_lossy();
if parent_str.is_empty() {
None
} else {
Some(
Label::new(format!(
"{}{}",
parent_str,
std::path::MAIN_SEPARATOR_STR
))
.color(Color::Muted)
.size(LabelSize::Small),
)
}
});
let name_label = path.file_name().map(|name| {
Label::new(name.to_string_lossy().to_string())
.size(LabelSize::Small)
});
let file_icon = FileIcons::get_icon(&path, cx)
.map(Icon::from_path)
.unwrap_or_else(|| Icon::new(IconName::File));
let element = div()
.p_2()
.when(index + 1 < changed_buffers_count, |parent| {
parent
.border_color(cx.theme().colors().border)
.border_b_1()
})
.child(
h_flex()
.gap_2()
.child(file_icon)
.child(
// TODO: handle overflow
h_flex()
.children(parent_label)
.children(name_label),
)
// TODO: show lines changed
.child(Label::new("+").color(Color::Created))
.child(Label::new("-").color(Color::Deleted)),
);
Some(element)
}),
),
)
}),
),
)
})
.child(

View File

@@ -40,7 +40,7 @@ impl TerminalCodegen {
let telemetry = self.telemetry.clone();
self.status = CodegenStatus::Pending;
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
self.generation = cx.spawn(|this, mut cx| async move {
self.generation = cx.spawn(async move |this, cx| {
let model_telemetry_id = model.telemetry_id();
let model_provider_id = model.provider_id();
let response = model.stream_completion_text(prompt, &cx).await;
@@ -97,12 +97,12 @@ impl TerminalCodegen {
}
});
this.update(&mut cx, |this, _| {
this.update(cx, |this, _| {
this.message_id = message_id;
})?;
while let Some(hunk) = hunks_rx.next().await {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if let Some(transaction) = &mut this.transaction {
transaction.push(hunk, cx);
cx.notify();
@@ -116,7 +116,7 @@ impl TerminalCodegen {
let result = generate.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {

View File

@@ -16,6 +16,7 @@ use language_model::{
LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError,
Role, StopReason, TokenUsage,
};
use project::git::GitStoreCheckpoint;
use project::Project;
use prompt_store::{AssistantSystemPromptWorktree, PromptBuilder};
use scripting_tool::{ScriptingSession, ScriptingTool};
@@ -89,6 +90,12 @@ pub struct GitState {
pub diff: Option<String>,
}
#[derive(Clone)]
pub struct ThreadCheckpoint {
message_id: MessageId,
git_checkpoint: GitStoreCheckpoint,
}
/// A thread of conversation with the LLM.
pub struct Thread {
id: ThreadId,
@@ -99,6 +106,7 @@ pub struct Thread {
next_message_id: MessageId,
context: BTreeMap<ContextId, ContextSnapshot>,
context_by_message: HashMap<MessageId, Vec<ContextId>>,
checkpoints_by_message: HashMap<MessageId, GitStoreCheckpoint>,
completion_count: usize,
pending_completions: Vec<PendingCompletion>,
project: Entity<Project>,
@@ -128,6 +136,7 @@ impl Thread {
next_message_id: MessageId(0),
context: BTreeMap::default(),
context_by_message: HashMap::default(),
checkpoints_by_message: HashMap::default(),
completion_count: 0,
pending_completions: Vec::new(),
project: project.clone(),
@@ -188,6 +197,7 @@ impl Thread {
next_message_id,
context: BTreeMap::default(),
context_by_message: HashMap::default(),
checkpoints_by_message: HashMap::default(),
completion_count: 0,
pending_completions: Vec::new(),
project,
@@ -249,6 +259,45 @@ impl Thread {
&self.tools
}
pub fn checkpoint_for_message(&self, id: MessageId) -> Option<ThreadCheckpoint> {
let checkpoint = self.checkpoints_by_message.get(&id).cloned()?;
Some(ThreadCheckpoint {
message_id: id,
git_checkpoint: checkpoint,
})
}
pub fn restore_checkpoint(
&mut self,
checkpoint: ThreadCheckpoint,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let project = self.project.read(cx);
let restore = project
.git_store()
.read(cx)
.restore_checkpoint(checkpoint.git_checkpoint, cx);
cx.spawn(async move |this, cx| {
restore.await?;
this.update(cx, |this, cx| this.truncate(checkpoint.message_id, cx))
})
}
pub fn truncate(&mut self, message_id: MessageId, cx: &mut Context<Self>) {
let Some(message_ix) = self
.messages
.iter()
.rposition(|message| message.id == message_id)
else {
return;
};
for deleted_message in self.messages.drain(message_ix..) {
self.context_by_message.remove(&deleted_message.id);
self.checkpoints_by_message.remove(&deleted_message.id);
}
cx.notify();
}
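
For orientation, the sketch below mirrors the bookkeeping the new `truncate` performs, using simplified stand-in types (`u32` for `MessageId`, a `String` in place of `GitStoreCheckpoint`): locate the message, drain it and everything after it, and drop the per-message entries for the removed ids.

```rust
use std::collections::HashMap;

type MessageId = u32;
type Checkpoint = String; // stand-in for GitStoreCheckpoint

struct ThreadSketch {
    messages: Vec<MessageId>,
    checkpoints_by_message: HashMap<MessageId, Checkpoint>,
}

impl ThreadSketch {
    // Remove `message_id` and every later message, along with their checkpoints.
    fn truncate(&mut self, message_id: MessageId) {
        let Some(ix) = self.messages.iter().rposition(|&id| id == message_id) else {
            return;
        };
        for deleted in self.messages.drain(ix..) {
            self.checkpoints_by_message.remove(&deleted);
        }
    }
}

fn main() {
    let mut thread = ThreadSketch {
        messages: vec![1, 2, 3],
        checkpoints_by_message: HashMap::from([(2, "ckpt-2".into()), (3, "ckpt-3".into())]),
    };
    thread.truncate(2);
    assert_eq!(thread.messages, vec![1]);
    assert!(thread.checkpoints_by_message.is_empty());
}
```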
pub fn context_for_message(&self, id: MessageId) -> Option<Vec<ContextSnapshot>> {
let context = self.context_by_message.get(&id)?;
Some(
@@ -296,13 +345,6 @@ impl Thread {
self.scripting_tool_use.tool_results_for_message(id)
}
pub fn scripting_changed_buffers<'a>(
&self,
cx: &'a App,
) -> impl ExactSizeIterator<Item = &'a Entity<language::Buffer>> {
self.scripting_session.read(cx).changed_buffers()
}
pub fn message_has_tool_results(&self, message_id: MessageId) -> bool {
self.tool_use.message_has_tool_results(message_id)
}
@@ -315,6 +357,7 @@ impl Thread {
&mut self,
text: impl Into<String>,
context: Vec<ContextSnapshot>,
checkpoint: Option<GitStoreCheckpoint>,
cx: &mut Context<Self>,
) -> MessageId {
let message_id = self.insert_message(Role::User, text, cx);
@@ -322,6 +365,9 @@ impl Thread {
self.context
.extend(context.into_iter().map(|context| (context.id, context)));
self.context_by_message.insert(message_id, context_ids);
if let Some(checkpoint) = checkpoint {
self.checkpoints_by_message.insert(message_id, checkpoint);
}
message_id
}
@@ -394,9 +440,9 @@ impl Thread {
/// Serializes this thread into a format for storage or telemetry.
pub fn serialize(&self, cx: &mut Context<Self>) -> Task<Result<SerializedThread>> {
let initial_project_snapshot = self.initial_project_snapshot.clone();
cx.spawn(|this, cx| async move {
cx.spawn(async move |this, cx| {
let initial_project_snapshot = initial_project_snapshot.await;
this.read_with(&cx, |this, _| SerializedThread {
this.read_with(cx, |this, _| SerializedThread {
summary: this.summary_or_default(),
updated_at: this.updated_at(),
messages: this
@@ -602,8 +648,10 @@ impl Thread {
) {
let pending_completion_id = post_inc(&mut self.completion_count);
let task = cx.spawn(|thread, mut cx| async move {
let task = cx.spawn(async move |thread, cx| {
let stream = model.stream_completion(request, &cx);
let initial_token_usage =
thread.read_with(cx, |thread, _cx| thread.cumulative_token_usage.clone());
let stream_completion = async {
let mut events = stream.await?;
let mut stop_reason = StopReason::EndTurn;
@@ -612,7 +660,7 @@ impl Thread {
while let Some(event) = events.next().await {
let event = event?;
thread.update(&mut cx, |thread, cx| {
thread.update(cx, |thread, cx| {
match event {
LanguageModelCompletionEvent::StartMessage { .. } => {
thread.insert_message(Role::Assistant, String::new(), cx);
@@ -671,7 +719,7 @@ impl Thread {
smol::future::yield_now().await;
}
thread.update(&mut cx, |thread, cx| {
thread.update(cx, |thread, cx| {
thread
.pending_completions
.retain(|completion| completion.id != pending_completion_id);
@@ -687,7 +735,7 @@ impl Thread {
let result = stream_completion.await;
thread
.update(&mut cx, |thread, cx| {
.update(cx, |thread, cx| {
match result.as_ref() {
Ok(stop_reason) => match stop_reason {
StopReason::ToolUse => {
@@ -718,6 +766,21 @@ impl Thread {
}
}
cx.emit(ThreadEvent::DoneStreaming);
if let Ok(initial_usage) = initial_token_usage {
let usage = thread.cumulative_token_usage.clone() - initial_usage;
telemetry::event!(
"Assistant Thread Completion",
thread_id = thread.id().to_string(),
model = model.telemetry_id(),
model_provider = model.provider_id().to_string(),
input_tokens = usage.input_tokens,
output_tokens = usage.output_tokens,
cache_creation_input_tokens = usage.cache_creation_input_tokens,
cache_read_input_tokens = usage.cache_read_input_tokens,
);
}
})
.ok();
});
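
The telemetry added above reports per-completion usage by snapshotting the cumulative counters before the request (`initial_token_usage`) and subtracting them from the cumulative counters afterward. A minimal sketch of that arithmetic, with a simplified two-field `TokenUsage` stand-in (the real type also carries the cache fields and is assumed to support subtraction in the same way):

```rust
use std::ops::Sub;

#[derive(Clone, Copy, Default, Debug, PartialEq)]
struct TokenUsage {
    input_tokens: u64,
    output_tokens: u64,
}

impl Sub for TokenUsage {
    type Output = TokenUsage;
    fn sub(self, rhs: TokenUsage) -> TokenUsage {
        TokenUsage {
            input_tokens: self.input_tokens - rhs.input_tokens,
            output_tokens: self.output_tokens - rhs.output_tokens,
        }
    }
}

fn main() {
    let initial = TokenUsage { input_tokens: 100, output_tokens: 40 };
    let cumulative = TokenUsage { input_tokens: 180, output_tokens: 95 };
    // Usage attributable to this completion alone.
    let this_completion = cumulative - initial;
    assert_eq!(this_completion, TokenUsage { input_tokens: 80, output_tokens: 55 });
}
```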
@@ -750,7 +813,7 @@ impl Thread {
cache: false,
});
self.pending_summary = cx.spawn(|this, mut cx| {
self.pending_summary = cx.spawn(async move |this, cx| {
async move {
let stream = model.stream_completion_text(request, &cx);
let mut messages = stream.await?;
@@ -767,7 +830,7 @@ impl Thread {
}
}
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if !new_summary.is_empty() {
this.summary = Some(new_summary.into());
}
@@ -778,6 +841,7 @@ impl Thread {
anyhow::Ok(())
}
.log_err()
.await
});
}
@@ -823,10 +887,10 @@ impl Thread {
});
let session = self.scripting_session.clone();
cx.spawn(|_, cx| async move {
cx.spawn(async move |_, cx| {
script_task.await;
let message = session.read_with(&cx, |session, _cx| {
let message = session.read_with(cx, |session, _cx| {
// Using an id to get the script output seems impractical.
// Why not just include it in the Task result?
// This is because we'll later report the script state as it runs,
@@ -851,12 +915,12 @@ impl Thread {
output: Task<Result<String>>,
cx: &mut Context<Self>,
) {
let insert_output_task = cx.spawn(|thread, mut cx| {
let insert_output_task = cx.spawn({
let tool_use_id = tool_use_id.clone();
async move {
async move |thread, cx| {
let output = output.await;
thread
.update(&mut cx, |thread, cx| {
.update(cx, |thread, cx| {
let pending_tool_use = thread
.tool_use
.insert_tool_output(tool_use_id.clone(), output);
@@ -881,12 +945,12 @@ impl Thread {
output: Task<Result<String>>,
cx: &mut Context<Self>,
) {
let insert_output_task = cx.spawn(|thread, mut cx| {
let insert_output_task = cx.spawn({
let tool_use_id = tool_use_id.clone();
async move {
async move |thread, cx| {
let output = output.await;
thread
.update(&mut cx, |thread, cx| {
.update(cx, |thread, cx| {
let pending_tool_use = thread
.scripting_tool_use
.insert_tool_output(tool_use_id.clone(), output);
@@ -923,6 +987,7 @@ impl Thread {
// so for now we provide some text to keep the model on track.
"Here are the tool results.",
Vec::new(),
None,
cx,
);
}
@@ -985,7 +1050,7 @@ impl Thread {
.map(|worktree| Self::worktree_snapshot(worktree, cx))
.collect();
cx.spawn(move |_, cx| async move {
cx.spawn(async move |_, cx| {
let worktree_snapshots = futures::future::join_all(worktree_snapshots).await;
let mut unsaved_buffers = Vec::new();
@@ -1012,7 +1077,7 @@ impl Thread {
}
fn worktree_snapshot(worktree: Entity<project::Worktree>, cx: &App) -> Task<WorktreeSnapshot> {
cx.spawn(move |cx| async move {
cx.spawn(async move |cx| {
// Get worktree path and snapshot
let worktree_info = cx.update(|app_cx| {
let worktree = worktree.read(app_cx);
@@ -1036,7 +1101,7 @@ impl Thread {
let current_branch = repo_entry.branch().map(|branch| branch.name.to_string());
// Get repository info
let repo_result = worktree.read_with(&cx, |worktree, _cx| {
let repo_result = worktree.read_with(cx, |worktree, _cx| {
if let project::Worktree::Local(local_worktree) = &worktree {
local_worktree.get_local_repo(repo_entry).map(|local_repo| {
let repo = local_repo.repo();
@@ -1051,7 +1116,7 @@ impl Thread {
Ok(Some((remote_url, head_sha, repository))) => {
// Get diff asynchronously
let diff = repository
.diff(git::repository::DiffType::HeadToWorktree, cx)
.diff(git::repository::DiffType::HeadToWorktree, cx.clone())
.await
.ok();
@@ -1126,6 +1191,10 @@ impl Thread {
&self.action_log
}
pub fn project(&self) -> &Entity<Project> {
&self.project
}
pub fn cumulative_token_usage(&self) -> TokenUsage {
self.cumulative_token_usage.clone()
}

View File

@@ -65,6 +65,14 @@ impl ThreadStore {
Ok(this)
}
pub fn context_server_manager(&self) -> Entity<ContextServerManager> {
self.context_server_manager.clone()
}
pub fn tools(&self) -> Arc<ToolWorkingSet> {
self.tools.clone()
}
/// Returns the number of threads.
pub fn thread_count(&self) -> usize {
self.threads.len()
@@ -98,14 +106,14 @@ impl ThreadStore {
) -> Task<Result<Entity<Thread>>> {
let id = id.clone();
let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let database = database_future.await.map_err(|err| anyhow!(err))?;
let thread = database
.try_find_thread(id.clone())
.await?
.ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
cx.new(|cx| {
Thread::deserialize(
id.clone(),
@@ -125,23 +133,23 @@ impl ThreadStore {
thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx)));
let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let serialized_thread = serialized_thread.await?;
let database = database_future.await.map_err(|err| anyhow!(err))?;
database.save_thread(metadata, serialized_thread).await?;
this.update(&mut cx, |this, cx| this.reload(cx))?.await
this.update(cx, |this, cx| this.reload(cx))?.await
})
}
pub fn delete_thread(&mut self, id: &ThreadId, cx: &mut Context<Self>) -> Task<Result<()>> {
let id = id.clone();
let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let database = database_future.await.map_err(|err| anyhow!(err))?;
database.delete_thread(id.clone()).await?;
this.update(&mut cx, |this, _cx| {
this.update(cx, |this, _cx| {
this.threads.retain(|thread| thread.id != id)
})
})
@@ -149,14 +157,14 @@ impl ThreadStore {
pub fn reload(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
let database_future = ThreadsDatabase::global_future(cx);
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let threads = database_future
.await
.map_err(|err| anyhow!(err))?
.list_threads()
.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.threads = threads;
cx.notify();
})
@@ -185,7 +193,7 @@ impl ThreadStore {
cx.spawn({
let server = server.clone();
let server_id = server_id.clone();
|this, mut cx| async move {
async move |this, cx| {
let Some(protocol) = server.client() else {
return;
};
@@ -210,7 +218,7 @@ impl ThreadStore {
})
.collect::<Vec<_>>();
this.update(&mut cx, |this, _cx| {
this.update(cx, |this, _cx| {
this.context_server_tool_ids.insert(server_id, tool_ids);
})
.log_err();

View File

@@ -5,13 +5,19 @@ use gpui::Entity;
use scripting_tool::ScriptingTool;
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
use crate::agent_profile::AgentProfile;
pub struct ToolSelector {
profiles: Vec<AgentProfile>,
tools: Arc<ToolWorkingSet>,
}
impl ToolSelector {
pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self {
Self { tools }
Self {
profiles: vec![AgentProfile::read_only(), AgentProfile::code_writer()],
tools,
}
}
fn build_context_menu(
@@ -19,9 +25,31 @@ impl ToolSelector {
window: &mut Window,
cx: &mut Context<Self>,
) -> Entity<ContextMenu> {
let profiles = self.profiles.clone();
let tool_set = self.tools.clone();
ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| {
let icon_position = IconPosition::End;
menu = menu.header("Profiles");
for profile in profiles.clone() {
menu = menu.toggleable_entry(profile.name.clone(), false, icon_position, None, {
let tools = tool_set.clone();
move |_window, cx| {
tools.disable_source(ToolSource::Native, cx);
tools.enable(
ToolSource::Native,
&profile
.tools
.iter()
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
.collect::<Vec<_>>(),
);
}
});
}
menu = menu.separator();
let tools_by_source = tool_set.tools_by_source(cx);
let all_tools_enabled = tool_set.are_all_tools_enabled();

View File

@@ -286,9 +286,17 @@ impl ToolUseState {
) {
if let Some(tool_uses) = self.tool_uses_by_assistant_message.get(&message_id) {
for tool_use in tool_uses {
request_message
.content
.push(MessageContent::ToolUse(tool_use.clone()));
if self.tool_results.contains_key(&tool_use.id) {
// Do not send tool uses until they are completed
request_message
.content
.push(MessageContent::ToolUse(tool_use.clone()));
} else {
log::debug!(
"skipped tool use {:?} because it is still pending",
tool_use
);
}
}
}
}
@@ -301,9 +309,19 @@ impl ToolUseState {
if let Some(tool_uses) = self.tool_uses_by_user_message.get(&message_id) {
for tool_use_id in tool_uses {
if let Some(tool_result) = self.tool_results.get(tool_use_id) {
request_message
.content
.push(MessageContent::ToolResult(tool_result.clone()));
request_message.content.push(MessageContent::ToolResult(
LanguageModelToolResult {
tool_use_id: tool_use_id.clone(),
is_error: tool_result.is_error,
content: if tool_result.content.is_empty() {
// Surprisingly, the API fails if we return an empty string here.
// It thinks we are sending a tool use without a tool result.
"<Tool returned an empty string>".into()
} else {
tool_result.content.clone()
},
},
));
}
}
}
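
Taken together, the two changes above enforce a pair of rules when building the request: tool uses with no recorded result yet are skipped, and completed results with empty content are replaced by a placeholder so the API does not mistake them for a missing result. A std-only sketch with simplified stand-in types:

```rust
use std::collections::HashMap;

// Returns (tool_use_id, content) pairs ready to attach to the request.
fn content_for_request<'a>(
    tool_use_ids: &'a [u32],
    tool_results: &'a HashMap<u32, String>,
) -> Vec<(u32, &'a str)> {
    tool_use_ids
        .iter()
        .filter_map(|id| {
            let content = tool_results.get(id)?; // skip uses that are still pending
            Some((
                *id,
                if content.is_empty() {
                    "<Tool returned an empty string>"
                } else {
                    content.as_str()
                },
            ))
        })
        .collect()
}

fn main() {
    let results = HashMap::from([(1, String::new()), (2, "done".to_string())]);
    let out = content_for_request(&[1, 2, 3], &results);
    assert_eq!(out, vec![(1, "<Tool returned an empty string>"), (2, "done")]);
}
```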

View File

@@ -1144,9 +1144,9 @@ impl AssistantContext {
fn set_language(&mut self, cx: &mut Context<Self>) {
let markdown = self.language_registry.language_for_name("Markdown");
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let markdown = markdown.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.buffer
.update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
})
@@ -1188,7 +1188,7 @@ impl AssistantContext {
return;
};
let debounce = self.token_count.is_some();
self.pending_token_count = cx.spawn(|this, mut cx| {
self.pending_token_count = cx.spawn(async move |this, cx| {
async move {
if debounce {
cx.background_executor()
@@ -1197,13 +1197,14 @@ impl AssistantContext {
}
let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.token_count = Some(token_count);
this.start_cache_warming(&model, cx);
cx.notify()
})
}
.log_err()
.await
});
}
@@ -1342,7 +1343,7 @@ impl AssistantContext {
};
let model = Arc::clone(model);
self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
self.pending_cache_warming_task = cx.spawn(async move |this, cx| {
async move {
match model.stream_completion(request, &cx).await {
Ok(mut stream) => {
@@ -1353,13 +1354,14 @@ impl AssistantContext {
log::warn!("Cache warming failed: {}", e);
}
};
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.update_cache_status_for_completion(cx);
})
.ok();
anyhow::Ok(())
}
.log_err()
.await
});
}
@@ -1916,7 +1918,7 @@ impl AssistantContext {
});
self.reparse(cx);
let insert_output_task = cx.spawn(|this, mut cx| async move {
let insert_output_task = cx.spawn(async move |this, cx| {
let run_command = async {
let mut stream = output.await?;
@@ -1933,7 +1935,7 @@ impl AssistantContext {
while let Some(event) = stream.next().await {
let event = event?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.buffer.update(cx, |buffer, _cx| {
buffer.finalize_last_transaction();
buffer.start_transaction()
@@ -2034,7 +2036,7 @@ impl AssistantContext {
})?;
}
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.buffer.update(cx, |buffer, cx| {
buffer.finalize_last_transaction();
buffer.start_transaction();
@@ -2080,7 +2082,7 @@ impl AssistantContext {
let command_result = run_command.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let version = this.version.clone();
let timestamp = this.next_timestamp();
let Some(invoked_slash_command) = this.invoked_slash_commands.get_mut(&command_id)
@@ -2210,7 +2212,7 @@ impl AssistantContext {
let pending_completion_id = post_inc(&mut self.completion_count);
let task = cx.spawn({
|this, mut cx| async move {
async move |this, cx| {
let stream = model.stream_completion(request, &cx);
let assistant_message_id = assistant_message.id;
let mut response_latency = None;
@@ -2225,7 +2227,7 @@ impl AssistantContext {
}
let event = event?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let message_ix = this
.message_anchors
.iter()
@@ -2264,7 +2266,7 @@ impl AssistantContext {
})?;
smol::future::yield_now().await;
}
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.pending_completions
.retain(|completion| completion.id != pending_completion_id);
this.summarize(false, cx);
@@ -2276,7 +2278,7 @@ impl AssistantContext {
let result = stream_completion.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let error_message = if let Some(error) = result.as_ref().err() {
if error.is::<PaymentRequiredError>() {
cx.emit(ContextEvent::ShowPaymentRequiredError);
@@ -2786,7 +2788,7 @@ impl AssistantContext {
cache: false,
});
self.pending_summary = cx.spawn(|this, mut cx| {
self.pending_summary = cx.spawn(async move |this, cx| {
async move {
let stream = model.stream_completion_text(request, &cx);
let mut messages = stream.await?;
@@ -2795,7 +2797,7 @@ impl AssistantContext {
while let Some(message) = messages.stream.next().await {
let text = message?;
let mut lines = text.lines();
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let version = this.version.clone();
let timestamp = this.next_timestamp();
let summary = this.summary.get_or_insert(ContextSummary::default());
@@ -2819,7 +2821,7 @@ impl AssistantContext {
}
}
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let version = this.version.clone();
let timestamp = this.next_timestamp();
if let Some(summary) = this.summary.as_mut() {
@@ -2837,6 +2839,7 @@ impl AssistantContext {
anyhow::Ok(())
}
.log_err()
.await
});
}
}
@@ -2943,12 +2946,12 @@ impl AssistantContext {
return;
}
self.pending_save = cx.spawn(|this, mut cx| async move {
self.pending_save = cx.spawn(async move |this, cx| {
if let Some(debounce) = debounce {
cx.background_executor().timer(debounce).await;
}
let (old_path, summary) = this.read_with(&cx, |this, _| {
let (old_path, summary) = this.read_with(cx, |this, _| {
let path = this.path.clone();
let summary = if let Some(summary) = this.summary.as_ref() {
if summary.done {
@@ -2963,7 +2966,7 @@ impl AssistantContext {
})?;
if let Some(summary) = summary {
let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
let context = this.read_with(cx, |this, cx| this.serialize(cx))?;
let mut discriminant = 1;
let mut new_path;
loop {
@@ -2995,7 +2998,7 @@ impl AssistantContext {
}
}
this.update(&mut cx, |this, _| this.path = Some(new_path))?;
this.update(cx, |this, _| this.path = Some(new_path))?;
}
Ok(())

View File

@@ -907,7 +907,7 @@ impl ContextEditor {
if editor_state.opened_patch != patch {
state.update_task = Some({
let this = this.clone();
cx.spawn_in(window, |_, cx| async move {
cx.spawn_in(window, async move |_, cx| {
Self::update_patch_editor(this.clone(), patch, cx)
.await
.log_err();
@@ -1070,10 +1070,9 @@ impl ContextEditor {
})
.ok();
} else {
patch_state.update_task =
Some(cx.spawn_in(window, move |this, cx| async move {
Self::open_patch_editor(this, new_patch, cx).await.log_err();
}));
patch_state.update_task = Some(cx.spawn_in(window, async move |this, cx| {
Self::open_patch_editor(this, new_patch, cx).await.log_err();
}));
}
}
}
@@ -1103,10 +1102,10 @@ impl ContextEditor {
async fn open_patch_editor(
this: WeakEntity<Self>,
patch: AssistantPatch,
mut cx: AsyncWindowContext,
cx: &mut AsyncWindowContext,
) -> Result<()> {
let project = this.read_with(&cx, |this, _| this.project.clone())?;
let resolved_patch = patch.resolve(project.clone(), &mut cx).await;
let project = this.read_with(cx, |this, _| this.project.clone())?;
let resolved_patch = patch.resolve(project.clone(), cx).await;
let editor = cx.new_window_entity(|window, cx| {
let editor = ProposedChangesEditor::new(
@@ -1130,7 +1129,7 @@ impl ContextEditor {
editor
})?;
this.update(&mut cx, |this, _| {
this.update(cx, |this, _| {
if let Some(patch_state) = this.patches.get_mut(&patch.range) {
patch_state.editor = Some(PatchEditorState {
editor: editor.downgrade(),
@@ -1139,8 +1138,8 @@ impl ContextEditor {
patch_state.update_task.take();
}
})?;
this.read_with(&cx, |this, _| this.workspace.clone())?
.update_in(&mut cx, |workspace, window, cx| {
this.read_with(cx, |this, _| this.workspace.clone())?
.update_in(cx, |workspace, window, cx| {
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, false, window, cx)
})
.log_err();
@@ -1151,11 +1150,11 @@ impl ContextEditor {
async fn update_patch_editor(
this: WeakEntity<Self>,
patch: AssistantPatch,
mut cx: AsyncWindowContext,
cx: &mut AsyncWindowContext,
) -> Result<()> {
let project = this.update(&mut cx, |this, _| this.project.clone())?;
let resolved_patch = patch.resolve(project.clone(), &mut cx).await;
this.update_in(&mut cx, |this, window, cx| {
let project = this.update(cx, |this, _| this.project.clone())?;
let resolved_patch = patch.resolve(project.clone(), cx).await;
this.update_in(cx, |this, window, cx| {
let patch_state = this.patches.get_mut(&patch.range)?;
let locations = resolved_patch
@@ -1625,14 +1624,14 @@ impl ContextEditor {
.map(|path| Workspace::project_path_for_path(project.clone(), &path, false, cx))
.collect::<Vec<_>>();
cx.spawn(move |_, cx| async move {
cx.spawn(async move |_, cx| {
let mut paths = vec![];
let mut worktrees = vec![];
let opened_paths = futures::future::join_all(tasks).await;
for (worktree, project_path) in opened_paths.into_iter().flatten() {
let Ok(worktree_root_name) =
worktree.read_with(&cx, |worktree, _| worktree.root_name().to_string())
worktree.read_with(cx, |worktree, _| worktree.root_name().to_string())
else {
continue;
};
@@ -1649,12 +1648,12 @@ impl ContextEditor {
};
window
.spawn(cx, |mut cx| async move {
.spawn(cx, async move |cx| {
let (paths, dragged_file_worktrees) = paths.await;
let cmd_name = FileSlashCommand.name();
context_editor_view
.update_in(&mut cx, |context_editor, window, cx| {
.update_in(cx, |context_editor, window, cx| {
let file_argument = paths
.into_iter()
.map(|path| path.to_string_lossy().to_string())
@@ -2200,9 +2199,9 @@ impl ContextEditor {
.log_err();
if let Some(client) = client {
cx.spawn(|this, mut cx| async move {
client.authenticate_and_connect(true, &mut cx).await?;
this.update(&mut cx, |_, cx| cx.notify())
cx.spawn(async move |this, cx| {
client.authenticate_and_connect(true, cx).await?;
this.update(cx, |_, cx| cx.notify())
})
.detach_and_log_err(cx)
}
@@ -3161,10 +3160,10 @@ impl FollowableItem for ContextEditor {
assistant_panel_delegate.open_remote_context(workspace, context_id, window, cx)
});
Some(window.spawn(cx, |mut cx| async move {
Some(window.spawn(cx, async move |cx| {
let context_editor = context_editor_task.await?;
context_editor
.update_in(&mut cx, |context_editor, window, cx| {
.update_in(cx, |context_editor, window, cx| {
context_editor.remote_id = Some(id);
context_editor.editor.update(cx, |editor, cx| {
editor.apply_update_proto(

View File

@@ -164,9 +164,9 @@ impl PickerDelegate for SavedContextPickerDelegate {
cx: &mut Context<Picker<Self>>,
) -> Task<()> {
let search = self.store.read(cx).search(query, cx);
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let matches = search.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let host_contexts = this.delegate.store.read(cx).host_contexts();
this.delegate.matches = host_contexts
.iter()

View File

@@ -100,7 +100,7 @@ impl ContextStore {
let fs = project.read(cx).fs().clone();
let languages = project.read(cx).languages().clone();
let telemetry = project.read(cx).client().telemetry().clone();
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100);
let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await;
@@ -125,16 +125,15 @@ impl ContextStore {
languages,
slash_commands,
telemetry,
_watch_updates: cx.spawn(|this, mut cx| {
_watch_updates: cx.spawn(async move |this, cx| {
async move {
while events.next().await.is_some() {
this.update(&mut cx, |this, cx| this.reload(cx))?
.await
.log_err();
this.update(cx, |this, cx| this.reload(cx))?.await.log_err();
}
anyhow::Ok(())
}
.log_err()
.await
}),
client_subscription: None,
_project_subscriptions: vec![
@@ -395,7 +394,7 @@ impl ContextStore {
let prompt_builder = self.prompt_builder.clone();
let slash_commands = self.slash_commands.clone();
let request = self.client.request(proto::CreateContext { project_id });
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let response = request.await?;
let context_id = ContextId::from_proto(response.context_id);
let context_proto = response.context.context("invalid context")?;
@@ -421,8 +420,8 @@ impl ContextStore {
.collect::<Result<Vec<_>>>()
})
.await?;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(&mut cx, |this, cx| {
context.update(cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context
} else {
@@ -457,7 +456,7 @@ impl ContextStore {
let prompt_builder = self.prompt_builder.clone();
let slash_commands = self.slash_commands.clone();
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let saved_context = load.await?;
let context = cx.new(|cx| {
AssistantContext::deserialize(
@@ -471,7 +470,7 @@ impl ContextStore {
cx,
)
})?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_path(&path, cx) {
existing_context
} else {
@@ -489,7 +488,7 @@ impl ContextStore {
) -> Task<Result<()>> {
let fs = self.fs.clone();
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
fs.remove_file(
&path,
RemoveOptions {
@@ -499,7 +498,7 @@ impl ContextStore {
)
.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.contexts.retain(|context| {
context
.upgrade()
@@ -565,7 +564,7 @@ impl ContextStore {
});
let prompt_builder = self.prompt_builder.clone();
let slash_commands = self.slash_commands.clone();
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let response = request.await?;
let context_proto = response.context.context("invalid context")?;
let context = cx.new(|cx| {
@@ -590,8 +589,8 @@ impl ContextStore {
.collect::<Result<Vec<_>>>()
})
.await?;
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(&mut cx, |this, cx| {
context.update(cx, |context, cx| context.apply_ops(operations, cx))?;
this.update(cx, |this, cx| {
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
existing_context
} else {
@@ -700,12 +699,12 @@ impl ContextStore {
project_id,
contexts,
});
cx.spawn(|this, cx| async move {
cx.spawn(async move |this, cx| {
let response = request.await?;
let mut context_ids = Vec::new();
let mut operations = Vec::new();
this.read_with(&cx, |this, cx| {
this.read_with(cx, |this, cx| {
for context_version_proto in response.contexts {
let context_version = ContextVersion::from_proto(&context_version_proto);
let context_id = ContextId::from_proto(context_version_proto.context_id);
@@ -768,7 +767,7 @@ impl ContextStore {
fn reload(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
let fs = self.fs.clone();
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
fs.create_dir(contexts_dir()).await?;
let mut paths = fs.read_dir(contexts_dir()).await?;
@@ -808,7 +807,7 @@ impl ContextStore {
}
contexts.sort_unstable_by_key(|context| Reverse(context.mtime));
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.contexts_metadata = contexts;
cx.notify();
})
@@ -819,7 +818,7 @@ impl ContextStore {
cx.update_entity(
&self.context_server_manager,
|context_server_manager, cx| {
for server in context_server_manager.servers() {
for server in context_server_manager.running_servers() {
context_server_manager
.restart_server(&server.id(), cx)
.detach_and_log_err(cx);
@@ -850,7 +849,7 @@ impl ContextStore {
cx.spawn({
let server = server.clone();
let server_id = server_id.clone();
|this, mut cx| async move {
async move |this, cx| {
let Some(protocol) = server.client() else {
return;
};
@@ -875,7 +874,7 @@ impl ContextStore {
})
.collect::<Vec<_>>();
this.update(&mut cx, |this, _cx| {
this.update(cx, |this, _cx| {
this.context_server_slash_command_ids
.insert(server_id.clone(), slash_command_ids);
})

View File

@@ -59,7 +59,7 @@ impl SlashCommandCompletionProvider {
let command_name = command_name.to_string();
let editor = self.editor.clone();
let workspace = self.workspace.clone();
window.spawn(cx, |mut cx| async move {
window.spawn(cx, async move |cx| {
let matches = match_strings(
&candidates,
&command_name,

View File

@@ -100,7 +100,7 @@ impl PickerDelegate for SlashCommandDelegate {
cx: &mut Context<Picker<Self>>,
) -> Task<()> {
let all_commands = self.all_commands.clone();
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let filtered_commands = cx
.background_spawn(async move {
if query.is_empty() {
@@ -119,7 +119,7 @@ impl PickerDelegate for SlashCommandDelegate {
})
.await;
this.update_in(&mut cx, |this, window, cx| {
this.update_in(cx, |this, window, cx| {
this.delegate.filtered_commands = filtered_commands;
this.delegate.set_selected_index(0, window, cx);
cx.notify();

View File

@@ -63,14 +63,14 @@ impl Eval {
model: Arc<dyn LanguageModel>,
cx: &mut App,
) -> Task<anyhow::Result<EvalOutput>> {
cx.spawn(move |mut cx| async move {
cx.spawn(async move |cx| {
checkout_repo(&self.eval_setup, &self.repo_path).await?;
let (assistant, done_rx) =
cx.update(|cx| HeadlessAssistant::new(app_state.clone(), cx))??;
let _worktree = assistant
.update(&mut cx, |assistant, cx| {
.update(cx, |assistant, cx| {
assistant.project.update(cx, |project, cx| {
project.create_worktree(&self.repo_path, true, cx)
})
@@ -79,10 +79,10 @@ impl Eval {
let start_time = std::time::SystemTime::now();
assistant.update(&mut cx, |assistant, cx| {
assistant.update(cx, |assistant, cx| {
assistant.thread.update(cx, |thread, cx| {
let context = vec![];
thread.insert_user_message(self.user_prompt.clone(), context, cx);
thread.insert_user_message(self.user_prompt.clone(), context, None, cx);
thread.send_to_model(model, RequestKind::Chat, cx);
});
})?;
@@ -93,7 +93,7 @@ impl Eval {
let diff = query_git(&self.repo_path, vec!["diff"]).await?;
assistant.update(&mut cx, |assistant, cx| {
assistant.update(cx, |assistant, cx| {
let thread = assistant.thread.read(cx);
let last_message = thread.messages().last().unwrap();
if last_message.role != language_model::Role::Assistant {

View File

@@ -212,7 +212,7 @@ pub fn authenticate_model_provider(
pub async fn send_language_model_request(
model: Arc<dyn LanguageModel>,
request: LanguageModelRequest,
cx: AsyncApp,
cx: &mut AsyncApp,
) -> anyhow::Result<String> {
match model.stream_completion_text(request, &cx).await {
Ok(mut stream) => {

View File

@@ -61,7 +61,7 @@ impl Judge {
};
let model = self.model.clone();
cx.spawn(move |cx| send_language_model_request(model, request, cx))
cx.spawn(async move |cx| send_language_model_request(model, request, cx).await)
}
}

View File

@@ -111,7 +111,7 @@ fn main() {
let editor_model_provider_id = editor_model.provider_id();
let judge_model_provider_id = judge_model.provider_id();
cx.spawn(move |cx| async move {
cx.spawn(async move |cx| {
// Authenticate all model providers first
cx.update(|cx| authenticate_model_provider(model_provider_id.clone(), cx))
.unwrap()

View File

@@ -77,8 +77,8 @@ impl SlashCommand for AutoCommand {
let cx: &mut App = cx;
cx.spawn(|cx: gpui::AsyncApp| async move {
let task = project_index.read_with(&cx, |project_index, cx| {
cx.spawn(async move |cx| {
let task = project_index.read_with(cx, |project_index, cx| {
project_index.flush_summary_backlogs(cx)
})?;
@@ -117,9 +117,9 @@ impl SlashCommand for AutoCommand {
return Task::ready(Err(anyhow!("no project indexer")));
};
let task = window.spawn(cx, |cx| async move {
let task = window.spawn(cx, async move |cx| {
let summaries = project_index
.read_with(&cx, |project_index, cx| project_index.all_summaries(cx))?
.read_with(cx, |project_index, cx| project_index.all_summaries(cx))?
.await?;
commands_for_summaries(&summaries, &original_prompt, &cx).await

View File

@@ -186,7 +186,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
window.spawn(cx, move |_| async move {
window.spawn(cx, async move |_| {
task.await?
.map(|output| output.to_event_stream())
.ok_or_else(|| anyhow!("No diagnostics found"))
@@ -268,7 +268,7 @@ fn collect_diagnostics(
})
.collect();
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
let mut output = SlashCommandOutput::default();
if let Some(error_source) = error_source.as_ref() {
@@ -299,7 +299,7 @@ fn collect_diagnostics(
}
if let Some(buffer) = project_handle
.update(&mut cx, |project, cx| project.open_buffer(project_path, cx))?
.update(cx, |project, cx| project.open_buffer(project_path, cx))?
.await
.log_err()
{

View File

@@ -241,7 +241,7 @@ fn collect_files(
.collect::<Vec<_>>();
let (events_tx, events_rx) = mpsc::unbounded();
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
for snapshot in snapshots {
let worktree_id = snapshot.id();
let mut directory_stack: Vec<Arc<Path>> = Vec::new();
@@ -352,7 +352,7 @@ fn collect_files(
)))?;
} else if entry.is_file() {
let Some(open_buffer_task) = project_handle
.update(&mut cx, |project, cx| {
.update(cx, |project, cx| {
project.open_buffer((worktree_id, &entry.path), cx)
})
.ok()
@@ -361,7 +361,7 @@ fn collect_files(
};
if let Some(buffer) = open_buffer_task.await.log_err() {
let mut output = SlashCommandOutput::default();
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
append_buffer_to_output(
&snapshot,
Some(&path_including_worktree_name),

View File

@@ -99,7 +99,7 @@ impl SlashCommand for ProjectSlashCommand {
return Task::ready(Err(anyhow::anyhow!("no project indexer")));
};
window.spawn(cx, |mut cx| async move {
window.spawn(cx, async move |cx| {
let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;
let prompt =
@@ -123,7 +123,7 @@ impl SlashCommand for ProjectSlashCommand {
.search_queries;
let results = project_index
.read_with(&cx, |project_index, cx| {
.read_with(cx, |project_index, cx| {
project_index.search(search_queries.clone(), 25, cx)
})?
.await?;

View File

@@ -109,9 +109,9 @@ impl SlashCommand for SearchSlashCommand {
return Task::ready(Err(anyhow::anyhow!("no project indexer")));
};
window.spawn(cx, |cx| async move {
window.spawn(cx, async move |cx| {
let results = project_index
.read_with(&cx, |project_index, cx| {
.read_with(cx, |project_index, cx| {
project_index.search(vec![query.clone()], limit.unwrap_or(5), cx)
})?
.await?;

View File

@@ -86,7 +86,7 @@ impl SlashCommand for TabSlashCommand {
tab_items_for_queries(workspace, &[current_query], cancel, false, window, cx);
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
window.spawn(cx, |_| async move {
window.spawn(cx, async move |_| {
let tab_items = tab_items_search.await?;
let run_command = tab_items.len() == 1;
let tab_completion_items = tab_items.into_iter().filter_map(|(path, ..)| {
@@ -172,11 +172,11 @@ fn tab_items_for_queries(
) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> {
let empty_query = queries.is_empty() || queries.iter().all(|query| query.trim().is_empty());
let queries = queries.to_owned();
window.spawn(cx, |mut cx| async move {
window.spawn(cx, async move |cx| {
let mut open_buffers =
workspace
.context("no workspace")?
.update(&mut cx, |workspace, cx| {
.update(cx, |workspace, cx| {
if strict_match && empty_query {
let snapshot = active_item_buffer(workspace, cx)?;
let full_path = snapshot.resolve_file_path(cx, true);

View File

@@ -43,3 +43,4 @@ language = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
workspace = { workspace = true, features = ["test-support"] }
unindent.workspace = true

View File

@@ -50,7 +50,7 @@ impl Tool for BashTool {
};
let working_directory = worktree.read(cx).abs_path();
cx.spawn(|_| async move {
cx.spawn(async move |_| {
// Add 2>&1 to merge stderr into stdout for proper interleaving.
let command = format!("({}) 2>&1", input.command);
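
As a side note on the redirection used here: wrapping the user command in a subshell and appending `2>&1` routes stderr into the same stream as stdout, so the two interleave in order. A small, std-only sketch of the idea (the `sh -c` invocation is an assumption for the sketch, not the tool's exact plumbing):

```rust
use std::process::Command;

fn run_merged(user_command: &str) -> std::io::Result<String> {
    // Merge stderr into stdout so the combined output interleaves in order.
    let command = format!("({}) 2>&1", user_command);
    let output = Command::new("sh").arg("-c").arg(command).output()?;
    Ok(String::from_utf8_lossy(&output.stdout).into_owned())
}

fn main() -> std::io::Result<()> {
    let out = run_merged("echo to-stdout; echo to-stderr 1>&2")?;
    assert!(out.contains("to-stdout") && out.contains("to-stderr"));
    print!("{out}");
    Ok(())
}
```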

View File

@@ -65,10 +65,10 @@ impl Tool for DiagnosticsTool {
};
let buffer = project.update(cx, |project, cx| project.open_buffer(project_path, cx));
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
let mut output = String::new();
let buffer = buffer.await?;
let snapshot = buffer.read_with(&cx, |buffer, _cx| buffer.snapshot())?;
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
for (_, group) in snapshot.diagnostic_groups(None) {
let entry = &group.entries[group.primary_ix];

View File

@@ -1,5 +1,6 @@
mod edit_action;
pub mod log;
mod replace;
use anyhow::{anyhow, Context, Result};
use assistant_tool::{ActionLog, Tool};
@@ -11,12 +12,12 @@ use language_model::{
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
};
use log::{EditToolLog, EditToolRequestId};
use project::{search::SearchQuery, Project};
use project::Project;
use replace::{replace_exact, replace_with_flexible_indent};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::fmt::Write;
use std::sync::Arc;
use util::paths::PathMatcher;
use util::ResultExt;
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
@@ -103,7 +104,7 @@ impl Tool for EditFilesTool {
cx,
);
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
let result = task.await;
let str_result = match &result {
@@ -111,10 +112,8 @@ impl Tool for EditFilesTool {
Err(err) => Err(err.to_string()),
};
log.update(&mut cx, |log, cx| {
log.set_tool_output(req_id, str_result, cx)
})
.log_err();
log.update(cx, |log, cx| log.set_tool_output(req_id, str_result, cx))
.log_err();
result
})
@@ -188,7 +187,7 @@ impl EditToolRequest {
cache: false,
});
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
let llm_request = LanguageModelRequest {
messages,
tools: vec![],
@@ -211,10 +210,10 @@ impl EditToolRequest {
};
while let Some(chunk) = chunks.stream.next().await {
request.process_response_chunk(&chunk?, &mut cx).await?;
request.process_response_chunk(&chunk?, cx).await?;
}
request.finalize(&mut cx).await
request.finalize(cx).await
})
}
@@ -291,41 +290,18 @@ impl EditToolRequest {
file_path: std::path::PathBuf,
snapshot: language::BufferSnapshot,
) -> Result<DiffResult> {
let query = SearchQuery::text(
old.clone(),
false,
true,
true,
PathMatcher::new(&[])?,
PathMatcher::new(&[])?,
None,
)?;
let result =
// Try to match exactly
replace_exact(&old, &new, &snapshot)
.await
// If that fails, try being flexible about indentation
.or_else(|| replace_with_flexible_indent(&old, &new, &snapshot));
let matches = query.search(&snapshot, None).await;
if matches.is_empty() {
return Ok(DiffResult::BadSearch(BadSearch {
search: new.clone(),
let Some(diff) = result else {
return anyhow::Ok(DiffResult::BadSearch(BadSearch {
search: old,
file_path: file_path.display().to_string(),
}));
}
let edit_range = matches[0].clone();
let diff = language::text_diff(&old, &new);
let edits = diff
.into_iter()
.map(|(old_range, text)| {
let start = edit_range.start + old_range.start;
let end = edit_range.start + old_range.end;
(start..end, text)
})
.collect::<Vec<_>>();
let diff = language::Diff {
base_version: snapshot.version().clone(),
line_ending: snapshot.line_ending(),
edits,
};
anyhow::Ok(DiffResult::Diff(diff))
@@ -378,25 +354,29 @@ impl EditToolRequest {
if !self.bad_searches.is_empty() {
writeln!(
&mut output,
"\n\nThese searches failed because they didn't match any strings:"
"\n\n# {} SEARCH/REPLACE block(s) failed to match:\n",
self.bad_searches.len()
)?;
for replace in self.bad_searches {
writeln!(
&mut output,
"- '{}' does not appear in `{}`",
replace.search.replace("\r", "\\r").replace("\n", "\\n"),
replace.file_path
"## No exact match in: {}\n```\n{}\n```\n",
replace.file_path, replace.search,
)?;
}
write!(&mut output, "Make sure to use exact searches.")?;
write!(&mut output,
"The SEARCH section must exactly match an existing block of lines including all white \
space, comments, indentation, docstrings, etc."
)?;
}
if !errors.is_empty() {
writeln!(
&mut output,
"\n\nThese SEARCH/REPLACE blocks failed to parse:"
"\n\n# {} SEARCH/REPLACE blocks failed to parse:",
errors.len()
)?;
for error in errors {
@@ -404,10 +384,22 @@ impl EditToolRequest {
}
}
if changed_buffer_count > 0 {
writeln!(
&mut output,
"\n\nThe other SEARCH/REPLACE blocks were applied successfully. Do not re-send them!",
)?;
}
writeln!(
&mut output,
"\nYou can fix errors by running the tool again. You can include instructions, \
but errors are part of the conversation so you don't need to repeat them."
"{}You can fix errors by running the tool again. You can include instructions, \
but errors are part of the conversation so you don't need to repeat them.",
if changed_buffer_count == 0 {
"\n\n"
} else {
""
}
)?;
Err(anyhow!(output))
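
Most of the hunks in this file, and in nearly every file below, apply the same mechanical migration: `cx.spawn(|this, mut cx| async move { ... })` becomes `cx.spawn(async move |this, cx| { ... })`, and calls such as `this.update(&mut cx, ...)` become `this.update(cx, ...)`, because the async closure now receives the `AsyncApp` context as `&mut` rather than by value. Where the old closure returned a future-producing expression directly (for example `Self::maintain_connection(this, client.clone(), cx).log_err()` in `Room::new` further down), the async-closure form awaits it inside the body instead. Below is a minimal standalone sketch of the language feature involved, assuming a toolchain with async closures (Rust 1.85+, edition 2024) and the `futures` crate; `Ctx`, `spawn_like`, and `update` are illustrative names, not gpui APIs:

```rust
use std::ops::AsyncFnOnce;

struct Ctx(u32);

impl Ctx {
    fn update(&mut self, delta: u32) -> u32 {
        self.0 += delta;
        self.0
    }
}

// Stand-in for `cx.spawn`: the async closure receives the context by `&mut` reference,
// so its body can call mutating methods directly, without threading a `mut cx` binding.
async fn spawn_like<F>(f: F) -> u32
where
    F: AsyncFnOnce(&mut Ctx) -> u32,
{
    let mut cx = Ctx(0);
    f(&mut cx).await
}

fn main() {
    let result = futures::executor::block_on(spawn_like(async move |cx| {
        // Corresponds to `this.update(cx, ...)` in the hunks above: no `&mut cx` needed.
        cx.update(2)
    }));
    assert_eq!(result, 2);
}
```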

View File

@@ -0,0 +1,525 @@
use language::{BufferSnapshot, Diff, Point, ToOffset};
use project::search::SearchQuery;
use util::{paths::PathMatcher, ResultExt as _};
/// Performs an exact string replacement in a buffer, requiring precise character-for-character matching.
/// Uses the search functionality to locate the first occurrence of the exact string.
/// Returns None if no exact match is found in the buffer.
pub async fn replace_exact(old: &str, new: &str, snapshot: &BufferSnapshot) -> Option<Diff> {
let query = SearchQuery::text(
old,
false,
true,
true,
PathMatcher::new(&[]).ok()?,
PathMatcher::new(&[]).ok()?,
None,
)
.log_err()?;
let matches = query.search(&snapshot, None).await;
if matches.is_empty() {
return None;
}
let edit_range = matches[0].clone();
let diff = language::text_diff(&old, &new);
let edits = diff
.into_iter()
.map(|(old_range, text)| {
let start = edit_range.start + old_range.start;
let end = edit_range.start + old_range.end;
(start..end, text)
})
.collect::<Vec<_>>();
let diff = language::Diff {
base_version: snapshot.version().clone(),
line_ending: snapshot.line_ending(),
edits,
};
Some(diff)
}
/// Performs a replacement that's indentation-aware - matches text content ignoring leading whitespace differences.
/// When replacing, preserves the indentation level found in the buffer at each matching line.
/// Returns None if no match found or if indentation is offset inconsistently across matched lines.
pub fn replace_with_flexible_indent(old: &str, new: &str, buffer: &BufferSnapshot) -> Option<Diff> {
let (old_lines, old_min_indent) = lines_with_min_indent(old);
let (new_lines, new_min_indent) = lines_with_min_indent(new);
let min_indent = old_min_indent.min(new_min_indent);
let old_lines = drop_lines_prefix(&old_lines, min_indent);
let new_lines = drop_lines_prefix(&new_lines, min_indent);
let max_row = buffer.max_point().row;
'windows: for start_row in 0..max_row.saturating_sub(old_lines.len() as u32 - 1) {
let mut common_leading = None;
let end_row = start_row + old_lines.len() as u32 - 1;
if end_row > max_row {
// The buffer ends before fully matching the pattern
return None;
}
let start_point = Point::new(start_row, 0);
let end_point = Point::new(end_row, buffer.line_len(end_row));
let range = start_point.to_offset(buffer)..end_point.to_offset(buffer);
let window_text = buffer.text_for_range(range.clone());
let mut window_lines = window_text.lines();
let mut old_lines_iter = old_lines.iter();
while let (Some(window_line), Some(old_line)) = (window_lines.next(), old_lines_iter.next())
{
let line_trimmed = window_line.trim_start();
if line_trimmed != old_line.trim_start() {
continue 'windows;
}
if line_trimmed.is_empty() {
continue;
}
let line_leading = &window_line[..window_line.len() - old_line.len()];
match &common_leading {
Some(common_leading) if common_leading != line_leading => {
continue 'windows;
}
Some(_) => (),
None => common_leading = Some(line_leading.to_string()),
}
}
if let Some(common_leading) = common_leading {
let line_ending = buffer.line_ending();
let replacement = new_lines
.iter()
.map(|new_line| {
if new_line.trim().is_empty() {
new_line.to_string()
} else {
common_leading.to_string() + new_line
}
})
.collect::<Vec<_>>()
.join(line_ending.as_str());
let diff = Diff {
base_version: buffer.version().clone(),
line_ending,
edits: vec![(range, replacement.into())],
};
return Some(diff);
}
}
None
}
fn drop_lines_prefix<'a>(lines: &'a [&str], prefix_len: usize) -> Vec<&'a str> {
lines
.iter()
.map(|line| line.get(prefix_len..).unwrap_or(""))
.collect()
}
fn lines_with_min_indent(input: &str) -> (Vec<&str>, usize) {
let mut lines = Vec::new();
let mut min_indent: Option<usize> = None;
for line in input.lines() {
lines.push(line);
if !line.trim().is_empty() {
let indent = line.len() - line.trim_start().len();
min_indent = Some(min_indent.map_or(indent, |m| m.min(indent)));
}
}
(lines, min_indent.unwrap_or(0))
}
#[cfg(test)]
mod tests {
use super::*;
use gpui::prelude::*;
use gpui::TestAppContext;
use unindent::Unindent;
#[gpui::test]
fn test_replace_consistent_indentation(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
let x = 5;
println!("x = {}", x);
let y = 10;
}
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 42;
println!("New value: {}", x);
"#
.unindent();
let expected = r#"
fn test() {
let x = 42;
println!("New value: {}", x);
let y = 10;
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[gpui::test]
fn test_replace_inconsistent_indentation(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
if condition {
println!("{}", 43);
}
}
"#
.unindent();
let old = r#"
if condition {
println!("{}", 43);
"#
.unindent();
let new = r#"
if condition {
println!("{}", 42);
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
None
);
}
#[gpui::test]
fn test_replace_with_empty_lines(cx: &mut TestAppContext) {
// Test with empty lines
let whole = r#"
fn test() {
let x = 5;
println!("x = {}", x);
}
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 10;
println!("New x: {}", x);
"#
.unindent();
let expected = r#"
fn test() {
let x = 10;
println!("New x: {}", x);
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[gpui::test]
fn test_replace_no_match(cx: &mut TestAppContext) {
// Test with no match
let whole = r#"
fn test() {
let x = 5;
}
"#
.unindent();
let old = r#"
let y = 10;
"#
.unindent();
let new = r#"
let y = 20;
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
None
);
}
#[gpui::test]
fn test_replace_whole_ends_before_matching_old(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
let x = 5;
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 10;
println!("x = {}", x);
"#
.unindent();
// Should return None because whole doesn't fully contain the old text
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
None
);
}
#[test]
fn test_lines_with_min_indent() {
// Empty string
assert_eq!(lines_with_min_indent(""), (vec![], 0));
// Single line without indentation
assert_eq!(lines_with_min_indent("hello"), (vec!["hello"], 0));
// Multiple lines with no indentation
assert_eq!(
lines_with_min_indent("line1\nline2\nline3"),
(vec!["line1", "line2", "line3"], 0)
);
// Multiple lines with consistent indentation
assert_eq!(
lines_with_min_indent(" line1\n line2\n line3"),
(vec![" line1", " line2", " line3"], 2)
);
// Multiple lines with varying indentation
assert_eq!(
lines_with_min_indent(" line1\n line2\n line3"),
(vec![" line1", " line2", " line3"], 2)
);
// Lines with mixed indentation and empty lines
assert_eq!(
lines_with_min_indent(" line1\n\n line2"),
(vec![" line1", "", " line2"], 2)
);
}
#[gpui::test]
fn test_replace_with_missing_indent_uneven_match(cx: &mut TestAppContext) {
let whole = r#"
fn test() {
if true {
let x = 5;
println!("x = {}", x);
}
}
"#
.unindent();
let old = r#"
let x = 5;
println!("x = {}", x);
"#
.unindent();
let new = r#"
let x = 42;
println!("x = {}", x);
"#
.unindent();
let expected = r#"
fn test() {
if true {
let x = 42;
println!("x = {}", x);
}
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[gpui::test]
fn test_replace_big_example(cx: &mut TestAppContext) {
let whole = r#"
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
}
"#
.unindent();
let old = r#"
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
"#
.unindent();
let new = r#"
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
#[test]
fn test_group_people_by_age() {
let people = vec![
Person::new("Young One", 5, "young@example.com").unwrap(),
Person::new("Teen One", 15, "teen@example.com").unwrap(),
Person::new("Teen Two", 18, "teen2@example.com").unwrap(),
Person::new("Adult One", 25, "adult@example.com").unwrap(),
];
let groups = group_people_by_age(&people);
assert_eq!(groups.get(&0).unwrap().len(), 1); // One person in 0-9
assert_eq!(groups.get(&10).unwrap().len(), 2); // Two people in 10-19
assert_eq!(groups.get(&20).unwrap().len(), 1); // One person in 20-29
}
"#
.unindent();
let expected = r#"
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_is_valid_age() {
assert!(is_valid_age(0));
assert!(!is_valid_age(151));
}
#[test]
fn test_group_people_by_age() {
let people = vec![
Person::new("Young One", 5, "young@example.com").unwrap(),
Person::new("Teen One", 15, "teen@example.com").unwrap(),
Person::new("Teen Two", 18, "teen2@example.com").unwrap(),
Person::new("Adult One", 25, "adult@example.com").unwrap(),
];
let groups = group_people_by_age(&people);
assert_eq!(groups.get(&0).unwrap().len(), 1); // One person in 0-9
assert_eq!(groups.get(&10).unwrap().len(), 2); // Two people in 10-19
assert_eq!(groups.get(&20).unwrap().len(), 1); // One person in 20-29
}
}
"#
.unindent();
assert_eq!(
test_replace_with_flexible_indent(cx, &whole, &old, &new),
Some(expected.to_string())
);
}
#[test]
fn test_drop_lines_prefix() {
// Empty array
assert_eq!(drop_lines_prefix(&[], 2), Vec::<&str>::new());
// Zero prefix length
assert_eq!(
drop_lines_prefix(&["line1", "line2"], 0),
vec!["line1", "line2"]
);
// Normal prefix drop
assert_eq!(
drop_lines_prefix(&[" line1", " line2"], 2),
vec!["line1", "line2"]
);
// Prefix longer than some lines
assert_eq!(drop_lines_prefix(&[" line1", "a"], 2), vec!["line1", ""]);
// Prefix longer than all lines
assert_eq!(drop_lines_prefix(&["a", "b"], 5), vec!["", ""]);
// Mixed length lines
assert_eq!(
drop_lines_prefix(&[" line1", " line2", " line3"], 2),
vec![" line1", "line2", " line3"]
);
}
fn test_replace_with_flexible_indent(
cx: &mut TestAppContext,
whole: &str,
old: &str,
new: &str,
) -> Option<String> {
// Create a local buffer with the test content
let buffer = cx.new(|cx| language::Buffer::local(whole, cx));
// Get the buffer snapshot
let buffer_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
// Call replace_flexible and transform the result
replace_with_flexible_indent(old, new, &buffer_snapshot).map(|diff| {
buffer.update(cx, |buffer, cx| {
let _ = buffer.apply_diff(diff, cx);
buffer.text()
})
})
}
}
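
Taken together, this new module gives the edit tool two matching strategies: `replace_exact`, which uses `SearchQuery` to find a byte-for-byte occurrence of the search text, and `replace_with_flexible_indent`, which compares lines by their trimmed content and then re-applies the leading whitespace found at the match site, returning `None` when no window of lines matches or when the extra indentation is inconsistent across the matched lines. The calling order, quoted from the edit_files.rs hunk earlier (not new code), is exact match first, flexible indentation as the fallback:

```rust
let result =
    // Try to match exactly
    replace_exact(&old, &new, &snapshot)
        .await
        // If that fails, try being flexible about indentation
        .or_else(|| replace_with_flexible_indent(&old, &new, &snapshot));
```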

View File

@@ -70,39 +70,30 @@ impl Tool for ReadFileTool {
return Task::ready(Err(anyhow!("Path not found in project")));
};
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
let buffer = cx
.update(|cx| {
project.update(cx, |project, cx| project.open_buffer(project_path, cx))
})?
.await?;
let result = buffer.read_with(&cx, |buffer, _cx| {
if buffer
.file()
.map_or(false, |file| file.disk_state().exists())
{
let text = buffer.text();
let string = if input.start_line.is_some() || input.end_line.is_some() {
let start = input.start_line.unwrap_or(1);
let lines = text.split('\n').skip(start - 1);
if let Some(end) = input.end_line {
let count = end.saturating_sub(start);
Itertools::intersperse(lines.take(count), "\n").collect()
} else {
Itertools::intersperse(lines, "\n").collect()
}
let result = buffer.read_with(cx, |buffer, _cx| {
let text = buffer.text();
if input.start_line.is_some() || input.end_line.is_some() {
let start = input.start_line.unwrap_or(1);
let lines = text.split('\n').skip(start - 1);
if let Some(end) = input.end_line {
let count = end.saturating_sub(start);
Itertools::intersperse(lines.take(count), "\n").collect()
} else {
text
};
Ok(string)
Itertools::intersperse(lines, "\n").collect()
}
} else {
Err(anyhow!("File does not exist"))
text
}
})??;
})?;
action_log.update(&mut cx, |log, cx| {
action_log.update(cx, |log, cx| {
log.buffer_read(buffer, cx);
})?;
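
The hunk above removes the explicit disk-existence check and keeps only the optional line-range slicing: `start_line` is 1-based, and when `end_line` is supplied, `end - start` lines are taken starting at `start_line`. Here is a standalone sketch of that slicing, assuming the `itertools` crate; the function and parameter names are illustrative, not from the Zed code:

```rust
use itertools::Itertools;

fn slice_lines(text: &str, start_line: usize, end_line: Option<usize>) -> String {
    // Skip to the 1-based start line, then optionally take `end - start` lines,
    // re-joining with newlines as the tool does via `Itertools::intersperse`.
    let lines = text.split('\n').skip(start_line.saturating_sub(1));
    match end_line {
        Some(end) => {
            let count = end.saturating_sub(start_line);
            Itertools::intersperse(lines.take(count), "\n").collect()
        }
        None => Itertools::intersperse(lines, "\n").collect(),
    }
}

fn main() {
    let text = "a\nb\nc\nd";
    assert_eq!(slice_lines(text, 2, Some(4)), "b\nc"); // two lines starting at line 2
    assert_eq!(slice_lines(text, 3, None), "c\nd");    // from line 3 to the end
}
```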

View File

@@ -73,7 +73,7 @@ impl Tool for RegexSearchTool {
let results = project.update(cx, |project, cx| project.search(query, cx));
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
futures::pin_mut!(results);
let mut output = String::new();
@@ -86,7 +86,7 @@ impl Tool for RegexSearchTool {
continue;
}
buffer.read_with(&cx, |buffer, cx| -> Result<(), anyhow::Error> {
buffer.read_with(cx, |buffer, cx| -> Result<(), anyhow::Error> {
if let Some(path) = buffer.file().map(|file| file.full_path(cx)) {
let mut file_header_written = false;
let mut ranges = ranges

View File

@@ -252,11 +252,9 @@ impl AutoUpdater {
}
pub fn start_polling(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
cx.spawn(|this, mut cx| async move {
loop {
this.update(&mut cx, |this, cx| this.poll(cx))?;
cx.background_executor().timer(POLL_INTERVAL).await;
}
cx.spawn(async move |this, cx| loop {
this.update(cx, |this, cx| this.poll(cx))?;
cx.background_executor().timer(POLL_INTERVAL).await;
})
}
@@ -267,9 +265,9 @@ impl AutoUpdater {
cx.notify();
self.pending_poll = Some(cx.spawn(|this, mut cx| async move {
self.pending_poll = Some(cx.spawn(async move |this, cx| {
let result = Self::update(this.upgrade()?, cx.clone()).await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.pending_poll = None;
if let Err(error) = result {
log::error!("auto-update failed: error:{:?}", error);

View File

@@ -64,7 +64,7 @@ fn view_release_notes_locally(
workspace
.with_local_workspace(window, cx, move |_, window, cx| {
cx.spawn_in(window, |workspace, mut cx| async move {
cx.spawn_in(window, async move |workspace, cx| {
let markdown = markdown.await.log_err();
let response = client.get(&url, Default::default(), true).await;
let Some(mut response) = response.log_err() else {
@@ -79,7 +79,7 @@ fn view_release_notes_locally(
if let Ok(body) = body {
workspace
.update_in(&mut cx, |workspace, window, cx| {
.update_in(cx, |workspace, window, cx| {
let project = workspace.project().clone();
let buffer = project.update(cx, |project, cx| {
project.create_local_buffer("", markdown, cx)
@@ -130,7 +130,7 @@ pub fn notify_if_app_was_updated(cx: &mut App) {
return;
};
let should_show_notification = updater.read(cx).should_show_update_notification(cx);
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
let should_show_notification = should_show_notification.await?;
if should_show_notification {
cx.update(|cx| {

View File

@@ -1080,12 +1080,12 @@ impl BufferDiff {
let complete_on_drop = util::defer(|| {
tx.send(()).ok();
});
cx.spawn(|_, mut cx| async move {
cx.spawn(async move |_, cx| {
let snapshot = snapshot.await;
let Some(this) = this.upgrade() else {
return;
};
this.update(&mut cx, |this, _| {
this.update(cx, |this, _| {
this.set_state(snapshot, &buffer);
})
.log_err();

View File

@@ -54,10 +54,10 @@ impl OneAtATime {
{
let (tx, rx) = oneshot::channel();
self.cancel.replace(tx);
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
futures::select_biased! {
_ = rx.fuse() => Ok(None),
result = f(cx).fuse() => result.map(Some),
result = f(cx.clone()).fuse() => result.map(Some),
}
})
}
@@ -192,19 +192,19 @@ impl ActiveCall {
};
let invite = if let Some(room) = room {
cx.spawn(move |_, mut cx| async move {
cx.spawn(async move |_, cx| {
let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
let initial_project_id = if let Some(initial_project) = initial_project {
Some(
room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
room.update(cx, |room, cx| room.share_project(initial_project, cx))?
.await?,
)
} else {
None
};
room.update(&mut cx, move |room, cx| {
room.update(cx, move |room, cx| {
room.call(called_user_id, initial_project_id, cx)
})?
.await?;
@@ -215,7 +215,7 @@ impl ActiveCall {
let client = self.client.clone();
let user_store = self.user_store.clone();
let room = cx
.spawn(move |this, mut cx| async move {
.spawn(async move |this, cx| {
let create_room = async {
let room = cx
.update(|cx| {
@@ -229,14 +229,14 @@ impl ActiveCall {
})?
.await?;
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
this.update(cx, |this, cx| this.set_room(Some(room.clone()), cx))?
.await?;
anyhow::Ok(room)
};
let room = create_room.await;
this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
this.update(cx, |this, _| this.pending_room_creation = None)?;
room.map_err(Arc::new)
})
.shared();
@@ -247,10 +247,10 @@ impl ActiveCall {
})
};
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let result = invite.await;
if result.is_ok() {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.report_call_event("Participant Invited", cx)
})?;
} else {
@@ -258,7 +258,7 @@ impl ActiveCall {
log::error!("invite failed: {:?}", result);
}
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.pending_invites.remove(&called_user_id);
cx.notify();
})?;
@@ -315,11 +315,11 @@ impl ActiveCall {
._join_debouncer
.spawn(cx, move |cx| Room::join(room_id, client, user_store, cx));
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let room = join.await?;
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
this.update(cx, |this, cx| this.set_room(room.clone(), cx))?
.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.report_call_event("Incoming Call Accepted", cx)
})?;
Ok(())
@@ -363,13 +363,11 @@ impl ActiveCall {
Room::join_channel(channel_id, client, user_store, cx).await
});
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let room = join.await?;
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
this.update(cx, |this, cx| this.set_room(room.clone(), cx))?
.await?;
this.update(&mut cx, |this, cx| {
this.report_call_event("Channel Joined", cx)
})?;
this.update(cx, |this, cx| this.report_call_event("Channel Joined", cx))?;
Ok(room)
})
}

View File

@@ -128,7 +128,11 @@ impl Room {
let maintain_connection = cx.spawn({
let client = client.clone();
move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err()
async move |this, cx| {
Self::maintain_connection(this, client.clone(), cx)
.log_err()
.await
}
});
Audio::play_sound(Sound::Joined, cx);
@@ -172,7 +176,7 @@ impl Room {
user_store: Entity<UserStore>,
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
cx.spawn(move |mut cx| async move {
cx.spawn(async move |cx| {
let response = client.request(proto::CreateRoom {}).await?;
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
let room = cx.new(|cx| {
@@ -192,7 +196,7 @@ impl Room {
let initial_project_id = if let Some(initial_project) = initial_project {
let initial_project_id = room
.update(&mut cx, |room, cx| {
.update(cx, |room, cx| {
room.share_project(initial_project.clone(), cx)
})?
.await?;
@@ -202,7 +206,7 @@ impl Room {
};
let did_join = room
.update(&mut cx, |room, cx| {
.update(cx, |room, cx| {
room.leave_when_empty = true;
room.call(called_user_id, initial_project_id, cx)
})?
@@ -358,7 +362,7 @@ impl Room {
async fn maintain_connection(
this: WeakEntity<Self>,
client: Arc<Client>,
mut cx: AsyncApp,
cx: &mut AsyncApp,
) -> Result<()> {
let mut client_status = client.status();
loop {
@@ -370,7 +374,7 @@ impl Room {
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(&mut cx, |this, cx| {
.update(cx, |this, cx| {
this.status = RoomStatus::Rejoining;
cx.notify();
})?;
@@ -386,7 +390,7 @@ impl Room {
log::info!("client reconnected, attempting to rejoin room");
let Some(this) = this.upgrade() else { break };
match this.update(&mut cx, |this, cx| this.rejoin(cx)) {
match this.update(cx, |this, cx| this.rejoin(cx)) {
Ok(task) => {
if task.await.log_err().is_some() {
return true;
@@ -435,7 +439,7 @@ impl Room {
// we leave the room and return an error.
if let Some(this) = this.upgrade() {
log::info!("reconnection failed, leaving room");
this.update(&mut cx, |this, cx| this.leave(cx))?.await?;
this.update(cx, |this, cx| this.leave(cx))?.await?;
}
Err(anyhow!(
"can't reconnect to room: client failed to re-establish connection"
@@ -490,12 +494,12 @@ impl Room {
rejoined_projects,
});
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let response = response.await?;
let message_id = response.message_id;
let response = response.payload;
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.status = RoomStatus::Online;
this.apply_room_update(room_proto, cx)?;
@@ -577,7 +581,7 @@ impl Room {
let client = self.client.clone();
let room_id = self.id;
let role = role.into();
cx.spawn(|_, _| async move {
cx.spawn(async move |_, _| {
client
.request(proto::SetRoomParticipantRole {
room_id,
@@ -709,11 +713,11 @@ impl Room {
user_store.get_users(pending_participant_user_ids, cx),
)
});
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let (remote_participants, pending_participants) =
futures::join!(remote_participants, pending_participants);
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.participant_user_ids.clear();
if let Some(participant) = local_participant {
@@ -1116,7 +1120,7 @@ impl Room {
let client = self.client.clone();
let room_id = self.id;
self.pending_call_count += 1;
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let result = client
.request(proto::Call {
room_id,
@@ -1124,7 +1128,7 @@ impl Room {
initial_project_id,
})
.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.pending_call_count -= 1;
if this.should_leave() {
this.leave(cx).detach_and_log_err(cx);
@@ -1145,11 +1149,11 @@ impl Room {
let client = self.client.clone();
let user_store = self.user_store.clone();
cx.emit(Event::RemoteProjectJoined { project_id: id });
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let project =
Project::in_room(id, client, user_store, language_registry, fs, cx.clone()).await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.joined_projects.retain(|project| {
if let Some(project) = project.upgrade() {
!project.read(cx).is_disconnected(cx)
@@ -1178,15 +1182,13 @@ impl Room {
is_ssh_project: project.read(cx).is_via_ssh(),
});
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let response = request.await?;
project.update(&mut cx, |project, cx| {
project.shared(response.project_id, cx)
})??;
project.update(cx, |project, cx| project.shared(response.project_id, cx))??;
// If the user's location is in this project, it changes from UnsharedProject to SharedProject.
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.shared_projects.insert(project.downgrade());
let active_project = this.local_participant.active_project.as_ref();
if active_project.map_or(false, |location| *location == project) {
@@ -1342,7 +1344,7 @@ impl Room {
return Task::ready(Err(anyhow!("live-kit was not initialized")));
};
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let (track, stream) = capture_local_audio_track(cx.background_executor())?.await;
let publication = participant
@@ -1355,7 +1357,7 @@ impl Room {
)
.await
.map_err(|error| anyhow!("failed to publish track: {error}"));
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let live_kit = this
.live_kit
.as_mut()
@@ -1428,7 +1430,7 @@ impl Room {
let sources = cx.screen_capture_sources();
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let sources = sources.await??;
let source = sources.first().ok_or_else(|| anyhow!("no display found"))?;
@@ -1446,7 +1448,7 @@ impl Room {
.await
.map_err(|error| anyhow!("error publishing screen track {error:?}"));
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let live_kit = this
.live_kit
.as_mut()
@@ -1639,7 +1641,7 @@ fn spawn_room_connection(
cx: &mut Context<'_, Room>,
) {
if let Some(connection_info) = livekit_connection_info {
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let (room, mut events) = livekit::Room::connect(
&connection_info.server_url,
&connection_info.token,
@@ -1647,11 +1649,11 @@ fn spawn_room_connection(
)
.await?;
this.update(&mut cx, |this, cx| {
let _handle_updates = cx.spawn(|this, mut cx| async move {
this.update(cx, |this, cx| {
let _handle_updates = cx.spawn(async move |this, cx| {
while let Some(event) = events.recv().await {
if this
.update(&mut cx, |this, cx| {
.update(cx, |this, cx| {
this.livekit_room_updated(event, cx).warn_on_err();
})
.is_err()

View File

@@ -47,10 +47,10 @@ impl OneAtATime {
{
let (tx, rx) = oneshot::channel();
self.cancel.replace(tx);
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
futures::select_biased! {
_ = rx.fuse() => Ok(None),
result = f(cx).fuse() => result.map(Some),
result = f(cx.clone()).fuse() => result.map(Some),
}
})
}
@@ -185,19 +185,19 @@ impl ActiveCall {
};
let invite = if let Some(room) = room {
cx.spawn(move |_, mut cx| async move {
cx.spawn(async move |_, cx| {
let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
let initial_project_id = if let Some(initial_project) = initial_project {
Some(
room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
room.update(cx, |room, cx| room.share_project(initial_project, cx))?
.await?,
)
} else {
None
};
room.update(&mut cx, move |room, cx| {
room.update(cx, move |room, cx| {
room.call(called_user_id, initial_project_id, cx)
})?
.await?;
@@ -208,7 +208,7 @@ impl ActiveCall {
let client = self.client.clone();
let user_store = self.user_store.clone();
let room = cx
.spawn(move |this, mut cx| async move {
.spawn(async move |this, cx| {
let create_room = async {
let room = cx
.update(|cx| {
@@ -222,14 +222,14 @@ impl ActiveCall {
})?
.await?;
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
this.update(cx, |this, cx| this.set_room(Some(room.clone()), cx))?
.await?;
anyhow::Ok(room)
};
let room = create_room.await;
this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
this.update(cx, |this, _| this.pending_room_creation = None)?;
room.map_err(Arc::new)
})
.shared();
@@ -240,10 +240,10 @@ impl ActiveCall {
})
};
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let result = invite.await;
if result.is_ok() {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.report_call_event("Participant Invited", cx)
})?;
} else {
@@ -251,7 +251,7 @@ impl ActiveCall {
log::error!("invite failed: {:?}", result);
}
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.pending_invites.remove(&called_user_id);
cx.notify();
})?;
@@ -304,15 +304,15 @@ impl ActiveCall {
let room_id = call.room_id;
let client = self.client.clone();
let user_store = self.user_store.clone();
let join = self
._join_debouncer
.spawn(cx, move |cx| Room::join(room_id, client, user_store, cx));
let join = self._join_debouncer.spawn(cx, move |mut cx| async move {
Room::join(room_id, client, user_store, &mut cx).await
});
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let room = join.await?;
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
this.update(cx, |this, cx| this.set_room(room.clone(), cx))?
.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.report_call_event("Incoming Call Accepted", cx)
})?;
Ok(())
@@ -352,17 +352,15 @@ impl ActiveCall {
let client = self.client.clone();
let user_store = self.user_store.clone();
let join = self._join_debouncer.spawn(cx, move |cx| async move {
Room::join_channel(channel_id, client, user_store, cx).await
let join = self._join_debouncer.spawn(cx, move |mut cx| async move {
Room::join_channel(channel_id, client, user_store, &mut cx).await
});
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let room = join.await?;
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
this.update(cx, |this, cx| this.set_room(room.clone(), cx))?
.await?;
this.update(&mut cx, |this, cx| {
this.report_call_event("Channel Joined", cx)
})?;
this.update(cx, |this, cx| this.report_call_event("Channel Joined", cx))?;
Ok(room)
})
}

View File

@@ -115,7 +115,7 @@ impl Room {
let mut status = room.status();
// Consume the initial status of the room.
let _ = status.try_recv();
let _maintain_room = cx.spawn(|this, mut cx| async move {
let _maintain_room = cx.spawn(async move |this, cx| {
while let Some(status) = status.next().await {
let this = if let Some(this) = this.upgrade() {
this
@@ -124,8 +124,7 @@ impl Room {
};
if status == livekit_client_macos::ConnectionState::Disconnected {
this.update(&mut cx, |this, cx| this.leave(cx).log_err())
.ok();
this.update(cx, |this, cx| this.leave(cx).log_err()).ok();
break;
}
}
@@ -133,7 +132,7 @@ impl Room {
let _handle_updates = cx.spawn({
let room = room.clone();
move |this, mut cx| async move {
async move |this, cx| {
let mut updates = room.updates();
while let Some(update) = updates.next().await {
let this = if let Some(this) = this.upgrade() {
@@ -142,7 +141,7 @@ impl Room {
break;
};
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.live_kit_room_updated(update, cx).log_err()
})
.ok();
@@ -151,9 +150,9 @@ impl Room {
});
let connect = room.connect(&connection_info.server_url, &connection_info.token);
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
connect.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if this.can_use_microphone() {
if let Some(live_kit) = &this.live_kit {
if !live_kit.muted_by_user && !live_kit.deafened {
@@ -184,7 +183,11 @@ impl Room {
let maintain_connection = cx.spawn({
let client = client.clone();
move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err()
async move |this, cx| {
Self::maintain_connection(this, client.clone(), cx)
.log_err()
.await
}
});
Audio::play_sound(Sound::Joined, cx);
@@ -228,7 +231,7 @@ impl Room {
user_store: Entity<UserStore>,
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
cx.spawn(move |mut cx| async move {
cx.spawn(async move |cx| {
let response = client.request(proto::CreateRoom {}).await?;
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
let room = cx.new(|cx| {
@@ -248,7 +251,7 @@ impl Room {
let initial_project_id = if let Some(initial_project) = initial_project {
let initial_project_id = room
.update(&mut cx, |room, cx| {
.update(cx, |room, cx| {
room.share_project(initial_project.clone(), cx)
})?
.await?;
@@ -258,7 +261,7 @@ impl Room {
};
let did_join = room
.update(&mut cx, |room, cx| {
.update(cx, |room, cx| {
room.leave_when_empty = true;
room.call(called_user_id, initial_project_id, cx)
})?
@@ -274,7 +277,7 @@ impl Room {
channel_id: ChannelId,
client: Arc<Client>,
user_store: Entity<UserStore>,
cx: AsyncApp,
cx: &mut AsyncApp,
) -> Result<Entity<Self>> {
Self::from_join_response(
client
@@ -292,7 +295,7 @@ impl Room {
room_id: u64,
client: Arc<Client>,
user_store: Entity<UserStore>,
cx: AsyncApp,
cx: &mut AsyncApp,
) -> Result<Entity<Self>> {
Self::from_join_response(
client.request(proto::JoinRoom { id: room_id }).await?,
@@ -333,7 +336,7 @@ impl Room {
response: proto::JoinRoomResponse,
client: Arc<Client>,
user_store: Entity<UserStore>,
mut cx: AsyncApp,
cx: &mut AsyncApp,
) -> Result<Entity<Self>> {
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
let room = cx.new(|cx| {
@@ -346,7 +349,7 @@ impl Room {
cx,
)
})?;
room.update(&mut cx, |room, cx| {
room.update(cx, |room, cx| {
room.leave_when_empty = room.channel_id.is_none();
room.apply_room_update(room_proto, cx)?;
anyhow::Ok(())
@@ -414,7 +417,7 @@ impl Room {
async fn maintain_connection(
this: WeakEntity<Self>,
client: Arc<Client>,
mut cx: AsyncApp,
cx: &mut AsyncApp,
) -> Result<()> {
let mut client_status = client.status();
loop {
@@ -426,7 +429,7 @@ impl Room {
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(&mut cx, |this, cx| {
.update(cx, |this, cx| {
this.status = RoomStatus::Rejoining;
cx.notify();
})?;
@@ -442,7 +445,7 @@ impl Room {
log::info!("client reconnected, attempting to rejoin room");
let Some(this) = this.upgrade() else { break };
match this.update(&mut cx, |this, cx| this.rejoin(cx)) {
match this.update(cx, |this, cx| this.rejoin(cx)) {
Ok(task) => {
if task.await.log_err().is_some() {
return true;
@@ -491,7 +494,7 @@ impl Room {
// we leave the room and return an error.
if let Some(this) = this.upgrade() {
log::info!("reconnection failed, leaving room");
this.update(&mut cx, |this, cx| this.leave(cx))?.await?;
this.update(cx, |this, cx| this.leave(cx))?.await?;
}
Err(anyhow!(
"can't reconnect to room: client failed to re-establish connection"
@@ -546,12 +549,12 @@ impl Room {
rejoined_projects,
});
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let response = response.await?;
let message_id = response.message_id;
let response = response.payload;
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.status = RoomStatus::Online;
this.apply_room_update(room_proto, cx)?;
@@ -633,7 +636,7 @@ impl Room {
let client = self.client.clone();
let room_id = self.id;
let role = role.into();
cx.spawn(|_, _| async move {
cx.spawn(async move |_, _| {
client
.request(proto::SetRoomParticipantRole {
room_id,
@@ -736,11 +739,11 @@ impl Room {
)
});
self.pending_room_update = Some(cx.spawn(|this, mut cx| async move {
self.pending_room_update = Some(cx.spawn(async move |this, cx| {
let (remote_participants, pending_participants) =
futures::join!(remote_participants, pending_participants);
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.participant_user_ids.clear();
if let Some(participant) = local_participant {
@@ -1136,7 +1139,7 @@ impl Room {
let client = self.client.clone();
let room_id = self.id;
self.pending_call_count += 1;
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let result = client
.request(proto::Call {
room_id,
@@ -1144,7 +1147,7 @@ impl Room {
initial_project_id,
})
.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.pending_call_count -= 1;
if this.should_leave() {
this.leave(cx).detach_and_log_err(cx);
@@ -1165,11 +1168,11 @@ impl Room {
let client = self.client.clone();
let user_store = self.user_store.clone();
cx.emit(Event::RemoteProjectJoined { project_id: id });
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let project =
Project::in_room(id, client, user_store, language_registry, fs, cx.clone()).await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.joined_projects.retain(|project| {
if let Some(project) = project.upgrade() {
!project.read(cx).is_disconnected(cx)
@@ -1198,15 +1201,13 @@ impl Room {
is_ssh_project: project.read(cx).is_via_ssh(),
});
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let response = request.await?;
project.update(&mut cx, |project, cx| {
project.shared(response.project_id, cx)
})??;
project.update(cx, |project, cx| project.shared(response.project_id, cx))??;
// If the user's location is in this project, it changes from UnsharedProject to SharedProject.
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.shared_projects.insert(project.downgrade());
let active_project = this.local_participant.active_project.as_ref();
if active_project.map_or(false, |location| *location == project) {
@@ -1348,12 +1349,12 @@ impl Room {
return Task::ready(Err(anyhow!("live-kit was not initialized")));
};
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let publish_track = async {
let track = LocalAudioTrack::create();
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(&mut cx, |this, _| {
.update(cx, |this, _| {
this.live_kit
.as_ref()
.map(|live_kit| live_kit.room.publish_audio_track(track))
@@ -1364,7 +1365,7 @@ impl Room {
let publication = publish_track.await;
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(&mut cx, |this, cx| {
.update(cx, |this, cx| {
let live_kit = this
.live_kit
.as_mut()
@@ -1424,7 +1425,7 @@ impl Room {
return Task::ready(Err(anyhow!("live-kit was not initialized")));
};
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let publish_track = async {
let displays = displays.await?;
let display = displays
@@ -1433,7 +1434,7 @@ impl Room {
let track = LocalVideoTrack::screen_share_for_display(display);
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(&mut cx, |this, _| {
.update(cx, |this, _| {
this.live_kit
.as_ref()
.map(|live_kit| live_kit.room.publish_video_track(track))
@@ -1445,7 +1446,7 @@ impl Room {
let publication = publish_track.await;
this.upgrade()
.ok_or_else(|| anyhow!("room was dropped"))?
.update(&mut cx, |this, cx| {
.update(cx, |this, cx| {
let live_kit = this
.live_kit
.as_mut()

View File

@@ -47,7 +47,7 @@ impl ChannelBuffer {
client: Arc<Client>,
user_store: Entity<UserStore>,
channel_store: Entity<ChannelStore>,
mut cx: AsyncApp,
cx: &mut AsyncApp,
) -> Result<Entity<Self>> {
let response = client
.request(proto::JoinChannelBuffer {
@@ -66,7 +66,7 @@ impl ChannelBuffer {
let capability = channel_store.read(cx).channel_capability(channel.id);
language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text)
})?;
buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
let subscription = client.subscribe_to_entity(channel.id.0)?;
@@ -208,7 +208,7 @@ impl ChannelBuffer {
let client = self.client.clone();
let epoch = self.epoch();
self.acknowledge_task = Some(cx.spawn(move |_, cx| async move {
self.acknowledge_task = Some(cx.spawn(async move |_, cx| {
cx.background_executor()
.timer(ACKNOWLEDGE_DEBOUNCE_INTERVAL)
.await;

View File

@@ -106,7 +106,7 @@ impl ChannelChat {
channel_store: Entity<ChannelStore>,
user_store: Entity<UserStore>,
client: Arc<Client>,
mut cx: AsyncApp,
cx: &mut AsyncApp,
) -> Result<Entity<Self>> {
let channel_id = channel.id;
let subscription = client.subscribe_to_entity(channel_id.0).unwrap();
@@ -132,7 +132,7 @@ impl ChannelChat {
last_acknowledged_id: None,
rng: StdRng::from_entropy(),
first_loaded_message_id: None,
_subscription: subscription.set_entity(&cx.entity(), &mut cx.to_async()),
_subscription: subscription.set_entity(&cx.entity(), &cx.to_async()),
}
})?;
Self::handle_loaded_messages(
@@ -141,7 +141,7 @@ impl ChannelChat {
client,
response.messages,
response.done,
&mut cx,
cx,
)
.await?;
Ok(handle)
@@ -205,7 +205,7 @@ impl ChannelChat {
let outgoing_messages_lock = self.outgoing_messages_lock.clone();
// todo - handle messages that fail to send (e.g. >1024 chars)
Ok(cx.spawn(move |this, mut cx| async move {
Ok(cx.spawn(async move |this, cx| {
let outgoing_message_guard = outgoing_messages_lock.lock().await;
let request = rpc.request(proto::SendChannelMessage {
channel_id: channel_id.0,
@@ -218,8 +218,8 @@ impl ChannelChat {
drop(outgoing_message_guard);
let response = response.message.ok_or_else(|| anyhow!("invalid message"))?;
let id = response.id;
let message = ChannelMessage::from_proto(response, &user_store, &mut cx).await?;
this.update(&mut cx, |this, cx| {
let message = ChannelMessage::from_proto(response, &user_store, cx).await?;
this.update(cx, |this, cx| {
this.insert_messages(SumTree::from_item(message, &()), cx);
if this.first_loaded_message_id.is_none() {
this.first_loaded_message_id = Some(id);
@@ -234,9 +234,9 @@ impl ChannelChat {
channel_id: self.channel_id.0,
message_id: id,
});
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
response.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.message_removed(id, cx);
})?;
Ok(())
@@ -266,7 +266,7 @@ impl ChannelChat {
nonce: Some(nonce.into()),
mentions: mentions_to_proto(&message.mentions),
});
Ok(cx.spawn(move |_, _| async move {
Ok(cx.spawn(async move |_, _| {
request.await?;
Ok(())
}))
@@ -281,7 +281,7 @@ impl ChannelChat {
let user_store = self.user_store.clone();
let channel_id = self.channel_id;
let before_message_id = self.first_loaded_message_id()?;
Some(cx.spawn(move |this, mut cx| {
Some(cx.spawn(async move |this, cx| {
async move {
let response = rpc
.request(proto::GetChannelMessages {
@@ -295,13 +295,14 @@ impl ChannelChat {
rpc,
response.messages,
response.done,
&mut cx,
cx,
)
.await?;
anyhow::Ok(())
}
.log_err()
.await
}))
}
@@ -439,7 +440,7 @@ impl ChannelChat {
let user_store = self.user_store.clone();
let rpc = self.rpc.clone();
let channel_id = self.channel_id;
cx.spawn(move |this, mut cx| {
cx.spawn(async move |this, cx| {
async move {
let response = rpc
.request(proto::JoinChannelChat {
@@ -452,11 +453,11 @@ impl ChannelChat {
rpc.clone(),
response.messages,
response.done,
&mut cx,
cx,
)
.await?;
let pending_messages = this.update(&mut cx, |this, _| {
let pending_messages = this.update(cx, |this, _| {
this.pending_messages().cloned().collect::<Vec<_>>()
})?;
@@ -472,10 +473,10 @@ impl ChannelChat {
let message = ChannelMessage::from_proto(
response.message.ok_or_else(|| anyhow!("invalid message"))?,
&user_store,
&mut cx,
cx,
)
.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.insert_messages(SumTree::from_item(message, &()), cx);
})?;
}
@@ -483,6 +484,7 @@ impl ChannelChat {
anyhow::Ok(())
}
.log_err()
.await
})
.detach();
}

View File

@@ -164,22 +164,22 @@ impl ChannelStore {
let mut connection_status = client.status();
let (update_channels_tx, mut update_channels_rx) = mpsc::unbounded();
let watch_connection_status = cx.spawn(|this, mut cx| async move {
let watch_connection_status = cx.spawn(async move |this, cx| {
while let Some(status) = connection_status.next().await {
let this = this.upgrade()?;
match status {
client::Status::Connected { .. } => {
this.update(&mut cx, |this, cx| this.handle_connect(cx))
this.update(cx, |this, cx| this.handle_connect(cx))
.ok()?
.await
.log_err()?;
}
client::Status::SignedOut | client::Status::UpgradeRequired => {
this.update(&mut cx, |this, cx| this.handle_disconnect(false, cx))
this.update(cx, |this, cx| this.handle_disconnect(false, cx))
.ok();
}
_ => {
this.update(&mut cx, |this, cx| this.handle_disconnect(true, cx))
this.update(cx, |this, cx| this.handle_disconnect(true, cx))
.ok();
}
}
@@ -200,13 +200,12 @@ impl ChannelStore {
_rpc_subscriptions: rpc_subscriptions,
_watch_connection_status: watch_connection_status,
disconnect_channel_buffers_task: None,
_update_channels: cx.spawn(|this, mut cx| async move {
_update_channels: cx.spawn(async move |this, cx| {
maybe!(async move {
while let Some(update_channels) = update_channels_rx.next().await {
if let Some(this) = this.upgrade() {
let update_task = this.update(&mut cx, |this, cx| {
this.update_channels(update_channels, cx)
})?;
let update_task = this
.update(cx, |this, cx| this.update_channels(update_channels, cx))?;
if let Some(update_task) = update_task {
update_task.await.log_err();
}
@@ -310,7 +309,9 @@ impl ChannelStore {
self.open_channel_resource(
channel_id,
|this| &mut this.opened_buffers,
|channel, cx| ChannelBuffer::new(channel, client, user_store, channel_store, cx),
async move |channel, cx| {
ChannelBuffer::new(channel, client, user_store, channel_store, cx).await
},
cx,
)
}
@@ -328,14 +329,14 @@ impl ChannelStore {
.request(proto::GetChannelMessagesById { message_ids }),
)
};
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
if let Some(request) = request {
let response = request.await?;
let this = this
.upgrade()
.ok_or_else(|| anyhow!("channel store dropped"))?;
let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
ChannelMessage::from_proto_vec(response.messages, &user_store, &mut cx).await
let user_store = this.update(cx, |this, _| this.user_store.clone())?;
ChannelMessage::from_proto_vec(response.messages, &user_store, cx).await
} else {
Ok(Vec::new())
}
@@ -440,7 +441,7 @@ impl ChannelStore {
self.open_channel_resource(
channel_id,
|this| &mut this.opened_chats,
|channel, cx| ChannelChat::new(channel, this, user_store, client, cx),
async move |channel, cx| ChannelChat::new(channel, this, user_store, client, cx).await,
cx,
)
}
@@ -450,7 +451,7 @@ impl ChannelStore {
/// Make sure that the resource is only opened once, even if this method
/// is called multiple times with the same channel id while the first task
/// is still running.
fn open_channel_resource<T, F, Fut>(
fn open_channel_resource<T, F>(
&mut self,
channel_id: ChannelId,
get_map: fn(&mut Self) -> &mut HashMap<ChannelId, OpenEntityHandle<T>>,
@@ -458,8 +459,7 @@ impl ChannelStore {
cx: &mut Context<Self>,
) -> Task<Result<Entity<T>>>
where
F: 'static + FnOnce(Arc<Channel>, AsyncApp) -> Fut,
Fut: Future<Output = Result<Entity<T>>>,
F: AsyncFnOnce(Arc<Channel>, &mut AsyncApp) -> Result<Entity<T>> + 'static,
T: 'static,
{
let task = loop {
@@ -479,8 +479,8 @@ impl ChannelStore {
},
hash_map::Entry::Vacant(e) => {
let task = cx
.spawn(move |this, mut cx| async move {
let channel = this.update(&mut cx, |this, _| {
.spawn(async move |this, cx| {
let channel = this.update(cx, |this, _| {
this.channel_for_id(channel_id).cloned().ok_or_else(|| {
Arc::new(anyhow!("no channel for id: {}", channel_id))
})
@@ -493,9 +493,9 @@ impl ChannelStore {
e.insert(OpenEntityHandle::Loading(task.clone()));
cx.spawn({
let task = task.clone();
move |this, mut cx| async move {
async move |this, cx| {
let result = task.await;
this.update(&mut cx, |this, _| match result {
this.update(cx, |this, _| match result {
Ok(model) => {
get_map(this).insert(
channel_id,
@@ -570,7 +570,7 @@ impl ChannelStore {
) -> Task<Result<ChannelId>> {
let client = self.client.clone();
let name = name.trim_start_matches('#').to_owned();
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let response = client
.request(proto::CreateChannel {
name,
@@ -583,7 +583,7 @@ impl ChannelStore {
.ok_or_else(|| anyhow!("missing channel in response"))?;
let channel_id = ChannelId(channel.id);
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let task = this.update_channels(
proto::UpdateChannels {
channels: vec![channel],
@@ -611,7 +611,7 @@ impl ChannelStore {
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
cx.spawn(move |_, _| async move {
cx.spawn(async move |_, _| {
let _ = client
.request(proto::MoveChannel {
channel_id: channel_id.0,
@@ -630,7 +630,7 @@ impl ChannelStore {
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let client = self.client.clone();
cx.spawn(move |_, _| async move {
cx.spawn(async move |_, _| {
let _ = client
.request(proto::SetChannelVisibility {
channel_id: channel_id.0,
@@ -655,7 +655,7 @@ impl ChannelStore {
cx.notify();
let client = self.client.clone();
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let result = client
.request(proto::InviteChannelMember {
channel_id: channel_id.0,
@@ -664,7 +664,7 @@ impl ChannelStore {
})
.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.outgoing_invites.remove(&(channel_id, user_id));
cx.notify();
})?;
@@ -687,7 +687,7 @@ impl ChannelStore {
cx.notify();
let client = self.client.clone();
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let result = client
.request(proto::RemoveChannelMember {
channel_id: channel_id.0,
@@ -695,7 +695,7 @@ impl ChannelStore {
})
.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.outgoing_invites.remove(&(channel_id, user_id));
cx.notify();
})?;
@@ -717,7 +717,7 @@ impl ChannelStore {
cx.notify();
let client = self.client.clone();
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let result = client
.request(proto::SetChannelMemberRole {
channel_id: channel_id.0,
@@ -726,7 +726,7 @@ impl ChannelStore {
})
.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.outgoing_invites.remove(&(channel_id, user_id));
cx.notify();
})?;
@@ -744,7 +744,7 @@ impl ChannelStore {
) -> Task<Result<()>> {
let client = self.client.clone();
let name = new_name.to_string();
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let channel = client
.request(proto::RenameChannel {
channel_id: channel_id.0,
@@ -753,7 +753,7 @@ impl ChannelStore {
.await?
.channel
.ok_or_else(|| anyhow!("missing channel in response"))?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let task = this.update_channels(
proto::UpdateChannels {
channels: vec![channel],
@@ -799,7 +799,7 @@ impl ChannelStore {
) -> Task<Result<Vec<ChannelMembership>>> {
let client = self.client.clone();
let user_store = self.user_store.downgrade();
cx.spawn(move |_, mut cx| async move {
cx.spawn(async move |_, cx| {
let response = client
.request(proto::GetChannelMembers {
channel_id: channel_id.0,
@@ -807,7 +807,7 @@ impl ChannelStore {
limit: limit as u64,
})
.await?;
user_store.update(&mut cx, |user_store, _| {
user_store.update(cx, |user_store, _| {
user_store.insert(response.users);
response
.members
@@ -931,10 +931,10 @@ impl ChannelStore {
buffers: buffer_versions,
});
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let mut response = response.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.opened_buffers.retain(|_, buffer| match buffer {
OpenEntityHandle::Open(channel_buffer) => {
let Some(channel_buffer) = channel_buffer.upgrade() else {
@@ -1006,13 +1006,13 @@ impl ChannelStore {
cx.notify();
self.did_subscribe = false;
self.disconnect_channel_buffers_task.get_or_insert_with(|| {
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
if wait_for_reconnect {
cx.background_executor().timer(RECONNECT_TIMEOUT).await;
}
if let Some(this) = this.upgrade() {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
for (_, buffer) in this.opened_buffers.drain() {
if let OpenEntityHandle::Open(buffer) = buffer {
if let Some(buffer) = buffer.upgrade() {
@@ -1136,10 +1136,10 @@ impl ChannelStore {
let users = self
.user_store
.update(cx, |user_store, cx| user_store.get_users(all_user_ids, cx));
Some(cx.spawn(|this, mut cx| async move {
Some(cx.spawn(async move |this, cx| {
let users = users.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
for entry in &channel_participants {
let mut participants: Vec<_> = entry
.participant_user_ids
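
In `open_channel_resource` above, the pair of generic parameters (`F: FnOnce(Arc<Channel>, AsyncApp) -> Fut` plus `Fut: Future<...>`) is collapsed into a single `AsyncFnOnce` bound that takes `&mut AsyncApp`. The practical difference: with a separate `Fut` parameter the returned future cannot borrow from the closure's arguments, so the context had to be moved in by value, whereas the `AsyncFnOnce` bound allows that borrow, which is why callers can now pass `async move |channel, cx| { ... }` over a mutable reference. A generic standalone sketch of the two shapes, using placeholder types rather than the Zed API:

```rust
use std::future::Future;
use std::ops::AsyncFnOnce;

// Old shape: the future is a separate type parameter, so it cannot borrow `ctx`;
// the context has to be handed over by value.
async fn open_owned<F, Fut>(ctx: String, f: F) -> usize
where
    F: FnOnce(String) -> Fut,
    Fut: Future<Output = usize>,
{
    f(ctx).await
}

// New shape: the closure's future may borrow its `&mut` argument, mirroring
// `F: AsyncFnOnce(Arc<Channel>, &mut AsyncApp) -> Result<Entity<T>>` above.
// Usage: `open_borrowed(&mut s, async move |s| s.len()).await`
async fn open_borrowed<F>(ctx: &mut String, f: F) -> usize
where
    F: AsyncFnOnce(&mut String) -> usize,
{
    f(ctx).await
}
```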

View File

@@ -144,9 +144,9 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
let client = client.clone();
move |_: &SignIn, cx| {
if let Some(client) = client.upgrade() {
cx.spawn(
|cx| async move { client.authenticate_and_connect(true, &cx).log_err().await },
)
cx.spawn(async move |cx| {
client.authenticate_and_connect(true, &cx).log_err().await
})
.detach();
}
}
@@ -156,7 +156,7 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
let client = client.clone();
move |_: &SignOut, cx| {
if let Some(client) = client.upgrade() {
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
client.sign_out(&cx).await;
})
.detach();
@@ -168,7 +168,7 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
let client = client.clone();
move |_: &Reconnect, cx| {
if let Some(client) = client.upgrade() {
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
client.reconnect(&cx);
})
.detach();
@@ -640,7 +640,7 @@ impl Client {
}
Status::ConnectionLost => {
let this = self.clone();
state._reconnect_task = Some(cx.spawn(move |cx| async move {
state._reconnect_task = Some(cx.spawn(async move |cx| {
#[cfg(any(test, feature = "test-support"))]
let mut rng = StdRng::seed_from_u64(0);
#[cfg(not(any(test, feature = "test-support")))]
@@ -964,13 +964,11 @@ impl Client {
cx.spawn({
let this = self.clone();
|cx| {
async move {
while let Some(message) = incoming.next().await {
this.handle_message(message, &cx);
// Don't starve the main thread when receiving lots of messages at once.
smol::future::yield_now().await;
}
async move |cx| {
while let Some(message) = incoming.next().await {
this.handle_message(message, &cx);
// Don't starve the main thread when receiving lots of messages at once.
smol::future::yield_now().await;
}
}
})
@@ -978,23 +976,21 @@ impl Client {
cx.spawn({
let this = self.clone();
move |cx| async move {
match handle_io.await {
Ok(()) => {
if *this.status().borrow()
== (Status::Connected {
connection_id,
peer_id,
})
{
this.set_status(Status::SignedOut, &cx);
}
}
Err(err) => {
log::error!("connection error: {:?}", err);
this.set_status(Status::ConnectionLost, &cx);
async move |cx| match handle_io.await {
Ok(()) => {
if *this.status().borrow()
== (Status::Connected {
connection_id,
peer_id,
})
{
this.set_status(Status::SignedOut, &cx);
}
}
Err(err) => {
log::error!("connection error: {:?}", err);
this.set_status(Status::ConnectionLost, &cx);
}
}
})
.detach();
@@ -1178,12 +1174,12 @@ impl Client {
pub fn authenticate_with_browser(self: &Arc<Self>, cx: &AsyncApp) -> Task<Result<Credentials>> {
let http = self.http.clone();
let this = self.clone();
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
let background = cx.background_executor().clone();
let (open_url_tx, open_url_rx) = oneshot::channel::<String>();
cx.update(|cx| {
cx.spawn(move |cx| async move {
cx.spawn(async move |cx| {
let url = open_url_rx.await?;
cx.update(|cx| cx.open_url(&url))
})
@@ -1545,25 +1541,23 @@ impl Client {
original_sender_id,
type_name
);
cx.spawn(move |_| async move {
match future.await {
Ok(()) => {
log::debug!(
"rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
client_id,
original_sender_id,
type_name
);
}
Err(error) => {
log::error!(
"error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
client_id,
original_sender_id,
type_name,
error
);
}
cx.spawn(async move |_| match future.await {
Ok(()) => {
log::debug!(
"rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
client_id,
original_sender_id,
type_name
);
}
Err(error) => {
log::error!(
"error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
client_id,
original_sender_id,
type_name,
error
);
}
})
.detach();

View File

@@ -44,7 +44,7 @@ impl FakeServer {
let state = Arc::downgrade(&server.state);
move |cx| {
let state = state.clone();
cx.spawn(move |_| async move {
cx.spawn(async move |_| {
let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
let mut state = state.lock();
state.auth_count += 1;
@@ -63,7 +63,7 @@ impl FakeServer {
let peer = peer.clone();
let state = state.clone();
let credentials = credentials.clone();
cx.spawn(move |cx| async move {
cx.spawn(async move |cx| {
let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
let peer = peer.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
if state.lock().forbid_connections {

View File

@@ -168,11 +168,10 @@ impl UserStore {
invite_info: None,
client: Arc::downgrade(&client),
update_contacts_tx,
_maintain_contacts: cx.spawn(|this, mut cx| async move {
_maintain_contacts: cx.spawn(async move |this, cx| {
let _subscriptions = rpc_subscriptions;
while let Some(message) = update_contacts_rx.next().await {
if let Ok(task) =
this.update(&mut cx, |this, cx| this.update_contacts(message, cx))
if let Ok(task) = this.update(cx, |this, cx| this.update_contacts(message, cx))
{
task.log_err().await;
} else {
@@ -180,7 +179,7 @@ impl UserStore {
}
}
}),
_maintain_current_user: cx.spawn(|this, mut cx| async move {
_maintain_current_user: cx.spawn(async move |this, cx| {
let mut status = client.status();
let weak = Arc::downgrade(&client);
drop(client);
@@ -192,10 +191,9 @@ impl UserStore {
match status {
Status::Connected { .. } => {
if let Some(user_id) = client.user_id() {
let fetch_user = if let Ok(fetch_user) = this
.update(&mut cx, |this, cx| {
this.get_user(user_id, cx).log_err()
}) {
let fetch_user = if let Ok(fetch_user) =
this.update(cx, |this, cx| this.get_user(user_id, cx).log_err())
{
fetch_user
} else {
break;
@@ -239,12 +237,12 @@ impl UserStore {
current_user_tx.send(user).await.ok();
this.update(&mut cx, |_, cx| cx.notify())?;
this.update(cx, |_, cx| cx.notify())?;
}
}
Status::SignedOut => {
current_user_tx.send(None).await.ok();
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.accepted_tos_at = None;
cx.emit(Event::PrivateUserInfoUpdated);
cx.notify();
@@ -253,7 +251,7 @@ impl UserStore {
.await;
}
Status::ConnectionLost => {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
cx.notify();
this.clear_contacts()
})?
@@ -350,7 +348,7 @@ impl UserStore {
user_ids.extend(message.outgoing_requests.iter());
let load_users = self.get_users(user_ids.into_iter().collect(), cx);
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
load_users.await?;
// Users are fetched in parallel above and cached in call to get_users
@@ -360,25 +358,22 @@ impl UserStore {
.upgrade()
.ok_or_else(|| anyhow!("can't upgrade user store handle"))?;
for contact in message.contacts {
updated_contacts.push(Arc::new(
Contact::from_proto(contact, &this, &mut cx).await?,
));
updated_contacts
.push(Arc::new(Contact::from_proto(contact, &this, cx).await?));
}
let mut incoming_requests = Vec::new();
for request in message.incoming_requests {
incoming_requests.push({
this.update(&mut cx, |this, cx| {
this.get_user(request.requester_id, cx)
})?
.await?
this.update(cx, |this, cx| this.get_user(request.requester_id, cx))?
.await?
});
}
let mut outgoing_requests = Vec::new();
for requested_user_id in message.outgoing_requests {
outgoing_requests.push(
this.update(&mut cx, |this, cx| this.get_user(requested_user_id, cx))?
this.update(cx, |this, cx| this.get_user(requested_user_id, cx))?
.await?,
);
}
@@ -390,7 +385,7 @@ impl UserStore {
let removed_outgoing_requests =
HashSet::<u64>::from_iter(message.remove_outgoing_requests.iter().copied());
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
// Remove contacts
this.contacts
.retain(|contact| !removed_contacts.contains(&contact.user.id));
@@ -543,7 +538,7 @@ impl UserStore {
cx: &Context<Self>,
) -> Task<Result<()>> {
let client = self.client.upgrade();
cx.spawn(move |_, _| async move {
cx.spawn(async move |_, _| {
client
.ok_or_else(|| anyhow!("can't upgrade client reference"))?
.request(proto::RespondToContactRequest {
@@ -565,12 +560,12 @@ impl UserStore {
*self.pending_contact_requests.entry(user_id).or_insert(0) += 1;
cx.notify();
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
let response = client
.ok_or_else(|| anyhow!("can't upgrade client reference"))?
.request(request)
.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if let Entry::Occupied(mut request_count) =
this.pending_contact_requests.entry(user_id)
{
@@ -614,9 +609,9 @@ impl UserStore {
let mut user_ids_to_fetch = user_ids.clone();
user_ids_to_fetch.retain(|id| !self.users.contains_key(id));
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
if !user_ids_to_fetch.is_empty() {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.load_users(
proto::GetUsers {
user_ids: user_ids_to_fetch,
@@ -627,7 +622,7 @@ impl UserStore {
.await?;
}
this.update(&mut cx, |this, _| {
this.update(cx, |this, _| {
user_ids
.iter()
.map(|user_id| {
@@ -668,9 +663,9 @@ impl UserStore {
}
let load_users = self.get_users(vec![user_id], cx);
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
load_users.await?;
this.update(&mut cx, |this, _| {
this.update(cx, |this, _| {
this.users
.get(&user_id)
.cloned()
@@ -708,14 +703,14 @@ impl UserStore {
};
let client = self.client.clone();
cx.spawn(move |this, mut cx| async move {
cx.spawn(async move |this, cx| {
if let Some(client) = client.upgrade() {
let response = client
.request(proto::AcceptTermsOfService {})
.await
.context("error accepting tos")?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.set_current_user_accepted_tos_at(Some(response.accepted_tos_at));
cx.emit(Event::PrivateUserInfoUpdated);
})
@@ -737,12 +732,12 @@ impl UserStore {
cx: &Context<Self>,
) -> Task<Result<Vec<Arc<User>>>> {
let client = self.client.clone();
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
if let Some(rpc) = client.upgrade() {
let response = rpc.request(request).await.context("error loading users")?;
let users = response.users;
this.update(&mut cx, |this, _| this.insert(users))
this.update(cx, |this, _| this.insert(users))
} else {
Ok(Vec::new())
}
@@ -796,8 +791,8 @@ impl UserStore {
}
if !missing_user_ids.is_empty() {
let this = self.weak_self.clone();
cx.spawn(|mut cx| async move {
this.update(&mut cx, |this, cx| this.get_users(missing_user_ids, cx))?
cx.spawn(async move |cx| {
this.update(cx, |this, cx| this.get_users(missing_user_ids, cx))?
.await
})
.detach_and_log_err(cx);
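All of these UserStore tasks drive a weak handle: `this.update(cx, ..)` only succeeds while the entity is still alive, which is why the loops either propagate the failure with `?` or `break` out. A self-contained sketch of that pattern with plain `Rc`/`Weak` (the `WeakHandle` type here is hypothetical, not gpui's `WeakEntity`):

```rust
use std::cell::RefCell;
use std::rc::{Rc, Weak};

struct UserStore {
    contacts: Vec<String>,
}

// Hypothetical stand-in for a weak entity handle: `update` only succeeds while
// the strong owner is still alive, mirroring how the tasks above bail out when
// `this.update(cx, ..)` returns an error.
struct WeakHandle(Weak<RefCell<UserStore>>);

impl WeakHandle {
    fn update<R>(&self, f: impl FnOnce(&mut UserStore) -> R) -> Result<R, &'static str> {
        match self.0.upgrade() {
            Some(store) => Ok(f(&mut store.borrow_mut())),
            None => Err("entity released"),
        }
    }
}

fn main() {
    let store = Rc::new(RefCell::new(UserStore { contacts: Vec::new() }));
    let weak = WeakHandle(Rc::downgrade(&store));

    // While the store is alive, updates succeed.
    assert!(weak.update(|s| s.contacts.push("alice".into())).is_ok());

    // After the owner drops the store, the background task stops doing work.
    drop(store);
    assert!(weak.update(|s| s.contacts.clear()).is_err());
}
```

Returning `Result` from `update` is what lets a long-running maintenance loop shut itself down cleanly once the store is dropped.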

View File

@@ -660,10 +660,6 @@ fn for_snowflake(
e.event_type.clone(),
serde_json::to_value(&e.event_properties).unwrap(),
),
Event::AssistantThreadFeedback(e) => (
"Assistant Feedback".to_string(),
serde_json::to_value(&e).unwrap(),
),
};
if let serde_json::Value::Object(ref mut map) = event_properties {

View File

@@ -562,7 +562,7 @@ async fn test_channel_buffers_and_server_restarts(
deterministic.run_until_parked();
// Client C can't reconnect.
client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
client_c.override_establish_connection(|_, cx| cx.spawn(async |_| future::pending().await));
// Server stops.
server.reset().await;

View File

@@ -103,7 +103,6 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
}),
)
.await;
client_a.fs().recalculate_git_status(Path::new("/a/.git"));
cx_b.run_until_parked();
project_b.update(cx_b, |project, cx| {

View File

@@ -14,8 +14,6 @@ use client::{User, RECEIVE_TIMEOUT};
use collections::{HashMap, HashSet};
use fs::{FakeFs, Fs as _, RemoveOptions};
use futures::{channel::mpsc, StreamExt as _};
use prompt_store::PromptBuilder;
use git::status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode};
use gpui::{
px, size, App, BackgroundExecutor, Entity, Modifiers, MouseButton, MouseDownEvent,
@@ -30,11 +28,13 @@ use language::{
};
use lsp::LanguageServerId;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use project::{
lsp_store::{FormatTrigger, LspFormatTarget},
search::{SearchQuery, SearchResult},
DiagnosticSummary, HoverBlockKind, Project, ProjectPath,
};
use prompt_store::PromptBuilder;
use rand::prelude::*;
use serde_json::json;
use settings::SettingsStore;
@@ -983,7 +983,7 @@ async fn test_server_restarts(
server.reset().await;
// Users A and B reconnect to the call. User C has troubles reconnecting, so it leaves the room.
client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
client_c.override_establish_connection(|_, cx| cx.spawn(async |_| future::pending().await));
executor.advance_clock(RECONNECT_TIMEOUT);
assert_eq!(
room_participants(&room_a, cx_a),
@@ -1156,9 +1156,9 @@ async fn test_server_restarts(
server.reset().await;
// Users A and B have troubles reconnecting, so they leave the room.
client_a.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
client_b.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
client_a.override_establish_connection(|_, cx| cx.spawn(async |_| future::pending().await));
client_b.override_establish_connection(|_, cx| cx.spawn(async |_| future::pending().await));
client_c.override_establish_connection(|_, cx| cx.spawn(async |_| future::pending().await));
executor.advance_clock(RECONNECT_TIMEOUT);
assert_eq!(
room_participants(&room_a, cx_a),
@@ -2623,13 +2623,13 @@ async fn test_git_diff_base_change(
});
// Create remote buffer
let buffer_remote_a = project_remote
let remote_buffer_a = project_remote
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
.await
.unwrap();
let remote_unstaged_diff_a = project_remote
.update(cx_b, |p, cx| {
p.open_unstaged_diff(buffer_remote_a.clone(), cx)
p.open_unstaged_diff(remote_buffer_a.clone(), cx)
})
.await
.unwrap();
@@ -2637,7 +2637,7 @@ async fn test_git_diff_base_change(
// Wait remote buffer to catch up to the new diff
executor.run_until_parked();
remote_unstaged_diff_a.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_a.read(cx);
let buffer = remote_buffer_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(staged_text.as_str())
@@ -2653,13 +2653,13 @@ async fn test_git_diff_base_change(
// Open uncommitted changes on the guest, without opening them on the host first
let remote_uncommitted_diff_a = project_remote
.update(cx_b, |p, cx| {
p.open_uncommitted_diff(buffer_remote_a.clone(), cx)
p.open_uncommitted_diff(remote_buffer_a.clone(), cx)
})
.await
.unwrap();
executor.run_until_parked();
remote_uncommitted_diff_a.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_a.read(cx);
let buffer = remote_buffer_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(committed_text.as_str())
@@ -2703,8 +2703,9 @@ async fn test_git_diff_base_change(
);
});
// Guest receives index text update
remote_unstaged_diff_a.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_a.read(cx);
let buffer = remote_buffer_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_staged_text.as_str())
@@ -2718,7 +2719,7 @@ async fn test_git_diff_base_change(
});
remote_uncommitted_diff_a.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_a.read(cx);
let buffer = remote_buffer_a.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_committed_text.as_str())
@@ -2783,20 +2784,20 @@ async fn test_git_diff_base_change(
});
// Create remote buffer
let buffer_remote_b = project_remote
let remote_buffer_b = project_remote
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
.await
.unwrap();
let remote_unstaged_diff_b = project_remote
.update(cx_b, |p, cx| {
p.open_unstaged_diff(buffer_remote_b.clone(), cx)
p.open_unstaged_diff(remote_buffer_b.clone(), cx)
})
.await
.unwrap();
executor.run_until_parked();
remote_unstaged_diff_b.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_b.read(cx);
let buffer = remote_buffer_b.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(staged_text.as_str())
@@ -2832,7 +2833,7 @@ async fn test_git_diff_base_change(
});
remote_unstaged_diff_b.read_with(cx_b, |diff, cx| {
let buffer = buffer_remote_b.read(cx);
let buffer = remote_buffer_b.read(cx);
assert_eq!(
diff.base_text_string().as_deref(),
Some(new_staged_text.as_str())
@@ -2957,15 +2958,38 @@ async fn test_git_status_sync(
.insert_tree(
"/dir",
json!({
".git": {},
"a.txt": "a",
"b.txt": "b",
".git": {},
"a.txt": "a",
"b.txt": "b",
"c.txt": "c",
}),
)
.await;
const A_TXT: &str = "a.txt";
const B_TXT: &str = "b.txt";
// Initially, a.txt is uncommitted, but present in the index,
// and b.txt is unmerged.
client_a.fs().set_head_for_repo(
"/dir/.git".as_ref(),
&[("b.txt".into(), "B".into()), ("c.txt".into(), "c".into())],
);
client_a.fs().set_index_for_repo(
"/dir/.git".as_ref(),
&[
("a.txt".into(), "".into()),
("b.txt".into(), "B".into()),
("c.txt".into(), "c".into()),
],
);
client_a.fs().set_unmerged_paths_for_repo(
"/dir/.git".as_ref(),
&[(
"b.txt".into(),
UnmergedStatus {
first_head: UnmergedStatusCode::Updated,
second_head: UnmergedStatusCode::Deleted,
},
)],
);
const A_STATUS_START: FileStatus = FileStatus::Tracked(TrackedStatus {
index_status: StatusCode::Added,
@@ -2976,14 +3000,6 @@ async fn test_git_status_sync(
second_head: UnmergedStatusCode::Deleted,
});
client_a.fs().set_status_for_repo_via_git_operation(
Path::new("/dir/.git"),
&[
(Path::new(A_TXT), A_STATUS_START),
(Path::new(B_TXT), B_STATUS_START),
],
);
let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| {
@@ -2999,7 +3015,7 @@ async fn test_git_status_sync(
#[track_caller]
fn assert_status(
file: &impl AsRef<Path>,
file: impl AsRef<Path>,
status: Option<FileStatus>,
project: &Project,
cx: &App,
@@ -3013,13 +3029,15 @@ async fn test_git_status_sync(
}
project_local.read_with(cx_a, |project, cx| {
assert_status(&Path::new(A_TXT), Some(A_STATUS_START), project, cx);
assert_status(&Path::new(B_TXT), Some(B_STATUS_START), project, cx);
assert_status("a.txt", Some(A_STATUS_START), project, cx);
assert_status("b.txt", Some(B_STATUS_START), project, cx);
assert_status("c.txt", None, project, cx);
});
project_remote.read_with(cx_b, |project, cx| {
assert_status(&Path::new(A_TXT), Some(A_STATUS_START), project, cx);
assert_status(&Path::new(B_TXT), Some(B_STATUS_START), project, cx);
assert_status("a.txt", Some(A_STATUS_START), project, cx);
assert_status("b.txt", Some(B_STATUS_START), project, cx);
assert_status("c.txt", None, project, cx);
});
const A_STATUS_END: FileStatus = FileStatus::Tracked(TrackedStatus {
@@ -3028,30 +3046,42 @@ async fn test_git_status_sync(
});
const B_STATUS_END: FileStatus = FileStatus::Tracked(TrackedStatus {
index_status: StatusCode::Deleted,
worktree_status: StatusCode::Unmodified,
worktree_status: StatusCode::Added,
});
const C_STATUS_END: FileStatus = FileStatus::Tracked(TrackedStatus {
index_status: StatusCode::Unmodified,
worktree_status: StatusCode::Modified,
});
client_a.fs().set_status_for_repo_via_working_copy_change(
Path::new("/dir/.git"),
&[
(Path::new(A_TXT), A_STATUS_END),
(Path::new(B_TXT), B_STATUS_END),
],
// Delete b.txt from the index, mark conflict as resolved,
// and modify c.txt in the working copy.
client_a.fs().set_index_for_repo(
"/dir/.git".as_ref(),
&[("a.txt".into(), "a".into()), ("c.txt".into(), "c".into())],
);
client_a
.fs()
.set_unmerged_paths_for_repo("/dir/.git".as_ref(), &[]);
client_a
.fs()
.atomic_write("/dir/c.txt".into(), "CC".into())
.await
.unwrap();
// Wait for buffer_local_a to receive it
executor.run_until_parked();
// Smoke test status reading
project_local.read_with(cx_a, |project, cx| {
assert_status(&Path::new(A_TXT), Some(A_STATUS_END), project, cx);
assert_status(&Path::new(B_TXT), Some(B_STATUS_END), project, cx);
assert_status("a.txt", Some(A_STATUS_END), project, cx);
assert_status("b.txt", Some(B_STATUS_END), project, cx);
assert_status("c.txt", Some(C_STATUS_END), project, cx);
});
project_remote.read_with(cx_b, |project, cx| {
assert_status(&Path::new(A_TXT), Some(A_STATUS_END), project, cx);
assert_status(&Path::new(B_TXT), Some(B_STATUS_END), project, cx);
assert_status("a.txt", Some(A_STATUS_END), project, cx);
assert_status("b.txt", Some(B_STATUS_END), project, cx);
assert_status("c.txt", Some(C_STATUS_END), project, cx);
});
// And synchronization while joining
@@ -3059,8 +3089,9 @@ async fn test_git_status_sync(
executor.run_until_parked();
project_remote_c.read_with(cx_c, |project, cx| {
assert_status(&Path::new(A_TXT), Some(A_STATUS_END), project, cx);
assert_status(&Path::new(B_TXT), Some(B_STATUS_END), project, cx);
assert_status("a.txt", Some(A_STATUS_END), project, cx);
assert_status("b.txt", Some(B_STATUS_END), project, cx);
assert_status("c.txt", Some(C_STATUS_END), project, cx);
});
}
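The rewritten test no longer writes `FileStatus` values into the fake repository directly; it sets HEAD, index, and unmerged-path contents and lets the statuses fall out of that state. A rough sketch of the derivation it relies on, assuming a simplified model in which a path's contents in HEAD, the index, and the working tree fully determine its tracked status (the real `FileStatus`/`TrackedStatus` types live in the `git` crate and carry more detail):

```rust
#[derive(Debug, PartialEq, Clone, Copy)]
enum StatusCode {
    Added,
    Modified,
    Deleted,
    Unmodified,
}

#[derive(Debug, PartialEq, Clone, Copy)]
struct TrackedStatus {
    index_status: StatusCode,
    worktree_status: StatusCode,
}

// index_status compares HEAD with the index; worktree_status compares the
// index with the working copy. `None` means the path is absent on that side.
fn compare(base: Option<&str>, other: Option<&str>) -> StatusCode {
    match (base, other) {
        (None, Some(_)) => StatusCode::Added,
        (Some(_), None) => StatusCode::Deleted,
        (Some(a), Some(b)) if a != b => StatusCode::Modified,
        _ => StatusCode::Unmodified,
    }
}

fn tracked_status(head: Option<&str>, index: Option<&str>, worktree: Option<&str>) -> TrackedStatus {
    TrackedStatus {
        index_status: compare(head, index),
        worktree_status: compare(index, worktree),
    }
}

fn main() {
    // a.txt at the start: absent from HEAD, present in the index and on disk.
    assert_eq!(
        tracked_status(None, Some(""), Some("a")),
        TrackedStatus { index_status: StatusCode::Added, worktree_status: StatusCode::Modified }
    );
    // b.txt at the end: still in HEAD, removed from the index, present on disk.
    assert_eq!(
        tracked_status(Some("B"), None, Some("b")),
        TrackedStatus { index_status: StatusCode::Deleted, worktree_status: StatusCode::Added }
    );
    // c.txt at the end: identical in HEAD and the index, edited on disk.
    assert_eq!(
        tracked_status(Some("c"), Some("c"), Some("CC")),
        TrackedStatus { index_status: StatusCode::Unmodified, worktree_status: StatusCode::Modified }
    );
}
```

With this model the assertions read directly off the fixture: a.txt starts index-Added because it is staged but not in HEAD, b.txt ends index-Deleted/worktree-Added after being dropped from the index, and c.txt ends worktree-Modified after the `atomic_write` of "CC".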

View File

@@ -128,7 +128,6 @@ enum GitOperation {
WriteGitStatuses {
repo_path: PathBuf,
statuses: Vec<(PathBuf, FileStatus)>,
git_operation: bool,
},
}
@@ -987,7 +986,6 @@ impl RandomizedTest for ProjectCollaborationTest {
GitOperation::WriteGitStatuses {
repo_path,
statuses,
git_operation,
} => {
if !client.fs().directories(false).contains(&repo_path) {
return Err(TestError::Inapplicable);
@@ -1016,17 +1014,9 @@ impl RandomizedTest for ProjectCollaborationTest {
client.fs().create_dir(&dot_git_dir).await?;
}
if git_operation {
client.fs().set_status_for_repo_via_git_operation(
&dot_git_dir,
statuses.as_slice(),
);
} else {
client.fs().set_status_for_repo_via_working_copy_change(
&dot_git_dir,
statuses.as_slice(),
);
}
client
.fs()
.set_status_for_repo(&dot_git_dir, statuses.as_slice());
}
},
}
@@ -1455,18 +1445,13 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation
}
64..=100 => {
let file_paths = generate_file_paths(&repo_path, rng, client);
let statuses = file_paths
.into_iter()
.map(|path| (path, gen_status(rng)))
.collect::<Vec<_>>();
let git_operation = rng.gen::<bool>();
GitOperation::WriteGitStatuses {
repo_path,
statuses,
git_operation,
}
}
_ => unreachable!(),
@@ -1605,15 +1590,24 @@ fn gen_file_name(rng: &mut StdRng) -> String {
}
fn gen_status(rng: &mut StdRng) -> FileStatus {
fn gen_status_code(rng: &mut StdRng) -> StatusCode {
match rng.gen_range(0..7) {
0 => StatusCode::Modified,
1 => StatusCode::TypeChanged,
2 => StatusCode::Added,
3 => StatusCode::Deleted,
4 => StatusCode::Renamed,
5 => StatusCode::Copied,
6 => StatusCode::Unmodified,
fn gen_tracked_status(rng: &mut StdRng) -> TrackedStatus {
match rng.gen_range(0..3) {
0 => TrackedStatus {
index_status: StatusCode::Unmodified,
worktree_status: StatusCode::Unmodified,
},
1 => TrackedStatus {
index_status: StatusCode::Modified,
worktree_status: StatusCode::Modified,
},
2 => TrackedStatus {
index_status: StatusCode::Added,
worktree_status: StatusCode::Modified,
},
3 => TrackedStatus {
index_status: StatusCode::Added,
worktree_status: StatusCode::Unmodified,
},
_ => unreachable!(),
}
}
@@ -1627,17 +1621,12 @@ fn gen_status(rng: &mut StdRng) -> FileStatus {
}
}
match rng.gen_range(0..4) {
0 => FileStatus::Untracked,
1 => FileStatus::Ignored,
2 => FileStatus::Unmerged(UnmergedStatus {
match rng.gen_range(0..2) {
0 => FileStatus::Unmerged(UnmergedStatus {
first_head: gen_unmerged_status_code(rng),
second_head: gen_unmerged_status_code(rng),
}),
3 => FileStatus::Tracked(TrackedStatus {
index_status: gen_status_code(rng),
worktree_status: gen_status_code(rng),
}),
1 => FileStatus::Tracked(gen_tracked_status(rng)),
_ => unreachable!(),
}
}
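`UnmergedStatus` pairs one code per merge side, which lines up with git's two-letter conflict notation. Assuming `first_head` is the "ours" column and `second_head` the "theirs" column (that mapping is an assumption here, not something the diff states), the Updated/Deleted conflict used for b.txt earlier would render as `UD`, "deleted by them":

```rust
#[derive(Clone, Copy)]
enum UnmergedStatusCode {
    Added,
    Updated,
    Deleted,
}

struct UnmergedStatus {
    first_head: UnmergedStatusCode,  // assumed to be the "ours" side
    second_head: UnmergedStatusCode, // assumed to be the "theirs" side
}

// Render the pair the way `git status --porcelain` prints a conflict entry.
fn porcelain_code(status: UnmergedStatus) -> String {
    fn letter(code: UnmergedStatusCode) -> char {
        match code {
            UnmergedStatusCode::Added => 'A',
            UnmergedStatusCode::Updated => 'U',
            UnmergedStatusCode::Deleted => 'D',
        }
    }
    format!("{}{}", letter(status.first_head), letter(status.second_head))
}

fn main() {
    // The conflict from the status-sync test: updated on our side, deleted on theirs.
    let conflict = UnmergedStatus {
        first_head: UnmergedStatusCode::Updated,
        second_head: UnmergedStatusCode::Deleted,
    };
    assert_eq!(porcelain_code(conflict), "UD");
}
```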

View File

@@ -208,8 +208,8 @@ impl TestServer {
.unwrap()
.set_id(user_id.to_proto())
.override_authenticate(move |cx| {
cx.spawn(|_| async move {
let access_token = "the-token".to_string();
let access_token = "the-token".to_string();
cx.spawn(async move |_| {
Ok(Credentials {
user_id: user_id.to_proto(),
access_token,
@@ -230,7 +230,7 @@ impl TestServer {
let connection_killers = connection_killers.clone();
let forbid_connections = forbid_connections.clone();
let client_name = client_name.clone();
cx.spawn(move |cx| async move {
cx.spawn(async move |cx| {
if forbid_connections.load(SeqCst) {
Err(EstablishConnectionError::other(anyhow!(
"server is forbidding connections"

View File

@@ -64,9 +64,9 @@ impl ChannelView {
window,
cx,
);
window.spawn(cx, |mut cx| async move {
window.spawn(cx, async move |cx| {
let channel_view = channel_view.await?;
pane.update_in(&mut cx, |pane, window, cx| {
pane.update_in(cx, |pane, window, cx| {
telemetry::event!(
"Channel Notes Opened",
channel_id,
@@ -90,10 +90,10 @@ impl ChannelView {
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
let channel_view = Self::load(channel_id, workspace, window, cx);
window.spawn(cx, |mut cx| async move {
window.spawn(cx, async move |cx| {
let channel_view = channel_view.await?;
pane.update_in(&mut cx, |pane, window, cx| {
pane.update_in(cx, |pane, window, cx| {
let buffer_id = channel_view.read(cx).channel_buffer.read(cx).remote_id(cx);
let existing_view = pane
@@ -166,11 +166,11 @@ impl ChannelView {
let channel_buffer =
channel_store.update(cx, |store, cx| store.open_channel_buffer(channel_id, cx));
window.spawn(cx, |mut cx| async move {
window.spawn(cx, async move |cx| {
let channel_buffer = channel_buffer.await?;
let markdown = markdown.await.log_err();
channel_buffer.update(&mut cx, |channel_buffer, cx| {
channel_buffer.update(cx, |channel_buffer, cx| {
channel_buffer.buffer().update(cx, |buffer, cx| {
buffer.set_language_registry(language_registry);
let Some(markdown) = markdown else {
@@ -583,10 +583,10 @@ impl FollowableItem for ChannelView {
let open = ChannelView::load(ChannelId(state.channel_id), workspace, window, cx);
Some(window.spawn(cx, |mut cx| async move {
Some(window.spawn(cx, async move |cx| {
let this = open.await?;
let task = this.update_in(&mut cx, |this, window, cx| {
let task = this.update_in(cx, |this, window, cx| {
this.remote_id = Some(remote_id);
if let Some(state) = state.editor {

View File

@@ -199,7 +199,7 @@ impl ChatPanel {
workspace: WeakEntity<Workspace>,
cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
let serialized_panel = if let Some(panel) = cx
.background_spawn(async move { KEY_VALUE_STORE.read_kvp(CHAT_PANEL_KEY) })
.await
@@ -211,7 +211,7 @@ impl ChatPanel {
None
};
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.update_in(cx, |workspace, window, cx| {
let panel = Self::new(workspace, window, cx);
if let Some(serialized_panel) = serialized_panel {
panel.update(cx, |panel, cx| {
@@ -867,10 +867,10 @@ impl ChatPanel {
})
});
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
let chat = open_chat.await?;
let highlight_message_id = scroll_to_message_id;
let scroll_to_message_id = this.update(&mut cx, |this, cx| {
let scroll_to_message_id = this.update(cx, |this, cx| {
this.set_active_chat(chat.clone(), cx);
scroll_to_message_id.or(this.last_acknowledged_message_id)
@@ -881,11 +881,11 @@ impl ChatPanel {
ChannelChat::load_history_since_message(chat.clone(), message_id, cx.clone())
.await
{
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if let Some(highlight_message_id) = highlight_message_id {
let task = cx.spawn(|this, mut cx| async move {
let task = cx.spawn(async move |this, cx| {
cx.background_executor().timer(Duration::from_secs(2)).await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.highlighted_message.take();
cx.notify();
})

View File

@@ -137,11 +137,9 @@ impl MessageEditor {
.detach();
let markdown = language_registry.language_for_name("Markdown");
cx.spawn_in(window, |_, mut cx| async move {
cx.spawn_in(window, async move |_, cx| {
let markdown = markdown.await.context("failed to load Markdown language")?;
buffer.update(&mut cx, |buffer, cx| {
buffer.set_language(Some(markdown), cx)
})
buffer.update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx))
})
.detach_and_log_err(cx);
@@ -232,7 +230,7 @@ impl MessageEditor {
) {
if let language::BufferEvent::Reparsed | language::BufferEvent::Edited = event {
let buffer = buffer.read(cx).snapshot();
self.mentions_task = Some(cx.spawn_in(window, |this, cx| async move {
self.mentions_task = Some(cx.spawn_in(window, async move |this, cx| {
cx.background_executor()
.timer(MENTIONS_DEBOUNCE_INTERVAL)
.await;
@@ -251,7 +249,7 @@ impl MessageEditor {
self.collect_mention_candidates(buffer, end_anchor, cx)
{
if !candidates.is_empty() {
return cx.spawn(|_, cx| async move {
return cx.spawn(async move |_, cx| {
Ok(Some(
Self::resolve_completions_for_candidates(
&cx,
@@ -270,7 +268,7 @@ impl MessageEditor {
self.collect_emoji_candidates(buffer, end_anchor, cx)
{
if !candidates.is_empty() {
return cx.spawn(|_, cx| async move {
return cx.spawn(async move |_, cx| {
Ok(Some(
Self::resolve_completions_for_candidates(
&cx,
@@ -453,7 +451,7 @@ impl MessageEditor {
async fn find_mentions(
this: WeakEntity<MessageEditor>,
buffer: BufferSnapshot,
mut cx: AsyncWindowContext,
cx: &mut AsyncWindowContext,
) {
let (buffer, ranges) = cx
.background_spawn(async move {
@@ -462,7 +460,7 @@ impl MessageEditor {
})
.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let mut anchor_ranges = Vec::new();
let mut mentioned_user_ids = Vec::new();
let mut text = String::new();

View File

@@ -1569,9 +1569,9 @@ impl CollabPanel {
channel_store.create_channel(&channel_name, *location, cx)
});
if location.is_none() {
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let channel_id = create.await?;
this.update_in(&mut cx, |this, window, cx| {
this.update_in(cx, |this, window, cx| {
this.show_channel_modal(
channel_id,
channel_modal::Mode::InviteMembers,
@@ -1944,8 +1944,8 @@ impl CollabPanel {
let user_store = self.user_store.clone();
let channel_store = self.channel_store.clone();
cx.spawn_in(window, |_, mut cx| async move {
workspace.update_in(&mut cx, |workspace, window, cx| {
cx.spawn_in(window, async move |_, cx| {
workspace.update_in(cx, |workspace, window, cx| {
workspace.toggle_modal(window, cx, |window, cx| {
ChannelModal::new(
user_store.clone(),
@@ -1976,11 +1976,11 @@ impl CollabPanel {
&["Leave", "Cancel"],
cx,
);
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
if answer.await? != 0 {
return Ok(());
}
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.channel_store.update(cx, |channel_store, cx| {
channel_store.remove_member(channel_id, user_id, cx)
})
@@ -2009,13 +2009,13 @@ impl CollabPanel {
&["Remove", "Cancel"],
cx,
);
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
if answer.await? == 0 {
channel_store
.update(&mut cx, |channels, _| channels.remove_channel(channel_id))?
.update(cx, |channels, _| channels.remove_channel(channel_id))?
.await
.notify_async_err(&mut cx);
this.update_in(&mut cx, |_, window, cx| cx.focus_self(window))
.notify_async_err(cx);
this.update_in(cx, |_, window, cx| cx.focus_self(window))
.ok();
}
anyhow::Ok(())
@@ -2043,12 +2043,12 @@ impl CollabPanel {
&["Remove", "Cancel"],
cx,
);
cx.spawn_in(window, |_, mut cx| async move {
cx.spawn_in(window, async move |_, cx| {
if answer.await? == 0 {
user_store
.update(&mut cx, |store, cx| store.remove_contact(user_id, cx))?
.update(cx, |store, cx| store.remove_contact(user_id, cx))?
.await
.notify_async_err(&mut cx);
.notify_async_err(cx);
}
anyhow::Ok(())
})
@@ -2161,11 +2161,11 @@ impl CollabPanel {
.full_width()
.on_click(cx.listener(|this, _, window, cx| {
let client = this.client.clone();
cx.spawn_in(window, |_, mut cx| async move {
cx.spawn_in(window, async move |_, cx| {
client
.authenticate_and_connect(true, &cx)
.await
.notify_async_err(&mut cx);
.notify_async_err(cx);
})
.detach()
})),

View File

@@ -300,9 +300,9 @@ impl PickerDelegate for ChannelModalDelegate {
cx.background_executor().clone(),
));
cx.spawn_in(window, |picker, mut cx| async move {
cx.spawn_in(window, async move |picker, cx| {
picker
.update(&mut cx, |picker, cx| {
.update(cx, |picker, cx| {
let delegate = &mut picker.delegate;
delegate.matching_member_indices.clear();
delegate
@@ -316,10 +316,10 @@ impl PickerDelegate for ChannelModalDelegate {
let search_members = self.channel_store.update(cx, |store, cx| {
store.fuzzy_search_members(self.channel_id, query.clone(), 100, cx)
});
cx.spawn_in(window, |picker, mut cx| async move {
cx.spawn_in(window, async move |picker, cx| {
async {
let members = search_members.await?;
picker.update(&mut cx, |picker, cx| {
picker.update(cx, |picker, cx| {
picker.delegate.has_all_members =
query.is_empty() && members.len() < 100;
picker.delegate.matching_member_indices =
@@ -338,10 +338,10 @@ impl PickerDelegate for ChannelModalDelegate {
let search_users = self
.user_store
.update(cx, |store, cx| store.fuzzy_search_users(query, cx));
cx.spawn_in(window, |picker, mut cx| async move {
cx.spawn_in(window, async move |picker, cx| {
async {
let users = search_users.await?;
picker.update(&mut cx, |picker, cx| {
picker.update(cx, |picker, cx| {
picker.delegate.matching_users = users;
cx.notify();
})?;
@@ -489,9 +489,9 @@ impl ChannelModalDelegate {
let update = self.channel_store.update(cx, |store, cx| {
store.set_member_role(self.channel_id, user_id, new_role, cx)
});
cx.spawn_in(window, |picker, mut cx| async move {
cx.spawn_in(window, async move |picker, cx| {
update.await?;
picker.update_in(&mut cx, |picker, window, cx| {
picker.update_in(cx, |picker, window, cx| {
let this = &mut picker.delegate;
if let Some(member) = this.members.iter_mut().find(|m| m.user.id == user_id) {
member.role = new_role;
@@ -513,9 +513,9 @@ impl ChannelModalDelegate {
let update = self.channel_store.update(cx, |store, cx| {
store.remove_member(self.channel_id, user_id, cx)
});
cx.spawn_in(window, |picker, mut cx| async move {
cx.spawn_in(window, async move |picker, cx| {
update.await?;
picker.update_in(&mut cx, |picker, window, cx| {
picker.update_in(cx, |picker, window, cx| {
let this = &mut picker.delegate;
if let Some(ix) = this.members.iter_mut().position(|m| m.user.id == user_id) {
this.members.remove(ix);
@@ -551,10 +551,10 @@ impl ChannelModalDelegate {
store.invite_member(self.channel_id, user.id, ChannelRole::Member, cx)
});
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
invite_member.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
let new_member = ChannelMembership {
user,
kind: proto::channel_member::Kind::Invitee,

View File

@@ -102,10 +102,10 @@ impl PickerDelegate for ContactFinderDelegate {
.user_store
.update(cx, |store, cx| store.fuzzy_search_users(query, cx));
cx.spawn_in(window, |picker, mut cx| async move {
cx.spawn_in(window, async move |picker, cx| {
async {
let potential_contacts = search_users.await?;
picker.update(&mut cx, |picker, cx| {
picker.update(cx, |picker, cx| {
picker.delegate.potential_contacts = potential_contacts.into();
cx.notify();
})?;

View File

@@ -96,10 +96,10 @@ impl NotificationPanel {
cx.new(|cx| {
let mut status = client.status();
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
while (status.next().await).is_some() {
if this
.update(&mut cx, |_: &mut Self, cx| {
.update(cx, |_: &mut Self, cx| {
cx.notify();
})
.is_err()
@@ -181,7 +181,7 @@ impl NotificationPanel {
workspace: WeakEntity<Workspace>,
cx: AsyncWindowContext,
) -> Task<Result<Entity<Self>>> {
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
let serialized_panel = if let Some(panel) = cx
.background_spawn(async move { KEY_VALUE_STORE.read_kvp(NOTIFICATION_PANEL_KEY) })
.await
@@ -193,7 +193,7 @@ impl NotificationPanel {
None
};
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.update_in(cx, |workspace, window, cx| {
let panel = Self::new(workspace, window, cx);
if let Some(serialized_panel) = serialized_panel {
panel.update(cx, |panel, cx| {
@@ -445,12 +445,12 @@ impl NotificationPanel {
.entry(notification_id)
.or_insert_with(|| {
let client = self.client.clone();
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
cx.background_executor().timer(MARK_AS_READ_DELAY).await;
client
.request(proto::MarkNotificationRead { notification_id })
.await?;
this.update(&mut cx, |this, _| {
this.update(cx, |this, _| {
this.mark_as_read_tasks.remove(&notification_id);
})?;
Ok(())
@@ -556,9 +556,9 @@ impl NotificationPanel {
let notification_id = entry.id;
self.current_notification_toast = Some((
notification_id,
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
cx.background_executor().timer(TOAST_DURATION).await;
this.update(&mut cx, |this, cx| this.remove_toast(notification_id, cx))
this.update(cx, |this, cx| this.remove_toast(notification_id, cx))
.ok();
}),
));
@@ -643,7 +643,7 @@ impl Render for NotificationPanel {
move |_, window, cx| {
let client = client.clone();
window
.spawn(cx, move |cx| async move {
.spawn(cx, async move |cx| {
client
.authenticate_and_connect(true, &cx)
.log_err()

View File

@@ -12,12 +12,12 @@ use workspace::AppState;
pub fn init(app_state: &Arc<AppState>, cx: &mut App) {
let app_state = Arc::downgrade(app_state);
let mut incoming_call = ActiveCall::global(cx).read(cx).incoming();
cx.spawn(|mut cx| async move {
cx.spawn(async move |cx| {
let mut notification_windows: Vec<WindowHandle<IncomingCallNotification>> = Vec::new();
while let Some(incoming_call) = incoming_call.next().await {
for window in notification_windows.drain(..) {
window
.update(&mut cx, |_, window, _| {
.update(cx, |_, window, _| {
window.remove_window();
})
.log_err();
@@ -75,7 +75,7 @@ impl IncomingCallNotificationState {
let initial_project_id = self.call.initial_project.as_ref().map(|project| project.id);
let app_state = self.app_state.clone();
let cx: &mut App = cx;
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
join.await?;
if let Some(project_id) = initial_project_id {
cx.update(|cx| {

View File

@@ -327,13 +327,13 @@ impl PickerDelegate for CommandPaletteDelegate {
});
self.updating_matches = Some((task, rx.clone()));
cx.spawn_in(window, move |picker, mut cx| async move {
cx.spawn_in(window, async move |picker, cx| {
let Some((commands, matches)) = rx.recv().await else {
return;
};
picker
.update(&mut cx, |picker, cx| {
.update(cx, |picker, cx| {
picker
.delegate
.matches_updated(query, commands, matches, cx)

View File

@@ -560,7 +560,7 @@ impl SerializableItem for ComponentPreview {
let user_store = project.read(cx).user_store().clone();
let language_registry = project.read(cx).languages().clone();
window.spawn(cx, |mut cx| async move {
window.spawn(cx, async move |cx| {
let user_store = user_store.clone();
let language_registry = language_registry.clone();
let weak_workspace = workspace.clone();

View File

@@ -171,13 +171,17 @@ impl Client {
let notification_handlers = notification_handlers.clone();
let response_handlers = response_handlers.clone();
let transport = transport.clone();
move |cx| {
async move |cx| {
Self::handle_input(transport, notification_handlers, response_handlers, cx)
.log_err()
.await
}
});
let stderr_input_task = cx.spawn(|_| Self::handle_stderr(transport.clone()).log_err());
let input_task = cx.spawn(|_| async move {
let stderr_input_task = cx.spawn({
let transport = transport.clone();
async move |_| Self::handle_stderr(transport).log_err().await
});
let input_task = cx.spawn(async move |_| {
let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task);
stdout.or(stderr)
});
@@ -217,7 +221,7 @@ impl Client {
transport: Arc<dyn Transport>,
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
response_handlers: Arc<Mutex<Option<HashMap<RequestId, ResponseHandler>>>>,
cx: AsyncApp,
cx: &mut AsyncApp,
) -> anyhow::Result<()> {
let mut receiver = transport.receive();

View File

@@ -41,16 +41,15 @@ impl ExtensionContextServerProxy for ContextServerFactoryRegistryProxy {
let id = id.clone();
let extension = extension.clone();
cx.spawn(|mut cx| async move {
let extension_project =
project.update(&mut cx, |project, cx| {
Arc::new(ExtensionProject {
worktree_ids: project
.visible_worktrees(cx)
.map(|worktree| worktree.read(cx).id().to_proto())
.collect(),
})
})?;
cx.spawn(async move |cx| {
let extension_project = project.update(cx, |project, cx| {
Arc::new(ExtensionProject {
worktree_ids: project
.visible_worktrees(cx)
.map(|worktree| worktree.read(cx).id().to_proto())
.collect(),
})
})?;
let command = extension
.context_server_command(id.clone(), extension_project)

View File

@@ -147,15 +147,15 @@ impl ContextServerManager {
if self.update_servers_task.is_some() {
self.needs_server_update = true;
} else {
self.update_servers_task = Some(cx.spawn(|this, mut cx| async move {
this.update(&mut cx, |this, _| {
self.update_servers_task = Some(cx.spawn(async move |this, cx| {
this.update(cx, |this, _| {
this.needs_server_update = false;
})?;
Self::maintain_servers(this.clone(), cx.clone()).await?;
Self::maintain_servers(this.clone(), cx).await?;
this.update(&mut cx, |this, cx| {
let has_any_context_servers = !this.servers().is_empty();
this.update(cx, |this, cx| {
let has_any_context_servers = !this.running_servers().is_empty();
if has_any_context_servers {
CommandPaletteFilter::update_global(cx, |filter, _cx| {
filter.show_namespace(CONTEXT_SERVERS_NAMESPACE);
@@ -180,19 +180,44 @@ impl ContextServerManager {
.cloned()
}
pub fn start_server(
&self,
server: Arc<ContextServer>,
cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> {
cx.spawn(async move |this, cx| {
let id = server.id.clone();
server.start(&cx).await?;
this.update(cx, |_, cx| cx.emit(Event::ServerStarted { server_id: id }))?;
Ok(())
})
}
pub fn stop_server(
&self,
server: Arc<ContextServer>,
cx: &mut Context<Self>,
) -> anyhow::Result<()> {
server.stop()?;
cx.emit(Event::ServerStopped {
server_id: server.id(),
});
Ok(())
}
pub fn restart_server(
&mut self,
id: &Arc<str>,
cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> {
let id = id.clone();
cx.spawn(|this, mut cx| async move {
if let Some(server) = this.update(&mut cx, |this, _cx| this.servers.remove(&id))? {
cx.spawn(async move |this, cx| {
if let Some(server) = this.update(cx, |this, _cx| this.servers.remove(&id))? {
server.stop()?;
let config = server.config();
let new_server = Arc::new(ContextServer::new(id.clone(), config));
new_server.clone().start(&cx).await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.servers.insert(id.clone(), new_server);
cx.emit(Event::ServerStopped {
server_id: id.clone(),
@@ -206,7 +231,11 @@ impl ContextServerManager {
})
}
pub fn servers(&self) -> Vec<Arc<ContextServer>> {
pub fn all_servers(&self) -> Vec<Arc<ContextServer>> {
self.servers.values().cloned().collect()
}
pub fn running_servers(&self) -> Vec<Arc<ContextServer>> {
self.servers
.values()
.filter(|server| server.client().is_some())
@@ -214,10 +243,10 @@ impl ContextServerManager {
.collect()
}
async fn maintain_servers(this: WeakEntity<Self>, mut cx: AsyncApp) -> Result<()> {
async fn maintain_servers(this: WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
let mut desired_servers = HashMap::default();
let (registry, project) = this.update(&mut cx, |this, cx| {
let (registry, project) = this.update(cx, |this, cx| {
let location = this.project.read(cx).worktrees(cx).next().map(|worktree| {
settings::SettingsLocation {
worktree_id: worktree.read(cx).id(),
@@ -231,7 +260,7 @@ impl ContextServerManager {
})?;
for (id, factory) in
registry.read_with(&cx, |registry, _| registry.context_server_factories())?
registry.read_with(cx, |registry, _| registry.context_server_factories())?
{
let config = desired_servers.entry(id).or_default();
if config.command.is_none() {
@@ -244,7 +273,7 @@ impl ContextServerManager {
let mut servers_to_start = HashMap::default();
let mut servers_to_stop = HashMap::default();
this.update(&mut cx, |this, _cx| {
this.update(cx, |this, _cx| {
this.servers.retain(|id, server| {
if desired_servers.contains_key(id) {
true
@@ -270,16 +299,12 @@ impl ContextServerManager {
for (id, server) in servers_to_stop {
server.stop().log_err();
this.update(&mut cx, |_, cx| {
cx.emit(Event::ServerStopped { server_id: id })
})?;
this.update(cx, |_, cx| cx.emit(Event::ServerStopped { server_id: id }))?;
}
for (id, server) in servers_to_start {
if server.start(&cx).await.log_err().is_some() {
this.update(&mut cx, |_, cx| {
cx.emit(Event::ServerStarted { server_id: id })
})?;
this.update(cx, |_, cx| cx.emit(Event::ServerStarted { server_id: id }))?;
}
}
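`maintain_servers` now borrows the async context (`&mut AsyncApp`) but still performs the same reconciliation: compare the desired server configuration against what is currently held and split the difference into `servers_to_start` and `servers_to_stop`. One way to express that step over plain maps (placeholder types, not the real `ContextServerManager` internals):

```rust
use std::collections::HashMap;

// Placeholder for a server configuration; only equality matters here.
#[derive(Clone, PartialEq, Debug)]
struct ServerConfig {
    command: String,
}

/// Given the desired configuration and the servers currently running, decide
/// which ids need to be started and which need to be stopped. A server whose
/// configuration changed shows up in both lists (stop the old, start the new).
fn reconcile(
    desired: &HashMap<String, ServerConfig>,
    running: &HashMap<String, ServerConfig>,
) -> (Vec<String>, Vec<String>) {
    let mut to_start = Vec::new();
    let mut to_stop = Vec::new();

    for (id, config) in desired {
        match running.get(id) {
            Some(current) if current == config => {}
            Some(_) => {
                to_stop.push(id.clone());
                to_start.push(id.clone());
            }
            None => to_start.push(id.clone()),
        }
    }
    for id in running.keys() {
        if !desired.contains_key(id) {
            to_stop.push(id.clone());
        }
    }
    (to_start, to_stop)
}

fn main() {
    let desired = HashMap::from([
        ("docs".to_string(), ServerConfig { command: "docs-server".into() }),
        ("db".to_string(), ServerConfig { command: "db-server --v2".into() }),
    ]);
    let running = HashMap::from([
        ("db".to_string(), ServerConfig { command: "db-server".into() }),
        ("legacy".to_string(), ServerConfig { command: "old".into() }),
    ]);

    let (start, stop) = reconcile(&desired, &running);
    assert!(start.contains(&"docs".to_string()) && start.contains(&"db".to_string()));
    assert!(stop.contains(&"db".to_string()) && stop.contains(&"legacy".to_string()));
}
```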

View File

@@ -47,13 +47,13 @@ impl StdioTransport {
let (stdout_sender, stdout_receiver) = channel::unbounded::<String>();
let (stderr_sender, stderr_receiver) = channel::unbounded::<String>();
cx.spawn(|_| Self::handle_output(stdin, stdout_receiver).log_err())
cx.spawn(async move |_| Self::handle_output(stdin, stdout_receiver).log_err().await)
.detach();
cx.spawn(|_| async move { Self::handle_input(stdout, stdin_sender).await })
cx.spawn(async move |_| Self::handle_input(stdout, stdin_sender).await)
.detach();
cx.spawn(|_| async move { Self::handle_err(stderr, stderr_sender).await })
cx.spawn(async move |_| Self::handle_err(stderr, stderr_sender).await)
.detach();
Ok(Self {

View File

@@ -226,17 +226,17 @@ impl RegisteredBuffer {
let id = buffer.entity_id();
let prev_pending_change =
mem::replace(&mut self.pending_buffer_change, Task::ready(None));
self.pending_buffer_change = cx.spawn(move |copilot, mut cx| async move {
self.pending_buffer_change = cx.spawn(async move |copilot, cx| {
prev_pending_change.await;
let old_version = copilot
.update(&mut cx, |copilot, _| {
.update(cx, |copilot, _| {
let server = copilot.server.as_authenticated().log_err()?;
let buffer = server.registered_buffers.get_mut(&id)?;
Some(buffer.snapshot.version.clone())
})
.ok()??;
let new_snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot()).ok()?;
let new_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()).ok()?;
let content_changes = cx
.background_spawn({
@@ -265,7 +265,7 @@ impl RegisteredBuffer {
.await;
copilot
.update(&mut cx, |copilot, _| {
.update(cx, |copilot, _| {
let server = copilot.server.as_authenticated().log_err()?;
let buffer = server.registered_buffers.get_mut(&id)?;
if !content_changes.is_empty() {
@@ -388,7 +388,7 @@ impl Copilot {
let node_runtime = self.node_runtime.clone();
let env = self.build_env(&language_settings.edit_predictions.copilot);
let start_task = cx
.spawn(move |this, cx| {
.spawn(async move |this, cx| {
Self::start_language_server(
server_id,
http,
@@ -398,6 +398,7 @@ impl Copilot {
awaiting_sign_in_after_start,
cx,
)
.await
})
.shared();
self.server = CopilotServer::Starting { task: start_task };
@@ -442,7 +443,7 @@ impl Copilot {
},
"copilot".into(),
Default::default(),
cx.to_async(),
&mut cx.to_async(),
);
let http = http_client::FakeHttpClient::create(|_| async { unreachable!() });
let node_runtime = NodeRuntime::unavailable();
@@ -468,7 +469,7 @@ impl Copilot {
env: Option<HashMap<String, String>>,
this: WeakEntity<Self>,
awaiting_sign_in_after_start: bool,
mut cx: AsyncApp,
cx: &mut AsyncApp,
) {
let start_language_server = async {
let server_path = get_copilot_lsp(http).await?;
@@ -495,7 +496,7 @@ impl Copilot {
root_path,
None,
Default::default(),
cx.clone(),
cx,
)?;
server
@@ -535,7 +536,7 @@ impl Copilot {
};
let server = start_language_server.await;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
cx.notify();
match server {
Ok((server, status)) => {
@@ -569,7 +570,7 @@ impl Copilot {
SignInStatus::SignedOut { .. } | SignInStatus::Unauthorized { .. } => {
let lsp = server.lsp.clone();
let task = cx
.spawn(|this, mut cx| async move {
.spawn(async move |this, cx| {
let sign_in = async {
let sign_in = lsp
.request::<request::SignInInitiate>(
@@ -581,7 +582,7 @@ impl Copilot {
Ok(request::SignInStatus::Ok { user: Some(user) })
}
request::SignInInitiateResult::PromptUserDeviceFlow(flow) => {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if let CopilotServer::Running(RunningCopilotServer {
sign_in_status: status,
..
@@ -610,7 +611,7 @@ impl Copilot {
};
let sign_in = sign_in.await;
this.update(&mut cx, |this, cx| match sign_in {
this.update(cx, |this, cx| match sign_in {
Ok(status) => {
this.update_sign_in_status(status, cx);
Ok(())
@@ -670,7 +671,7 @@ impl Copilot {
let http = self.http.clone();
let node_runtime = self.node_runtime.clone();
let server_id = self.server_id;
move |this, cx| async move {
async move |this, cx| {
clear_copilot_dir().await;
Self::start_language_server(server_id, http, node_runtime, env, this, false, cx)
.await

View File

@@ -241,7 +241,7 @@ impl CopilotChat {
let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
let dir_path = copilot_chat_config_dir();
cx.spawn(|cx| async move {
cx.spawn(async move |cx| {
let mut parent_watch_rx = watch_config_dir(
cx.background_executor(),
fs.clone(),

View File

@@ -83,7 +83,7 @@ impl EditPredictionProvider for CopilotCompletionProvider {
cx: &mut Context<Self>,
) {
let copilot = self.copilot.clone();
self.pending_refresh = Some(cx.spawn(|this, mut cx| async move {
self.pending_refresh = Some(cx.spawn(async move |this, cx| {
if debounce {
cx.background_executor()
.timer(COPILOT_DEBOUNCE_TIMEOUT)
@@ -91,12 +91,12 @@ impl EditPredictionProvider for CopilotCompletionProvider {
}
let completions = copilot
.update(&mut cx, |copilot, cx| {
.update(cx, |copilot, cx| {
copilot.completions(&buffer, cursor_position, cx)
})?
.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if !completions.is_empty() {
this.cycled = false;
this.pending_refresh = None;
@@ -153,14 +153,14 @@ impl EditPredictionProvider for CopilotCompletionProvider {
cx.notify();
} else {
let copilot = self.copilot.clone();
self.pending_cycling_refresh = Some(cx.spawn(|this, mut cx| async move {
self.pending_cycling_refresh = Some(cx.spawn(async move |this, cx| {
let completions = copilot
.update(&mut cx, |copilot, cx| {
.update(cx, |copilot, cx| {
copilot.completions_cycling(&buffer, cursor_position, cx)
})?
.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.cycled = true;
this.file_extension = buffer.read(cx).file().and_then(|file| {
Some(

Some files were not shown because too many files have changed in this diff.