Compare commits
180 commits · configurat… → v0.151.2
| SHA1 |
|---|
| ba0115977d |
| f7a0e01fb8 |
| 26d33b7139 |
| 16ee4e63b8 |
| 0c74f4e0ca |
| 7c0ad81a3d |
| cd0841c590 |
| f44f5c5bc7 |
| 670b3b9382 |
| 75da9af3e6 |
| c5f43ee81c |
| 98d74f9317 |
| 7e1eac67ef |
| 950e698834 |
| ace8734b63 |
| 7bf8d733d6 |
| 4e67d33d88 |
| a5b82b2bf3 |
| 81eb594037 |
| 4ec1f29df0 |
| 22a791d9c7 |
| cfc3b7de05 |
| bef575e30a |
| 7571b1d444 |
| f39805d529 |
| 98a3bdad57 |
| 4e0124010d |
| 048be73b22 |
| 8643b11f57 |
| 442ff94d58 |
| 37c7c99383 |
| f633b125b9 |
| 98e09f22c2 |
| 1d868e19f2 |
| ff26abdc2f |
| 1f0b7d45ff |
| b2f3f760ab |
| dc889ca7f2 |
| 8ec36f1e2b |
| ad43bbbf5e |
| 5586397f95 |
| 60af9dd4b1 |
| d3d0c043f5 |
| 2b08e2abe5 |
| 226ec9d404 |
| 8ec680cecb |
| d1dceef945 |
| 88d36d8b9f |
| 9662829810 |
| 26d943287b |
| bea6786f14 |
| 2de420a67b |
| f64f85eb1e |
| 29745ae229 |
| 6afb36fd6f |
| b99bf92452 |
| f417893a7b |
| ef22372f0b |
| 0332eaf797 |
| c2835df898 |
| 93a7682659 |
| 3ddec4816a |
| e2635a685e |
| 14d0f4fbb2 |
| eb0a01e9cb |
| 635e7f6480 |
| e8c6c537de |
| d50cb17256 |
| 4c7c8b005d |
| 5f6726acc0 |
| 2f08a0a28c |
| aaddb73b28 |
| 2c541aee24 |
| afe4d8c8cc |
| 73bde398af |
| 3b0eb607ca |
| 7a964ff91a |
| a87076e815 |
| d67d44f600 |
| 093f131712 |
| 7936fe40ae |
| 2a03dde538 |
| c658ad8380 |
| 46bb04a019 |
| 5ee4c036f9 |
| a28700a74d |
| 55dda0e6af |
| 1a2a538366 |
| 28271a9a36 |
| dd8d52f4f4 |
| 5e55d5507f |
| 14f8d3a33a |
| 29f97e2755 |
| 340662e2f7 |
| 77bb60f1d1 |
| 352c95cf0d |
| 938d93a64c |
| 12dda5fa1b |
| 783cccf95d |
| 30a677e257 |
| a2dee8c61e |
| 935cf542ae |
| 5e869dadf9 |
| 518dd3ed3a |
| 7647644602 |
| 119e337344 |
| 82090c60ca |
| bdf26fe38a |
| 79d8b97531 |
| 0fd5030297 |
| 46ecd7d190 |
| 88b03bc074 |
| db4ff7da6b |
| fb35f15526 |
| 78120cc568 |
| 4ddf2cbb9f |
| 69e76a3bb9 |
| 80c25960dd |
| 26f2369fa6 |
| b19356ac69 |
| 7523a7a437 |
| abc712014a |
| e7c8dba54f |
| 99d45ba694 |
| 1447a9d48c |
| f45af17fd4 |
| e1b05bf7a3 |
| c0ea806afe |
| 1404e328cf |
| 8ea8e81c86 |
| e1c42a5c85 |
| e17a5c1412 |
| 20f85b946d |
| abb5800d20 |
| 4e2b08b909 |
| c697eaba82 |
| 93642c9c51 |
| 25cdd2ad25 |
| 182b7af299 |
| 72b5cda356 |
| 912ed20a3b |
| 3d94ed3242 |
| 3a593fe803 |
| f08be779c0 |
| 278864e19f |
| 9245015d1a |
| b7a66e4491 |
| 59dd7c9138 |
| 3c577e1a42 |
| bb725d3158 |
| 5250866c1a |
| 1ae96025f5 |
| 6b9fa68dc5 |
| db0c1fd592 |
| 1e39d407c2 |
| 61ca36ecad |
| eb9eae09b1 |
| 136f75ee9a |
| 1f8fa82ac3 |
| 8895084604 |
| 1abbe9c65d |
| ec98e71190 |
| 1d986b0c77 |
| feab1261c8 |
| 406d3b413d |
| 643d60f551 |
| 0229d3ccac |
| f85ca387a7 |
| 96bcceed40 |
| 2ad9a742dd |
| 9f0438b540 |
| d274be67d6 |
| 19f0c4af6d |
| 09c698d8d7 |
| 8a5fcc2c22 |
| 28568429aa |
| f1778dd9de |
| 36d51fe4a5 |
| c4c07583c3 |
| a82cc80d1d |
```diff
@@ -3,6 +3,15 @@ export default {
     const url = new URL(request.url);
     url.hostname = "docs-anw.pages.dev";

+    // These pages were removed, but may still be served due to Cloudflare's
+    // [asset retention](https://developers.cloudflare.com/pages/configuration/serving-pages/#asset-retention).
+    if (
+      url.pathname === "/docs/assistant/context-servers" ||
+      url.pathname === "/docs/assistant/model-context-protocol"
+    ) {
+      return await fetch("https://zed.dev/404");
+    }
+
     let res = await fetch(url, request);

     if (res.status === 404) {
```
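The hunk above short-circuits two removed documentation pages to the site's 404 page before proxying everything else to the docs deployment. A minimal sketch of that routing rule, written in Rust purely for illustration (the real worker is JavaScript running on Cloudflare Pages):

```rust
/// Paths of removed docs pages that should no longer be served from
/// Cloudflare's retained assets.
const REMOVED_PATHS: &[&str] = &[
    "/docs/assistant/context-servers",
    "/docs/assistant/model-context-protocol",
];

/// Returns the URL that should actually be fetched for `pathname`.
fn resolve(pathname: &str) -> String {
    if REMOVED_PATHS.contains(&pathname) {
        "https://zed.dev/404".to_string()
    } else {
        format!("https://docs-anw.pages.dev{pathname}")
    }
}

fn main() {
    assert_eq!(resolve("/docs/assistant/context-servers"), "https://zed.dev/404");
    assert_eq!(
        resolve("/docs/getting-started"),
        "https://docs-anw.pages.dev/docs/getting-started"
    );
}
```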
.github/workflows/ci.yml (vendored, 2 changes)
```diff
@@ -363,7 +363,7 @@ jobs:
           sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
           echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH

-      - uses: rui314/setup-mold@2e332a0b602c2fc65d2d3995941b1b29a5f554a0 # v1
+      - uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1
        with:
          mold-version: 2.32.0
```
.github/workflows/delete_comments.yml (new file, vendored, 33 lines)
```yaml
name: Delete Mediafire Comments

on:
  issue_comment:
    types: [created]

permissions:
  issues: write

jobs:
  delete_comment:
    runs-on: ubuntu-latest
    steps:
      - name: Check for specific strings in comment
        id: check_comment
        uses: actions/github-script@v7
        with:
          script: |
            const comment = context.payload.comment.body;
            const triggerStrings = ['www.mediafire.com'];
            return triggerStrings.some(triggerString => comment.includes(triggerString));

      - name: Delete comment if it contains any of the specific strings
        if: steps.check_comment.outputs.result == 'true'
        uses: actions/github-script@v7
        with:
          script: |
            const commentId = context.payload.comment.id;
            await github.rest.issues.deleteComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              comment_id: commentId
            });
```
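The workflow above flags a comment when its body contains any of the trigger strings and then deletes it. A standalone sketch of the matching rule, in Rust for illustration only (the workflow itself runs JavaScript through actions/github-script):

```rust
/// A comment is flagged when its body contains any trigger string.
fn should_delete(body: &str, trigger_strings: &[&str]) -> bool {
    trigger_strings.iter().any(|trigger| body.contains(trigger))
}

fn main() {
    let triggers = ["www.mediafire.com"];
    assert!(should_delete("download here: www.mediafire.com/file/abc", &triggers));
    assert!(!should_delete("looks like a legitimate bug report", &triggers));
}
```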
.github/workflows/deploy_cloudflare.yml (vendored, 8 changes)
```diff
@@ -21,6 +21,14 @@ jobs:
         with:
           mdbook-version: "0.4.37"

+      - name: Set up default .cargo/config.toml
+        run: cp ./.cargo/collab-config.toml ./.cargo/config.toml
+
+      - name: Install system dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install libxkbcommon-dev libxkbcommon-x11-dev
+
       - name: Build book
         run: |
           set -euo pipefail
```
.github/workflows/release_nightly.yml (vendored, 2 changes)
```diff
@@ -157,7 +157,7 @@ jobs:
           sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
           echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH

-      - uses: rui314/setup-mold@2e332a0b602c2fc65d2d3995941b1b29a5f554a0 # v1
+      - uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1
        with:
          mold-version: 2.32.0
```
.mailmap (3 changes)
```diff
@@ -24,7 +24,8 @@ Conrad Irwin <conrad@zed.dev>
 Conrad Irwin <conrad@zed.dev> <conrad.irwin@gmail.com>
 Danilo Leal <danilo@zed.dev>
 Danilo Leal <danilo@zed.dev> <67129314+danilo-leal@users.noreply.github.com>
-Evren Sen <146845123+evrsen@users.noreply.github.com>
+Evren Sen <146845123+evrensen467@users.noreply.github.com>
+Evren Sen <146845123+evrensen467@users.noreply.github.com> <146845123+evrsen@users.noreply.github.com>
 Fernando Tagawa <tagawafernando@gmail.com>
 Fernando Tagawa <tagawafernando@gmail.com> <fernando.tagawa.gamail.com@gmail.com>
 Greg Morenz <greg-morenz@droid.cafe>
```
```diff
@@ -2,11 +2,15 @@
   {
     "label": "clippy",
     "command": "./script/clippy",
-    "args": []
+    "args": [],
+    "allow_concurrent_runs": true,
+    "use_new_terminal": false
   },
   {
     "label": "cargo run --profile release-fast",
     "command": "cargo",
-    "args": ["run", "--profile", "release-fast"]
+    "args": ["run", "--profile", "release-fast"],
+    "allow_concurrent_runs": true,
+    "use_new_terminal": false
   }
 ]
```
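Both tasks above gain `allow_concurrent_runs` and `use_new_terminal` alongside the existing `label`, `command`, and `args` fields. A hypothetical mirror of such an entry, showing how the two new fields could deserialize with sensible defaults; Zed's real task type lives in its `task` crate and differs in detail:

```rust
// Illustrative stand-in for a task entry, not Zed's actual struct.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct TaskEntry {
    label: String,
    command: String,
    #[serde(default)]
    args: Vec<String>,
    // Both new fields fall back to `false` when omitted.
    #[serde(default)]
    allow_concurrent_runs: bool,
    #[serde(default)]
    use_new_terminal: bool,
}

fn main() -> Result<(), serde_json::Error> {
    let json = r#"{
        "label": "clippy",
        "command": "./script/clippy",
        "args": [],
        "allow_concurrent_runs": true,
        "use_new_terminal": false
    }"#;
    let task: TaskEntry = serde_json::from_str(json)?;
    assert!(task.allow_concurrent_runs && !task.use_new_terminal);
    println!("{task:?}");
    Ok(())
}
```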
Cargo.lock (generated, 994 changes): file diff suppressed because it is too large.
Cargo.toml (14 changes)
```diff
@@ -24,6 +24,7 @@ members = [
     "crates/db",
     "crates/dev_server_projects",
     "crates/diagnostics",
+    "crates/docs_preprocessor",
     "crates/editor",
     "crates/extension",
     "crates/extension_api",
@@ -70,7 +71,6 @@ members = [
     "crates/outline",
     "crates/outline_panel",
     "crates/paths",
-    "crates/performance",
     "crates/picker",
     "crates/prettier",
     "crates/project",
@@ -243,7 +243,6 @@ open_ai = { path = "crates/open_ai" }
 outline = { path = "crates/outline" }
 outline_panel = { path = "crates/outline_panel" }
 paths = { path = "crates/paths" }
-performance = { path = "crates/performance" }
 picker = { path = "crates/picker" }
 plugin = { path = "crates/plugin" }
 plugin_macros = { path = "crates/plugin_macros" }
@@ -305,7 +304,7 @@ zed_actions = { path = "crates/zed_actions" }
 #

 aho-corasick = "1.1"
-alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "cacdb5bb3b72bad2c729227537979d95af75978f" }
+alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "91d034ff8b53867143c005acfaa14609147c9a2c" }
 any_vec = "0.14"
 anyhow = "1.0.86"
 ashpd = "0.9.1"
@@ -321,9 +320,9 @@ async-watch = "0.3.1"
 async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
 base64 = "0.22"
 bitflags = "2.6.0"
-blade-graphics = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
-blade-macros = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
-blade-util = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
+blade-graphics = { git = "https://github.com/kvark/blade", rev = "bf40d4f91fb56031e8676376dba2fc021b3e8eaf" }
+blade-macros = { git = "https://github.com/kvark/blade", rev = "bf40d4f91fb56031e8676376dba2fc021b3e8eaf" }
+blade-util = { git = "https://github.com/kvark/blade", rev = "bf40d4f91fb56031e8676376dba2fc021b3e8eaf" }
 cargo_metadata = "0.18"
 cargo_toml = "0.20"
 chrono = { version = "0.4", features = ["serde"] }
@@ -388,7 +387,7 @@ runtimelib = { version = "0.15", default-features = false, features = [
 rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
 rustc-demangle = "0.1.23"
 rust-embed = { version = "8.4", features = ["include-exclude"] }
-schemars = {version = "0.8", features = ["impl_json_schema"]}
+schemars = { version = "0.8", features = ["impl_json_schema"] }
 semver = "1.0"
 serde = { version = "1.0", features = ["derive", "rc"] }
 serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
@@ -545,6 +544,7 @@ zed = { codegen-units = 16 }

 [profile.release-fast]
 inherits = "release"
 debug = "full"
 lto = false
 codegen-units = 16
```
assets/icons/pin.svg (new file, 1 line, 447 B)
```svg
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-pin"><path d="M12 17v5"/><path d="M9 10.76a2 2 0 0 1-1.11 1.79l-1.78.9A2 2 0 0 0 5 15.24V16a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-.76a2 2 0 0 0-1.11-1.79l-1.78-.9A2 2 0 0 1 15 10.76V7a1 1 0 0 1 1-1 2 2 0 0 0 0-4H8a2 2 0 0 0 0 4 1 1 0 0 1 1 1z"/></svg>
```
assets/icons/unpin.svg (new file, 1 line, 401 B)
```svg
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-pin-off"><path d="M12 17v5"/><path d="M15 9.34V7a1 1 0 0 1 1-1 2 2 0 0 0 0-4H7.89"/><path d="m2 2 20 20"/><path d="M9 9v1.76a2 2 0 0 1-1.11 1.79l-1.78.9A2 2 0 0 0 5 15.24V16a1 1 0 0 0 1 1h11"/></svg>
```
```diff
@@ -523,7 +523,7 @@
     "ctrl-alt-c": "outline_panel::CopyPath",
     "alt-ctrl-shift-c": "outline_panel::CopyRelativePath",
     "alt-ctrl-r": "outline_panel::RevealInFileManager",
-    "space": "outline_panel::Open",
+    "space": ["outline_panel::Open", { "change_selection": false }],
     "shift-down": "menu::SelectNext",
     "shift-up": "menu::SelectPrev"
   }
```
```diff
@@ -613,11 +613,15 @@
     "ctrl-alt-space": "terminal::ShowCharacterPalette",
     "ctrl-shift-c": "terminal::Copy",
     "ctrl-insert": "terminal::Copy",
     // "ctrl-a": "editor::SelectAll", // conflicts with readline
     "ctrl-shift-v": "terminal::Paste",
     "shift-insert": "terminal::Paste",
     "ctrl-enter": "assistant::InlineAssist",
     // Overrides for conflicting keybindings
     "ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
     "ctrl-shift-a": "editor::SelectAll",
     "ctrl-shift-f": "buffer_search::Deploy",
     "ctrl-shift-l": "terminal::Clear",
     "ctrl-shift-w": "pane::CloseActiveItem",
     "ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
     "up": ["terminal::SendKeystroke", "up"],
     "pageup": ["terminal::SendKeystroke", "pageup"],
```
```diff
@@ -536,7 +536,7 @@
     "cmd-alt-c": "outline_panel::CopyPath",
     "alt-cmd-shift-c": "outline_panel::CopyRelativePath",
     "alt-cmd-r": "outline_panel::RevealInFileManager",
-    "space": "outline_panel::Open",
+    "space": ["outline_panel::Open", { "change_selection": false }],
     "shift-down": "menu::SelectNext",
     "shift-up": "menu::SelectPrev"
   }
```
```diff
@@ -41,7 +41,16 @@
     "context": "Pane",
     "bindings": {
       "f4": "search::SelectNextMatch",
-      "shift-f4": "search::SelectPrevMatch"
+      "shift-f4": "search::SelectPrevMatch",
+      "alt-1": ["pane::ActivateItem", 0],
+      "alt-2": ["pane::ActivateItem", 1],
+      "alt-3": ["pane::ActivateItem", 2],
+      "alt-4": ["pane::ActivateItem", 3],
+      "alt-5": ["pane::ActivateItem", 4],
+      "alt-6": ["pane::ActivateItem", 5],
+      "alt-7": ["pane::ActivateItem", 6],
+      "alt-8": ["pane::ActivateItem", 7],
+      "alt-9": "pane::ActivateLastItem"
     }
   },
   {
```
```diff
@@ -45,7 +45,16 @@
     "context": "Pane",
     "bindings": {
       "f4": "search::SelectNextMatch",
-      "shift-f4": "search::SelectPrevMatch"
+      "shift-f4": "search::SelectPrevMatch",
+      "cmd-1": ["pane::ActivateItem", 0],
+      "cmd-2": ["pane::ActivateItem", 1],
+      "cmd-3": ["pane::ActivateItem", 2],
+      "cmd-4": ["pane::ActivateItem", 3],
+      "cmd-5": ["pane::ActivateItem", 4],
+      "cmd-6": ["pane::ActivateItem", 5],
+      "cmd-7": ["pane::ActivateItem", 6],
+      "cmd-8": ["pane::ActivateItem", 7],
+      "cmd-9": "pane::ActivateLastItem"
     }
   },
   {
```
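The bindings above mix two value shapes: a bare action name such as `"pane::ActivateLastItem"` and an action with an argument such as `["pane::ActivateItem", 0]`. A small sketch of how a keymap loader can accept both shapes with an untagged enum; Zed's own keymap types are more involved, so treat the names here as illustrative:

```rust
use serde::Deserialize;
use serde_json::Value;

// A binding value is either just an action name, or an action plus one argument.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum Binding {
    Action(String),
    ActionWithArg(String, Value),
}

fn main() -> Result<(), serde_json::Error> {
    let plain: Binding = serde_json::from_str(r#""pane::ActivateLastItem""#)?;
    let with_arg: Binding = serde_json::from_str(r#"["pane::ActivateItem", 0]"#)?;
    println!("{plain:?} / {with_arg:?}");
    Ok(())
}
```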
```diff
@@ -92,6 +92,7 @@
     "g y": "editor::GoToTypeDefinition",
     "g shift-i": "editor::GoToImplementation",
     "g x": "editor::OpenUrl",
+    "g f": "editor::OpenFile",
     "g n": "vim::SelectNextMatch",
     "g shift-n": "vim::SelectPreviousMatch",
     "g l": "vim::SelectNext",
@@ -176,19 +177,19 @@
     "ctrl-w ctrl-p": "workspace::ActivatePreviousPane",
     "ctrl-w shift-w": "workspace::ActivatePreviousPane",
     "ctrl-w ctrl-shift-w": "workspace::ActivatePreviousPane",
-    "ctrl-w v": "pane::SplitLeft",
-    "ctrl-w ctrl-v": "pane::SplitLeft",
-    "ctrl-w s": "pane::SplitUp",
-    "ctrl-w shift-s": "pane::SplitUp",
-    "ctrl-w ctrl-s": "pane::SplitUp",
+    "ctrl-w v": "pane::SplitVertical",
+    "ctrl-w ctrl-v": "pane::SplitVertical",
+    "ctrl-w s": "pane::SplitHorizontal",
+    "ctrl-w shift-s": "pane::SplitHorizontal",
+    "ctrl-w ctrl-s": "pane::SplitHorizontal",
     "ctrl-w c": "pane::CloseAllItems",
     "ctrl-w ctrl-c": "pane::CloseAllItems",
     "ctrl-w q": "pane::CloseAllItems",
     "ctrl-w ctrl-q": "pane::CloseAllItems",
     "ctrl-w o": "workspace::CloseInactiveTabsAndPanes",
     "ctrl-w ctrl-o": "workspace::CloseInactiveTabsAndPanes",
-    "ctrl-w n": ["workspace::NewFileInDirection", "Up"],
-    "ctrl-w ctrl-n": ["workspace::NewFileInDirection", "Up"],
+    "ctrl-w n": "workspace::NewFileSplitHorizontal",
+    "ctrl-w ctrl-n": "workspace::NewFileSplitHorizontal",
     "ctrl-w d": "editor::GoToDefinitionSplit",
     "ctrl-w g d": "editor::GoToDefinitionSplit",
     "ctrl-w shift-d": "editor::GoToTypeDefinitionSplit",
```
```diff
@@ -69,6 +69,10 @@
   // The factor to grow the active pane by. Defaults to 1.0
   // which gives the same size as all other panes.
   "active_pane_magnification": 1.0,
+  // The direction that you want to split panes horizontally. Defaults to "up"
+  "pane_split_direction_horizontal": "up",
+  // The direction that you want to split panes horizontally. Defaults to "left"
+  "pane_split_direction_vertical": "left",
   // Centered layout related settings.
   "centered_layout": {
     // The relative width of the left padding of the central pane from the
@@ -506,6 +510,8 @@
   // "soft_wrap": "editor_width",
   // 4. Soft wrap lines at the preferred line length.
   // "soft_wrap": "preferred_line_length",
   // 5. Soft wrap lines at the preferred line length or the editor width (whichever is smaller).
   // "soft_wrap": "bounded",
   "soft_wrap": "prefer_line",
   // The column at which to soft-wrap lines, for buffers where soft-wrap
   // is enabled.
@@ -724,7 +730,13 @@
   //
   "file_types": {
     "JSON": ["flake.lock"],
-    "JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "tsconfig.json"]
+    "JSONC": [
+      "**/.zed/**/*.json",
+      "**/zed/**/*.json",
+      "**/Zed/**/*.json",
+      "tsconfig.json",
+      "pyrightconfig.json"
+    ]
   },
   // The extensions that Zed should automatically install on startup.
   //
@@ -838,6 +850,7 @@
     "language_servers": ["starpls", "!buck2-lsp", "..."]
   },
   "Svelte": {
     "language_servers": ["svelte-language-server", "..."],
     "prettier": {
       "allowed": true,
       "plugins": ["prettier-plugin-svelte"]
@@ -861,6 +874,7 @@
     }
   },
   "Vue.js": {
     "language_servers": ["vue-language-server", "..."],
     "prettier": {
       "allowed": true
     }
@@ -887,7 +901,8 @@
     "api_url": "https://generativelanguage.googleapis.com"
   },
   "ollama": {
-    "api_url": "http://localhost:11434"
+    "api_url": "http://localhost:11434",
+    "low_speed_timeout_in_seconds": 60
   },
   "openai": {
     "version": "1",
@@ -937,6 +952,7 @@
   },
   // Vim settings
   "vim": {
     "toggle_relative_line_numbers": false,
     "use_system_clipboard": "always",
     "use_multiline_find": false,
     "use_smartcase_find": false,
```
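The `file_types` entry above associates a language with glob patterns such as `**/.zed/**/*.json`. A sketch of that matching using the `globset` crate; Zed's matcher is its own implementation, so this only demonstrates the pattern semantics:

```rust
use globset::{Glob, GlobSetBuilder};

fn main() -> Result<(), globset::Error> {
    // Build one matcher for all patterns mapped to the JSONC language.
    let mut jsonc = GlobSetBuilder::new();
    for pattern in [
        "**/.zed/**/*.json",
        "**/zed/**/*.json",
        "**/Zed/**/*.json",
        "tsconfig.json",
        "pyrightconfig.json",
    ] {
        jsonc.add(Glob::new(pattern)?);
    }
    let jsonc = jsonc.build()?;

    assert!(jsonc.is_match("home/user/project/.zed/local/settings.json"));
    assert!(jsonc.is_match("pyrightconfig.json"));
    assert!(!jsonc.is_match("flake.lock")); // handled by the "JSON" entry instead
    Ok(())
}
```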
@@ -3,10 +3,9 @@ use editor::Editor;
|
||||
use extension::ExtensionStore;
|
||||
use futures::StreamExt;
|
||||
use gpui::{
|
||||
actions, anchored, deferred, percentage, Animation, AnimationExt as _, AppContext, CursorStyle,
|
||||
DismissEvent, EventEmitter, InteractiveElement as _, Model, ParentElement as _, Render,
|
||||
SharedString, StatefulInteractiveElement, Styled, Transformation, View, ViewContext,
|
||||
VisualContext as _,
|
||||
actions, percentage, Animation, AnimationExt as _, AppContext, CursorStyle, EventEmitter,
|
||||
InteractiveElement as _, Model, ParentElement as _, Render, SharedString,
|
||||
StatefulInteractiveElement, Styled, Transformation, View, ViewContext, VisualContext as _,
|
||||
};
|
||||
use language::{
|
||||
LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId, LanguageServerName,
|
||||
@@ -14,7 +13,7 @@ use language::{
|
||||
use project::{LanguageServerProgress, Project};
|
||||
use smallvec::SmallVec;
|
||||
use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration};
|
||||
use ui::{prelude::*, ContextMenu};
|
||||
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle};
|
||||
use workspace::{item::ItemHandle, StatusItemView, Workspace};
|
||||
|
||||
actions!(activity_indicator, [ShowErrorMessage]);
|
||||
@@ -27,7 +26,7 @@ pub struct ActivityIndicator {
|
||||
statuses: Vec<LspStatus>,
|
||||
project: Model<Project>,
|
||||
auto_updater: Option<Model<AutoUpdater>>,
|
||||
context_menu: Option<View<ContextMenu>>,
|
||||
context_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
}
|
||||
|
||||
struct LspStatus {
|
||||
@@ -41,7 +40,6 @@ struct PendingWork<'a> {
|
||||
progress: &'a LanguageServerProgress,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Content {
|
||||
icon: Option<gpui::AnyElement>,
|
||||
message: String,
|
||||
@@ -79,7 +77,7 @@ impl ActivityIndicator {
|
||||
statuses: Default::default(),
|
||||
project: project.clone(),
|
||||
auto_updater,
|
||||
context_menu: None,
|
||||
context_menu_handle: Default::default(),
|
||||
}
|
||||
});
|
||||
|
||||
@@ -174,7 +172,7 @@ impl ActivityIndicator {
|
||||
.flatten()
|
||||
}
|
||||
|
||||
fn content_to_render(&mut self, cx: &mut ViewContext<Self>) -> Content {
|
||||
fn content_to_render(&mut self, cx: &mut ViewContext<Self>) -> Option<Content> {
|
||||
// Show any language server has pending activity.
|
||||
let mut pending_work = self.pending_language_server_work(cx);
|
||||
if let Some(PendingWork {
|
||||
@@ -203,7 +201,7 @@ impl ActivityIndicator {
|
||||
write!(&mut message, " + {} more", additional_work_count).unwrap();
|
||||
}
|
||||
|
||||
return Content {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
@@ -216,7 +214,7 @@ impl ActivityIndicator {
|
||||
),
|
||||
message,
|
||||
on_click: Some(Arc::new(Self::toggle_language_server_work_context_menu)),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
// Show any language server installation info.
|
||||
@@ -235,7 +233,7 @@ impl ActivityIndicator {
|
||||
}
|
||||
|
||||
if !downloading.is_empty() {
|
||||
return Content {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Download)
|
||||
.size(IconSize::Small)
|
||||
@@ -243,11 +241,11 @@ impl ActivityIndicator {
|
||||
),
|
||||
message: format!("Downloading {}...", downloading.join(", "),),
|
||||
on_click: None,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
if !checking_for_update.is_empty() {
|
||||
return Content {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Download)
|
||||
.size(IconSize::Small)
|
||||
@@ -258,11 +256,11 @@ impl ActivityIndicator {
|
||||
checking_for_update.join(", "),
|
||||
),
|
||||
on_click: None,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
if !failed.is_empty() {
|
||||
return Content {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ExclamationTriangle)
|
||||
.size(IconSize::Small)
|
||||
@@ -275,12 +273,12 @@ impl ActivityIndicator {
|
||||
on_click: Some(Arc::new(|this, cx| {
|
||||
this.show_error_message(&Default::default(), cx)
|
||||
})),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
// Show any formatting failure
|
||||
if let Some(failure) = self.project.read(cx).last_formatting_failure() {
|
||||
return Content {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ExclamationTriangle)
|
||||
.size(IconSize::Small)
|
||||
@@ -290,13 +288,13 @@ impl ActivityIndicator {
|
||||
on_click: Some(Arc::new(|_, cx| {
|
||||
cx.dispatch_action(Box::new(workspace::OpenLog));
|
||||
})),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
// Show any application auto-update info.
|
||||
if let Some(updater) = &self.auto_updater {
|
||||
return match &updater.read(cx).status() {
|
||||
AutoUpdateStatus::Checking => Content {
|
||||
AutoUpdateStatus::Checking => Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Download)
|
||||
.size(IconSize::Small)
|
||||
@@ -304,8 +302,8 @@ impl ActivityIndicator {
|
||||
),
|
||||
message: "Checking for Zed updates…".to_string(),
|
||||
on_click: None,
|
||||
},
|
||||
AutoUpdateStatus::Downloading => Content {
|
||||
}),
|
||||
AutoUpdateStatus::Downloading => Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Download)
|
||||
.size(IconSize::Small)
|
||||
@@ -313,8 +311,8 @@ impl ActivityIndicator {
|
||||
),
|
||||
message: "Downloading Zed update…".to_string(),
|
||||
on_click: None,
|
||||
},
|
||||
AutoUpdateStatus::Installing => Content {
|
||||
}),
|
||||
AutoUpdateStatus::Installing => Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Download)
|
||||
.size(IconSize::Small)
|
||||
@@ -322,8 +320,8 @@ impl ActivityIndicator {
|
||||
),
|
||||
message: "Installing Zed update…".to_string(),
|
||||
on_click: None,
|
||||
},
|
||||
AutoUpdateStatus::Updated { binary_path } => Content {
|
||||
}),
|
||||
AutoUpdateStatus::Updated { binary_path } => Some(Content {
|
||||
icon: None,
|
||||
message: "Click to restart and update Zed".to_string(),
|
||||
on_click: Some(Arc::new({
|
||||
@@ -332,8 +330,8 @@ impl ActivityIndicator {
|
||||
};
|
||||
move |_, cx| workspace::reload(&reload, cx)
|
||||
})),
|
||||
},
|
||||
AutoUpdateStatus::Errored => Content {
|
||||
}),
|
||||
AutoUpdateStatus::Errored => Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ExclamationTriangle)
|
||||
.size(IconSize::Small)
|
||||
@@ -343,8 +341,8 @@ impl ActivityIndicator {
|
||||
on_click: Some(Arc::new(|this, cx| {
|
||||
this.dismiss_error_message(&Default::default(), cx)
|
||||
})),
|
||||
},
|
||||
AutoUpdateStatus::Idle => Default::default(),
|
||||
}),
|
||||
AutoUpdateStatus::Idle => None,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -352,7 +350,7 @@ impl ActivityIndicator {
|
||||
ExtensionStore::try_global(cx).map(|extension_store| extension_store.read(cx))
|
||||
{
|
||||
if let Some(extension_id) = extension_store.outstanding_operations().keys().next() {
|
||||
return Content {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Download)
|
||||
.size(IconSize::Small)
|
||||
@@ -360,80 +358,15 @@ impl ActivityIndicator {
|
||||
),
|
||||
message: format!("Updating {extension_id} extension…"),
|
||||
on_click: None,
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Default::default()
|
||||
None
|
||||
}
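The tail of this refactor shows `content_to_render` returning `Option<Content>` instead of a default-empty `Content`, so 'nothing to report' becomes explicit rather than an empty struct. A condensed sketch of that pattern with simplified stand-ins for Zed's types:

```rust
// Simplified stand-ins, not Zed's actual types.
struct Content {
    message: String,
}

enum AutoUpdateStatus {
    Checking,
    Idle,
}

fn content_to_render(status: &AutoUpdateStatus) -> Option<Content> {
    match status {
        AutoUpdateStatus::Checking => Some(Content {
            message: "Checking for Zed updates…".to_string(),
        }),
        // Previously this arm produced an empty default Content; now the caller
        // can skip rendering entirely when there is nothing to report.
        AutoUpdateStatus::Idle => None,
    }
}

fn main() {
    assert!(content_to_render(&AutoUpdateStatus::Idle).is_none());
    assert!(content_to_render(&AutoUpdateStatus::Checking).is_some());
}
```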
|
||||
|
||||
fn toggle_language_server_work_context_menu(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if self.context_menu.take().is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
self.build_lsp_work_context_menu(cx);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn build_lsp_work_context_menu(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let mut has_work = false;
|
||||
let this = cx.view().downgrade();
|
||||
let context_menu = ContextMenu::build(cx, |mut menu, cx| {
|
||||
for work in self.pending_language_server_work(cx) {
|
||||
has_work = true;
|
||||
|
||||
let this = this.clone();
|
||||
let title = SharedString::from(
|
||||
work.progress
|
||||
.title
|
||||
.as_deref()
|
||||
.unwrap_or(work.progress_token)
|
||||
.to_string(),
|
||||
);
|
||||
if work.progress.is_cancellable {
|
||||
let language_server_id = work.language_server_id;
|
||||
let token = work.progress_token.to_string();
|
||||
menu = menu.custom_entry(
|
||||
move |_| {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(Label::new(title.clone()))
|
||||
.child(Icon::new(IconName::XCircle))
|
||||
.into_any_element()
|
||||
},
|
||||
move |cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.project.update(cx, |project, cx| {
|
||||
project.cancel_language_server_work(
|
||||
language_server_id,
|
||||
Some(token.clone()),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
this.context_menu.take();
|
||||
})
|
||||
.ok();
|
||||
},
|
||||
);
|
||||
} else {
|
||||
menu = menu.label(title.clone());
|
||||
}
|
||||
}
|
||||
menu
|
||||
});
|
||||
|
||||
if has_work {
|
||||
cx.subscribe(&context_menu, |this, _, _: &DismissEvent, cx| {
|
||||
this.context_menu.take();
|
||||
cx.notify();
|
||||
})
|
||||
.detach();
|
||||
cx.focus_view(&context_menu);
|
||||
self.context_menu = Some(context_menu);
|
||||
cx.notify();
|
||||
}
|
||||
self.context_menu_handle.toggle(cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -441,33 +374,88 @@ impl EventEmitter<Event> for ActivityIndicator {}
|
||||
|
||||
impl Render for ActivityIndicator {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let content = self.content_to_render(cx);
|
||||
|
||||
let mut result = h_flex()
|
||||
let result = h_flex()
|
||||
.id("activity-indicator")
|
||||
.on_action(cx.listener(Self::show_error_message))
|
||||
.on_action(cx.listener(Self::dismiss_error_message));
|
||||
|
||||
if let Some(on_click) = content.on_click {
|
||||
result = result
|
||||
.cursor(CursorStyle::PointingHand)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
on_click(this, cx);
|
||||
}))
|
||||
}
|
||||
|
||||
result
|
||||
.gap_2()
|
||||
.children(content.icon)
|
||||
.child(Label::new(SharedString::from(content.message)).size(LabelSize::Small))
|
||||
.children(self.context_menu.as_ref().map(|menu| {
|
||||
deferred(
|
||||
anchored()
|
||||
.anchor(gpui::AnchorCorner::BottomLeft)
|
||||
.child(menu.clone()),
|
||||
let Some(content) = self.content_to_render(cx) else {
|
||||
return result;
|
||||
};
|
||||
let this = cx.view().downgrade();
|
||||
result.gap_2().child(
|
||||
PopoverMenu::new("activity-indicator-popover")
|
||||
.trigger(
|
||||
ButtonLike::new("activity-indicator-trigger").child(
|
||||
h_flex()
|
||||
.id("activity-indicator-status")
|
||||
.gap_2()
|
||||
.children(content.icon)
|
||||
.child(Label::new(content.message).size(LabelSize::Small))
|
||||
.when_some(content.on_click, |this, handler| {
|
||||
this.on_click(cx.listener(move |this, _, cx| {
|
||||
handler(this, cx);
|
||||
}))
|
||||
.cursor(CursorStyle::PointingHand)
|
||||
}),
|
||||
),
|
||||
)
|
||||
.with_priority(1)
|
||||
}))
|
||||
.anchor(gpui::AnchorCorner::BottomLeft)
|
||||
.menu(move |cx| {
|
||||
let strong_this = this.upgrade()?;
|
||||
let mut has_work = false;
|
||||
let menu = ContextMenu::build(cx, |mut menu, cx| {
|
||||
for work in strong_this.read(cx).pending_language_server_work(cx) {
|
||||
has_work = true;
|
||||
let this = this.clone();
|
||||
let mut title = work
|
||||
.progress
|
||||
.title
|
||||
.as_deref()
|
||||
.unwrap_or(work.progress_token)
|
||||
.to_owned();
|
||||
|
||||
if work.progress.is_cancellable {
|
||||
let language_server_id = work.language_server_id;
|
||||
let token = work.progress_token.to_string();
|
||||
let title = SharedString::from(title);
|
||||
menu = menu.custom_entry(
|
||||
move |_| {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(Label::new(title.clone()))
|
||||
.child(Icon::new(IconName::XCircle))
|
||||
.into_any_element()
|
||||
},
|
||||
move |cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.project.update(cx, |project, cx| {
|
||||
project.cancel_language_server_work(
|
||||
language_server_id,
|
||||
Some(token.clone()),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
this.context_menu_handle.hide(cx);
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
},
|
||||
);
|
||||
} else {
|
||||
if let Some(progress_message) = work.progress.message.as_ref() {
|
||||
title.push_str(": ");
|
||||
title.push_str(progress_message);
|
||||
}
|
||||
|
||||
menu = menu.label(title);
|
||||
}
|
||||
}
|
||||
menu
|
||||
});
|
||||
has_work.then_some(menu)
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -26,7 +26,7 @@ pub use context_store::*;
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
use fs::Fs;
|
||||
use gpui::Context as _;
|
||||
use gpui::{actions, impl_actions, AppContext, Global, SharedString, UpdateGlobal};
|
||||
use gpui::{actions, AppContext, Global, SharedString, UpdateGlobal};
|
||||
use indexed_docs::IndexedDocsRegistry;
|
||||
pub(crate) use inline_assistant::*;
|
||||
use language_model::{
|
||||
@@ -69,13 +69,6 @@ actions!(
|
||||
|
||||
const DEFAULT_CONTEXT_LINES: usize = 50;
|
||||
|
||||
#[derive(Clone, Default, Deserialize, PartialEq)]
|
||||
pub struct InlineAssist {
|
||||
prompt: Option<String>,
|
||||
}
|
||||
|
||||
impl_actions!(assistant, [InlineAssist]);
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
pub struct MessageId(clock::Lamport);
|
||||
|
||||
|
||||
@@ -12,10 +12,11 @@ use crate::{
|
||||
slash_command_picker,
|
||||
terminal_inline_assistant::TerminalInlineAssistant,
|
||||
Assist, CacheStatus, ConfirmCommand, Context, ContextEvent, ContextId, ContextStore,
|
||||
CycleMessageRole, DeployHistory, DeployPromptLibrary, InlineAssist, InlineAssistId,
|
||||
InlineAssistant, InsertIntoEditor, MessageStatus, ModelSelector, PendingSlashCommand,
|
||||
PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata, SavedContextMetadata, Split,
|
||||
ToggleFocus, ToggleModelSelector, WorkflowStepResolution, WorkflowStepView,
|
||||
CycleMessageRole, DeployHistory, DeployPromptLibrary, InlineAssistId, InlineAssistant,
|
||||
InsertIntoEditor, Message, MessageId, MessageMetadata, MessageStatus, ModelSelector,
|
||||
PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata,
|
||||
SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector, WorkflowStepResolution,
|
||||
WorkflowStepView,
|
||||
};
|
||||
use crate::{ContextStoreEvent, ModelPickerDelegate};
|
||||
use anyhow::{anyhow, Result};
|
||||
@@ -36,10 +37,10 @@ use fs::Fs;
|
||||
use gpui::{
|
||||
canvas, div, img, percentage, point, pulsating_between, size, Action, Animation, AnimationExt,
|
||||
AnyElement, AnyView, AppContext, AsyncWindowContext, ClipboardEntry, ClipboardItem,
|
||||
Context as _, DismissEvent, Empty, Entity, EntityId, EventEmitter, FocusHandle, FocusableView,
|
||||
FontWeight, InteractiveElement, IntoElement, Model, ParentElement, Pixels, ReadGlobal, Render,
|
||||
RenderImage, SharedString, Size, StatefulInteractiveElement, Styled, Subscription, Task,
|
||||
Transformation, UpdateGlobal, View, VisualContext, WeakView, WindowContext,
|
||||
Context as _, Empty, Entity, EntityId, EventEmitter, FocusHandle, FocusableView, FontWeight,
|
||||
InteractiveElement, IntoElement, Model, ParentElement, Pixels, ReadGlobal, Render, RenderImage,
|
||||
SharedString, Size, StatefulInteractiveElement, Styled, Subscription, Task, Transformation,
|
||||
UpdateGlobal, View, VisualContext, WeakView, WindowContext,
|
||||
};
|
||||
use indexed_docs::IndexedDocsStore;
|
||||
use language::{
|
||||
@@ -82,6 +83,7 @@ use workspace::{
|
||||
ToolbarItemView, Workspace,
|
||||
};
|
||||
use workspace::{searchable::SearchableItemHandle, NewFile};
|
||||
use zed_actions::InlineAssist;
|
||||
|
||||
pub fn init(cx: &mut AppContext) {
|
||||
workspace::FollowableViewRegistry::register::<ContextEditor>(cx);
|
||||
@@ -107,29 +109,12 @@ pub fn init(cx: &mut AppContext) {
|
||||
cx.observe_new_views(
|
||||
|terminal_panel: &mut TerminalPanel, cx: &mut ViewContext<TerminalPanel>| {
|
||||
let settings = AssistantSettings::get_global(cx);
|
||||
if !settings.enabled {
|
||||
return;
|
||||
}
|
||||
|
||||
terminal_panel.register_tab_bar_button(cx.new_view(|_| InlineAssistTabBarButton), cx);
|
||||
terminal_panel.asssistant_enabled(settings.enabled, cx);
|
||||
},
|
||||
)
|
||||
.detach();
|
||||
}
|
||||
|
||||
struct InlineAssistTabBarButton;
|
||||
|
||||
impl Render for InlineAssistTabBarButton {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
IconButton::new("terminal_inline_assistant", IconName::ZedAssistant)
|
||||
.icon_size(IconSize::Small)
|
||||
.on_click(cx.listener(|_, _, cx| {
|
||||
cx.dispatch_action(InlineAssist::default().boxed_clone());
|
||||
}))
|
||||
.tooltip(move |cx| Tooltip::for_action("Inline Assist", &InlineAssist::default(), cx))
|
||||
}
|
||||
}
|
||||
|
||||
pub enum AssistantPanelEvent {
|
||||
ContextEdited,
|
||||
}
|
||||
@@ -349,6 +334,7 @@ impl AssistantPanel {
|
||||
model_summary_editor.clone(),
|
||||
)
|
||||
});
|
||||
|
||||
let pane = cx.new_view(|cx| {
|
||||
let mut pane = Pane::new(
|
||||
workspace.weak_handle(),
|
||||
@@ -385,6 +371,7 @@ impl AssistantPanel {
|
||||
pane.active_item()
|
||||
.map_or(false, |item| item.downcast::<ContextHistory>().is_some()),
|
||||
);
|
||||
let _pane = cx.view().clone();
|
||||
let right_children = h_flex()
|
||||
.gap(Spacing::Small.rems(cx))
|
||||
.child(
|
||||
@@ -395,32 +382,27 @@ impl AssistantPanel {
|
||||
.tooltip(|cx| Tooltip::for_action("New Context", &NewFile, cx)),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("menu", IconName::Menu)
|
||||
.icon_size(IconSize::Small)
|
||||
.on_click(cx.listener(|pane, _, cx| {
|
||||
let zoom_label = if pane.is_zoomed() {
|
||||
PopoverMenu::new("assistant-panel-popover-menu")
|
||||
.trigger(
|
||||
IconButton::new("menu", IconName::Menu).icon_size(IconSize::Small),
|
||||
)
|
||||
.menu(move |cx| {
|
||||
let zoom_label = if _pane.read(cx).is_zoomed() {
|
||||
"Zoom Out"
|
||||
} else {
|
||||
"Zoom In"
|
||||
};
|
||||
let menu = ContextMenu::build(cx, |menu, cx| {
|
||||
menu.context(pane.focus_handle(cx))
|
||||
let focus_handle = _pane.focus_handle(cx);
|
||||
Some(ContextMenu::build(cx, move |menu, _| {
|
||||
menu.context(focus_handle.clone())
|
||||
.action("New Context", Box::new(NewFile))
|
||||
.action("History", Box::new(DeployHistory))
|
||||
.action("Prompt Library", Box::new(DeployPromptLibrary))
|
||||
.action("Configure", Box::new(ShowConfiguration))
|
||||
.action(zoom_label, Box::new(ToggleZoom))
|
||||
});
|
||||
cx.subscribe(&menu, |pane, _, _: &DismissEvent, _| {
|
||||
pane.new_item_menu = None;
|
||||
})
|
||||
.detach();
|
||||
pane.new_item_menu = Some(menu);
|
||||
})),
|
||||
}))
|
||||
}),
|
||||
)
|
||||
.when_some(pane.new_item_menu.as_ref(), |el, new_item_menu| {
|
||||
el.child(Pane::render_menu_overlay(new_item_menu))
|
||||
})
|
||||
.into_any_element()
|
||||
.into();
|
||||
|
||||
@@ -510,7 +492,7 @@ impl AssistantPanel {
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let update_model_summary = match event {
|
||||
pane::Event::Remove => {
|
||||
pane::Event::Remove { .. } => {
|
||||
cx.emit(PanelEvent::Close);
|
||||
false
|
||||
}
|
||||
@@ -881,7 +863,7 @@ impl AssistantPanel {
|
||||
}
|
||||
|
||||
fn new_context(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ContextEditor>> {
|
||||
if self.project.read(cx).is_remote() {
|
||||
if self.project.read(cx).is_via_collab() {
|
||||
let task = self
|
||||
.context_store
|
||||
.update(cx, |store, cx| store.create_remote_context(cx));
|
||||
@@ -1721,6 +1703,8 @@ struct WorkflowAssist {
|
||||
assist_ids: Vec<InlineAssistId>,
|
||||
}
|
||||
|
||||
type MessageHeader = MessageMetadata;
|
||||
|
||||
pub struct ContextEditor {
|
||||
context: Model<Context>,
|
||||
fs: Arc<dyn Fs>,
|
||||
@@ -1728,7 +1712,7 @@ pub struct ContextEditor {
|
||||
project: Model<Project>,
|
||||
lsp_adapter_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
editor: View<Editor>,
|
||||
blocks: HashSet<CustomBlockId>,
|
||||
blocks: HashMap<MessageId, (MessageHeader, CustomBlockId)>,
|
||||
image_blocks: HashSet<CustomBlockId>,
|
||||
scroll_position: Option<ScrollPosition>,
|
||||
remote_id: Option<workspace::ViewId>,
|
||||
@@ -3055,176 +3039,209 @@ impl ContextEditor {
|
||||
fn update_message_headers(&mut self, cx: &mut ViewContext<Self>) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
|
||||
let excerpt_id = *buffer.as_singleton().unwrap().0;
|
||||
let old_blocks = std::mem::take(&mut self.blocks);
|
||||
let new_blocks = self
|
||||
.context
|
||||
.read(cx)
|
||||
.messages(cx)
|
||||
.map(|message| BlockProperties {
|
||||
position: buffer
|
||||
.anchor_in_excerpt(excerpt_id, message.anchor)
|
||||
.unwrap(),
|
||||
height: 2,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new({
|
||||
let context = self.context.clone();
|
||||
move |cx| {
|
||||
let message_id = message.id;
|
||||
let show_spinner = message.role == Role::Assistant
|
||||
&& message.status == MessageStatus::Pending;
|
||||
let mut old_blocks = std::mem::take(&mut self.blocks);
|
||||
let mut blocks_to_remove: HashMap<_, _> = old_blocks
|
||||
.iter()
|
||||
.map(|(message_id, (_, block_id))| (*message_id, *block_id))
|
||||
.collect();
|
||||
let mut blocks_to_replace: HashMap<_, RenderBlock> = Default::default();
|
||||
|
||||
let label = match message.role {
|
||||
Role::User => {
|
||||
Label::new("You").color(Color::Default).into_any_element()
|
||||
let render_block = |message: MessageMetadata| -> RenderBlock {
|
||||
Box::new({
|
||||
let context = self.context.clone();
|
||||
move |cx| {
|
||||
let message_id = MessageId(message.timestamp);
|
||||
let show_spinner = message.role == Role::Assistant
|
||||
&& message.status == MessageStatus::Pending;
|
||||
|
||||
let label = match message.role {
|
||||
Role::User => {
|
||||
Label::new("You").color(Color::Default).into_any_element()
|
||||
}
|
||||
Role::Assistant => {
|
||||
let label = Label::new("Assistant").color(Color::Info);
|
||||
if show_spinner {
|
||||
label
|
||||
.with_animation(
|
||||
"pulsating-label",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 0.8)),
|
||||
|label, delta| label.alpha(delta),
|
||||
)
|
||||
.into_any_element()
|
||||
} else {
|
||||
label.into_any_element()
|
||||
}
|
||||
Role::Assistant => {
|
||||
let label = Label::new("Assistant").color(Color::Info);
|
||||
if show_spinner {
|
||||
label
|
||||
.with_animation(
|
||||
"pulsating-label",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 0.8)),
|
||||
|label, delta| label.alpha(delta),
|
||||
}
|
||||
|
||||
Role::System => Label::new("System")
|
||||
.color(Color::Warning)
|
||||
.into_any_element(),
|
||||
};
|
||||
|
||||
let sender = ButtonLike::new("role")
|
||||
.style(ButtonStyle::Filled)
|
||||
.child(label)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta(
|
||||
"Toggle message role",
|
||||
None,
|
||||
"Available roles: You (User), Assistant, System",
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click({
|
||||
let context = context.clone();
|
||||
move |_, cx| {
|
||||
context.update(cx, |context, cx| {
|
||||
context.cycle_message_roles(
|
||||
HashSet::from_iter(Some(message_id)),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
h_flex()
|
||||
.id(("message_header", message_id.as_u64()))
|
||||
.pl(cx.gutter_dimensions.full_width())
|
||||
.h_11()
|
||||
.w_full()
|
||||
.relative()
|
||||
.gap_1()
|
||||
.child(sender)
|
||||
.children(match &message.cache {
|
||||
Some(cache) if cache.is_final_anchor => match cache.status {
|
||||
CacheStatus::Cached => Some(
|
||||
div()
|
||||
.id("cached")
|
||||
.child(
|
||||
Icon::new(IconName::DatabaseZap)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Hint),
|
||||
)
|
||||
.into_any_element()
|
||||
} else {
|
||||
label.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
Role::System => Label::new("System")
|
||||
.color(Color::Warning)
|
||||
.into_any_element(),
|
||||
};
|
||||
|
||||
let sender = ButtonLike::new("role")
|
||||
.style(ButtonStyle::Filled)
|
||||
.child(label)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta(
|
||||
"Toggle message role",
|
||||
None,
|
||||
"Available roles: You (User), Assistant, System",
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click({
|
||||
let context = context.clone();
|
||||
move |_, cx| {
|
||||
context.update(cx, |context, cx| {
|
||||
context.cycle_message_roles(
|
||||
HashSet::from_iter(Some(message_id)),
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta(
|
||||
"Context cached",
|
||||
None,
|
||||
"Large messages cached to optimize performance",
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.into_any_element(),
|
||||
),
|
||||
CacheStatus::Pending => Some(
|
||||
div()
|
||||
.child(
|
||||
Icon::new(IconName::Ellipsis)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Hint),
|
||||
)
|
||||
.into_any_element(),
|
||||
),
|
||||
},
|
||||
_ => None,
|
||||
})
|
||||
.children(match &message.status {
|
||||
MessageStatus::Error(error) => Some(
|
||||
Button::new("show-error", "Error")
|
||||
.color(Color::Error)
|
||||
.selected_label_color(Color::Error)
|
||||
.selected_icon_color(Color::Error)
|
||||
.icon(IconName::XCircle)
|
||||
.icon_color(Color::Error)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_position(IconPosition::Start)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
"Error interacting with language model",
|
||||
None,
|
||||
"Click for more details",
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
h_flex()
|
||||
.id(("message_header", message_id.as_u64()))
|
||||
.pl(cx.gutter_dimensions.full_width())
|
||||
.h_11()
|
||||
.w_full()
|
||||
.relative()
|
||||
.gap_1()
|
||||
.child(sender)
|
||||
.children(match &message.cache {
|
||||
Some(cache) if cache.is_final_anchor => match cache.status {
|
||||
CacheStatus::Cached => Some(
|
||||
div()
|
||||
.id("cached")
|
||||
.child(
|
||||
Icon::new(IconName::DatabaseZap)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Hint),
|
||||
)
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta(
|
||||
"Context cached",
|
||||
None,
|
||||
"Large messages cached to optimize performance",
|
||||
cx,
|
||||
)
|
||||
}).into_any_element()
|
||||
),
|
||||
CacheStatus::Pending => Some(
|
||||
div()
|
||||
.child(
|
||||
Icon::new(IconName::Ellipsis)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Hint),
|
||||
).into_any_element()
|
||||
),
|
||||
},
|
||||
_ => None,
|
||||
})
|
||||
.children(match &message.status {
|
||||
MessageStatus::Error(error) => Some(
|
||||
Button::new("show-error", "Error")
|
||||
.color(Color::Error)
|
||||
.selected_label_color(Color::Error)
|
||||
.selected_icon_color(Color::Error)
|
||||
.icon(IconName::XCircle)
|
||||
.icon_color(Color::Error)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_position(IconPosition::Start)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
"Error interacting with language model",
|
||||
None,
|
||||
"Click for more details",
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click({
|
||||
let context = context.clone();
|
||||
let error = error.clone();
|
||||
move |_, cx| {
|
||||
context.update(cx, |_, cx| {
|
||||
cx.emit(ContextEvent::ShowAssistError(
|
||||
error.clone(),
|
||||
));
|
||||
});
|
||||
}
|
||||
})
|
||||
.into_any_element(),
|
||||
),
|
||||
MessageStatus::Canceled => Some(
|
||||
ButtonLike::new("canceled")
|
||||
.child(
|
||||
Icon::new(IconName::XCircle).color(Color::Disabled),
|
||||
.on_click({
|
||||
let context = context.clone();
|
||||
let error = error.clone();
|
||||
move |_, cx| {
|
||||
context.update(cx, |_, cx| {
|
||||
cx.emit(ContextEvent::ShowAssistError(
|
||||
error.clone(),
|
||||
));
|
||||
});
|
||||
}
|
||||
})
|
||||
.into_any_element(),
|
||||
),
|
||||
MessageStatus::Canceled => Some(
|
||||
ButtonLike::new("canceled")
|
||||
.child(Icon::new(IconName::XCircle).color(Color::Disabled))
|
||||
.child(
|
||||
Label::new("Canceled")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Disabled),
|
||||
)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
"Canceled",
|
||||
None,
|
||||
"Interaction with the assistant was canceled",
|
||||
cx,
|
||||
)
|
||||
.child(
|
||||
Label::new("Canceled")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Disabled),
|
||||
)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
"Canceled",
|
||||
None,
|
||||
"Interaction with the assistant was canceled",
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.into_any_element(),
|
||||
),
|
||||
_ => None,
|
||||
})
|
||||
.into_any_element()
|
||||
}
|
||||
}),
|
||||
disposition: BlockDisposition::Above,
|
||||
priority: usize::MAX,
|
||||
})
|
||||
.into_any_element(),
|
||||
),
|
||||
_ => None,
|
||||
})
|
||||
.into_any_element()
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
};
|
||||
let create_block_properties = |message: &Message| BlockProperties {
|
||||
position: buffer
|
||||
.anchor_in_excerpt(excerpt_id, message.anchor)
|
||||
.unwrap(),
|
||||
height: 2,
|
||||
style: BlockStyle::Sticky,
|
||||
disposition: BlockDisposition::Above,
|
||||
priority: usize::MAX,
|
||||
render: render_block(MessageMetadata::from(message)),
|
||||
};
|
||||
let mut new_blocks = vec![];
|
||||
let mut block_index_to_message = vec![];
|
||||
for message in self.context.read(cx).messages(cx) {
|
||||
if let Some(_) = blocks_to_remove.remove(&message.id) {
|
||||
// This is an old message that we might modify.
|
||||
let Some((meta, block_id)) = old_blocks.get_mut(&message.id) else {
|
||||
debug_assert!(
|
||||
false,
|
||||
"old_blocks should contain a message_id we've just removed."
|
||||
);
|
||||
continue;
|
||||
};
|
||||
// Should we modify it?
|
||||
let message_meta = MessageMetadata::from(&message);
|
||||
if meta != &message_meta {
|
||||
blocks_to_replace.insert(*block_id, render_block(message_meta.clone()));
|
||||
*meta = message_meta;
|
||||
}
|
||||
} else {
|
||||
// This is a new message.
|
||||
new_blocks.push(create_block_properties(&message));
|
||||
block_index_to_message.push((message.id, MessageMetadata::from(&message)));
|
||||
}
|
||||
}
|
||||
editor.replace_blocks(blocks_to_replace, None, cx);
|
||||
editor.remove_blocks(blocks_to_remove.into_values().collect(), None, cx);
|
||||
|
||||
editor.remove_blocks(old_blocks, None, cx);
|
||||
let ids = editor.insert_blocks(new_blocks, None, cx);
|
||||
self.blocks = HashSet::from_iter(ids);
|
||||
old_blocks.extend(ids.into_iter().zip(block_index_to_message).map(
|
||||
|(block_id, (message_id, message_meta))| (message_id, (message_meta, block_id)),
|
||||
));
|
||||
self.blocks = old_blocks;
|
||||
});
|
||||
}
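The rework above keys header blocks by message id so that only changed headers are re-rendered: blocks for vanished messages are removed, blocks whose metadata changed are replaced in place, and new messages get fresh blocks. A simplified, editor-free sketch of that bookkeeping (the types are stand-ins, not Zed's editor API):

```rust
use std::collections::HashMap;

type MessageId = u64;
type BlockId = u64;

#[derive(Clone, PartialEq, Debug)]
struct Meta {
    role: &'static str,
}

fn main() {
    // Existing header blocks, keyed by message id.
    let mut blocks: HashMap<MessageId, (Meta, BlockId)> = HashMap::from([
        (1, (Meta { role: "user" }, 10)),
        (2, (Meta { role: "assistant" }, 11)),
    ]);
    // Current messages: 1 unchanged, 2 changed its metadata, 3 is new.
    let messages = [
        (1, Meta { role: "user" }),
        (2, Meta { role: "system" }),
        (3, Meta { role: "user" }),
    ];

    let mut to_remove: HashMap<_, _> = blocks.iter().map(|(m, (_, b))| (*m, *b)).collect();
    let mut to_replace = Vec::new();
    let mut to_insert = Vec::new();

    for (id, meta) in messages {
        if to_remove.remove(&id).is_some() {
            // Known message: re-render its block only if the metadata changed.
            let entry = blocks.get_mut(&id).unwrap();
            if entry.0 != meta {
                to_replace.push(entry.1);
                entry.0 = meta;
            }
        } else {
            // Brand-new message: it needs a new block.
            to_insert.push((id, meta));
        }
    }
    // Whatever is left in `to_remove` belongs to deleted messages.
    println!(
        "replace: {to_replace:?}, insert: {to_insert:?}, remove: {:?}",
        to_remove.values()
    );
}
```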
|
||||
|
||||
|
||||
```diff
@@ -135,6 +135,7 @@ impl AssistantSettingsContent {
                     Some(language_model::settings::OllamaSettingsContent {
                         api_url,
                         low_speed_timeout_in_seconds,
+                        available_models: None,
                     });
                 }
             },
@@ -153,8 +154,8 @@ impl AssistantSettingsContent {
                 models
                     .into_iter()
                     .filter_map(|model| match model {
-                        open_ai::Model::Custom { name, max_tokens } => {
-                            Some(language_model::provider::open_ai::AvailableModel { name, max_tokens })
+                        open_ai::Model::Custom { name, max_tokens, max_output_tokens } => {
+                            Some(language_model::provider::open_ai::AvailableModel { name, max_tokens, max_output_tokens })
                         }
                         _ => None,
                     })
@@ -295,7 +296,7 @@ impl AssistantSettingsContent {
                 _ => (None, None),
             };
             settings.provider = Some(AssistantProviderContentV1::Ollama {
-                default_model: Some(ollama::Model::new(&model)),
+                default_model: Some(ollama::Model::new(&model, None, None)),
                 api_url,
                 low_speed_timeout_in_seconds,
             });
```
```diff
@@ -330,11 +330,22 @@ pub struct MessageCacheMetadata {
 pub struct MessageMetadata {
     pub role: Role,
     pub status: MessageStatus,
-    timestamp: clock::Lamport,
+    pub(crate) timestamp: clock::Lamport,
     #[serde(skip)]
     pub cache: Option<MessageCacheMetadata>,
 }

+impl From<&Message> for MessageMetadata {
+    fn from(message: &Message) -> Self {
+        Self {
+            role: message.role,
+            status: message.status.clone(),
+            timestamp: message.id.0,
+            cache: message.cache.clone(),
+        }
+    }
+}
+
 impl MessageMetadata {
     pub fn is_cache_valid(&self, buffer: &BufferSnapshot, range: &Range<usize>) -> bool {
         let result = match &self.cache {
```
```diff
@@ -161,7 +161,7 @@ impl ContextStore {
     ) -> Result<proto::OpenContextResponse> {
         let context_id = ContextId::from_proto(envelope.payload.context_id);
         let operations = this.update(&mut cx, |this, cx| {
-            if this.project.read(cx).is_remote() {
+            if this.project.read(cx).is_via_collab() {
                 return Err(anyhow!("only the host contexts can be opened"));
             }

@@ -190,7 +190,7 @@ impl ContextStore {
         mut cx: AsyncAppContext,
     ) -> Result<proto::CreateContextResponse> {
         let (context_id, operations) = this.update(&mut cx, |this, cx| {
-            if this.project.read(cx).is_remote() {
+            if this.project.read(cx).is_via_collab() {
                 return Err(anyhow!("can only create contexts as the host"));
             }

@@ -234,7 +234,7 @@ impl ContextStore {
         mut cx: AsyncAppContext,
     ) -> Result<proto::SynchronizeContextsResponse> {
         this.update(&mut cx, |this, cx| {
-            if this.project.read(cx).is_remote() {
+            if this.project.read(cx).is_via_collab() {
                 return Err(anyhow!("only the host can synchronize contexts"));
             }

@@ -356,7 +356,7 @@ impl ContextStore {
         let Some(project_id) = project.remote_id() else {
             return Task::ready(Err(anyhow!("project was not remote")));
         };
-        if project.is_local() {
+        if project.is_local_or_ssh() {
             return Task::ready(Err(anyhow!("cannot create remote contexts as the host")));
         }

@@ -487,7 +487,7 @@ impl ContextStore {
         let Some(project_id) = project.remote_id() else {
             return Task::ready(Err(anyhow!("project was not remote")));
         };
-        if project.is_local() {
+        if project.is_local_or_ssh() {
             return Task::ready(Err(anyhow!("cannot open remote contexts as the host")));
         }

@@ -589,7 +589,7 @@ impl ContextStore {
         };

         // For now, only the host can advertise their open contexts.
-        if self.project.read(cx).is_remote() {
+        if self.project.read(cx).is_via_collab() {
             return;
         }
```
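The renames above (`is_remote` to `is_via_collab`, `is_local` to `is_local_or_ssh`) suggest a three-way split between local projects, SSH projects, and collab guests, with only the first two acting as hosts for contexts. A hypothetical sketch of that distinction, not Zed's actual `Project` API:

```rust
// Hypothetical project classification, for illustration only.
#[derive(Clone, Copy, PartialEq)]
enum ProjectKind {
    Local,
    Ssh,
    ViaCollab,
}

impl ProjectKind {
    /// A guest joined over collaboration rather than owning the project.
    fn is_via_collab(self) -> bool {
        self == ProjectKind::ViaCollab
    }
    /// The host side: either a truly local project or one opened over SSH.
    fn is_local_or_ssh(self) -> bool {
        matches!(self, ProjectKind::Local | ProjectKind::Ssh)
    }
}

fn main() {
    // Only the host (local or SSH) may create or synchronize contexts.
    for kind in [ProjectKind::Local, ProjectKind::Ssh, ProjectKind::ViaCollab] {
        let can_host_contexts = !kind.is_via_collab();
        assert_eq!(can_host_contexts, kind.is_local_or_ssh());
    }
}
```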
```diff
@@ -1,6 +1,7 @@
 use crate::{
-    humanize_token_count, prompts::PromptBuilder, AssistantPanel, AssistantPanelEvent,
-    CharOperation, LineDiff, LineOperation, ModelSelector, StreamingDiff,
+    assistant_settings::AssistantSettings, humanize_token_count, prompts::PromptBuilder,
+    AssistantPanel, AssistantPanelEvent, CharOperation, LineDiff, LineOperation, ModelSelector,
+    StreamingDiff,
 };
 use anyhow::{anyhow, Context as _, Result};
 use client::{telemetry::Telemetry, ErrorExt};
@@ -35,7 +36,7 @@ use language_model::{
 use multi_buffer::MultiBufferRow;
 use parking_lot::Mutex;
 use rope::Rope;
-use settings::Settings;
+use settings::{Settings, SettingsStore};
 use smol::future::FutureExt;
 use std::{
     cmp,
@@ -47,6 +48,7 @@ use std::{
     task::{self, Poll},
     time::{Duration, Instant},
 };
+use terminal_view::terminal_panel::TerminalPanel;
 use theme::ThemeSettings;
 use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip};
 use util::{RangeExt, ResultExt};
@@ -131,6 +133,21 @@ impl InlineAssistant {
             Self::update_global(cx, |this, cx| this.handle_workspace_event(event, cx));
         })
         .detach();
+
+        let workspace = workspace.downgrade();
+        cx.observe_global::<SettingsStore>(move |cx| {
+            let Some(workspace) = workspace.upgrade() else {
+                return;
+            };
+            let Some(terminal_panel) = workspace.read(cx).panel::<TerminalPanel>(cx) else {
+                return;
+            };
+            let enabled = AssistantSettings::get_global(cx).enabled;
+            terminal_panel.update(cx, |terminal_panel, cx| {
+                terminal_panel.asssistant_enabled(enabled, cx)
+            });
+        })
+        .detach();
     }

     fn handle_workspace_event(&mut self, event: &workspace::Event, cx: &mut WindowContext) {
@@ -1122,7 +1139,7 @@ impl InlineAssistant {
         editor.set_show_gutter(false, cx);
         editor.scroll_manager.set_forbid_vertical_scroll(true);
         editor.set_read_only(true);
-        editor.set_show_inline_completions(false);
+        editor.set_show_inline_completions(Some(false), cx);
         editor.highlight_rows::<DeletedLines>(
             Anchor::min()..=Anchor::max(),
             Some(cx.theme().status().deleted_background),
```
@@ -190,15 +190,15 @@ impl PickerDelegate for ModelPickerDelegate {
|
||||
})
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(div().when(model_info.is_selected, |this| {
|
||||
this.child(
|
||||
Icon::new(IconName::Check)
|
||||
.color(Color::Accent)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
})),
|
||||
),
|
||||
),
|
||||
)
|
||||
.end_slot(div().when(model_info.is_selected, |this| {
|
||||
this.child(
|
||||
Icon::new(IconName::Check)
|
||||
.color(Color::Accent)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
})),
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,4 @@
- use crate::{
- slash_command::SlashCommandCompletionProvider, AssistantPanel, InlineAssist, InlineAssistant,
- };
+ use crate::{slash_command::SlashCommandCompletionProvider, AssistantPanel, InlineAssistant};
use anyhow::{anyhow, Result};
use chrono::{DateTime, Utc};
use collections::{HashMap, HashSet};
@@ -44,6 +42,7 @@ use ui::{
use util::{ResultExt, TryFutureExt};
use uuid::Uuid;
use workspace::Workspace;
+ use zed_actions::InlineAssist;

actions!(
prompt_library,
@@ -496,7 +495,7 @@ impl PromptLibrary {
editor.set_text(prompt_metadata.title.unwrap_or_default(), cx);
if prompt_id.is_built_in() {
editor.set_read_only(true);
- editor.set_show_inline_completions(false);
+ editor.set_show_inline_completions(Some(false), cx);
}
editor
});
@@ -511,7 +510,7 @@ impl PromptLibrary {
let mut editor = Editor::for_buffer(buffer, None, cx);
if prompt_id.is_built_in() {
editor.set_read_only(true);
- editor.set_show_inline_completions(false);
+ editor.set_show_inline_completions(Some(false), cx);
}
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
editor.set_show_gutter(false, cx);
@@ -8,6 +8,7 @@ use language::BufferSnapshot;
use parking_lot::Mutex;
use serde::Serialize;
use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration};
+ use text::LineEnding;
use util::ResultExt;

#[derive(Serialize)]
@@ -191,8 +192,8 @@ impl PromptBuilder {
if let Some(id) = path.split('/').last().and_then(|s| s.strip_suffix(".hbs")) {
if let Some(prompt) = Assets.load(path.as_ref()).log_err().flatten() {
log::info!("Registering built-in prompt template: {}", id);
- handlebars
- .register_template_string(id, String::from_utf8_lossy(prompt.as_ref()))?
+ let prompt = String::from_utf8_lossy(prompt.as_ref());
+ handlebars.register_template_string(id, LineEnding::normalize_cow(prompt))?
}
}
}
@@ -512,10 +512,6 @@ mod custom_path_matcher {
})
}

- pub fn sources(&self) -> &[String] {
- &self.sources
- }

pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
let other_path = other.as_ref();
self.sources
@@ -197,6 +197,7 @@ fn tab_items_for_queries(
}

let mut timestamps_by_entity_id = HashMap::default();
+ let mut visited_buffers = HashSet::default();
let mut open_buffers = Vec::new();

for pane in workspace.panes() {
@@ -211,9 +212,11 @@ fn tab_items_for_queries(
if let Some(timestamp) =
timestamps_by_entity_id.get(&editor.entity_id())
{
- let snapshot = buffer.read(cx).snapshot();
- let full_path = snapshot.resolve_file_path(cx, true);
- open_buffers.push((full_path, snapshot, *timestamp));
+ if visited_buffers.insert(buffer.read(cx).remote_id()) {
+ let snapshot = buffer.read(cx).snapshot();
+ let full_path = snapshot.resolve_file_path(cx, true);
+ open_buffers.push((full_path, snapshot, *timestamp));
+ }
}
}
}
@@ -78,7 +78,7 @@ impl WorkflowStepView {
editor.set_show_wrap_guides(false, cx);
editor.set_show_indent_guides(false, cx);
editor.set_read_only(true);
- editor.set_show_inline_completions(false);
+ editor.set_show_inline_completions(Some(false), cx);
editor.insert_blocks(
[
BlockProperties {
@@ -26,6 +26,7 @@ paths.workspace = true
release_channel.workspace = true
serde.workspace = true
util.workspace = true
+ tempfile.workspace = true

[target.'cfg(target_os = "linux")'.dependencies]
exec.workspace = true
@@ -11,6 +11,7 @@ use std::{
sync::Arc,
thread::{self, JoinHandle},
};
+ use tempfile::NamedTempFile;
use util::paths::PathWithPosition;

struct Detect;
@@ -22,7 +23,11 @@ trait InstalledApp {
}

#[derive(Parser, Debug)]
- #[command(name = "zed", disable_version_flag = true)]
+ #[command(
+ name = "zed",
+ disable_version_flag = true,
+ after_help = "To read from stdin, append '-' (e.g. 'ps axf | zed -')"
+ )]
struct Args {
/// Wait for all of the given paths to be opened/closed before exiting.
#[arg(short, long)]
@@ -120,6 +125,7 @@ fn main() -> Result<()> {
let exit_status = Arc::new(Mutex::new(None));
let mut paths = vec![];
let mut urls = vec![];
+ let mut stdin_tmp_file: Option<fs::File> = None;
for path in args.paths_with_position.iter() {
if path.starts_with("zed://")
|| path.starts_with("http://")
@@ -128,6 +134,11 @@ fn main() -> Result<()> {
|| path.starts_with("ssh://")
{
urls.push(path.to_string());
+ } else if path == "-" && args.paths_with_position.len() == 1 {
+ let file = NamedTempFile::new()?;
+ paths.push(file.path().to_string_lossy().to_string());
+ let (file, _) = file.keep()?;
+ stdin_tmp_file = Some(file);
} else {
paths.push(parse_path_with_position(path)?)
}
@@ -162,11 +173,31 @@ fn main() -> Result<()> {
}
});

+ let pipe_handle: JoinHandle<anyhow::Result<()>> = thread::spawn(move || {
+ if let Some(mut tmp_file) = stdin_tmp_file {
+ let mut stdin = std::io::stdin().lock();
+ if io::IsTerminal::is_terminal(&stdin) {
+ return Ok(());
+ }
+ let mut buffer = [0; 8 * 1024];
+ loop {
+ let bytes_read = io::Read::read(&mut stdin, &mut buffer)?;
+ if bytes_read == 0 {
+ break;
+ }
+ io::Write::write(&mut tmp_file, &buffer[..bytes_read])?;
+ }
+ io::Write::flush(&mut tmp_file)?;
+ }
+ Ok(())
+ });

if args.foreground {
app.run_foreground(url)?;
} else {
app.launch(url)?;
sender.join().unwrap()?;
+ pipe_handle.join().unwrap()?;
}

if let Some(exit_status) = exit_status.lock().take() {
@@ -48,6 +48,7 @@ text.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tiny_http = "0.8"
|
||||
tokio-socks = { version = "0.5.2", default-features = false, features = ["futures-io"] }
|
||||
url.workspace = true
|
||||
util.workspace = true
|
||||
worktree.workspace = true
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod test;
|
||||
|
||||
mod socks;
|
||||
pub mod telemetry;
|
||||
pub mod user;
|
||||
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use anyhow::{anyhow, bail, Context as _, Result};
|
||||
use async_recursion::async_recursion;
|
||||
use async_tungstenite::tungstenite::{
|
||||
client::IntoClientRequest,
|
||||
@@ -31,6 +32,7 @@ use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, Requ
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources};
|
||||
use socks::connect_socks_proxy_stream;
|
||||
use std::fmt;
|
||||
use std::pin::Pin;
|
||||
use std::{
|
||||
@@ -1177,6 +1179,7 @@ impl Client {
|
||||
.unwrap_or_default();
|
||||
|
||||
let http = self.http.clone();
|
||||
let proxy = http.proxy().cloned();
|
||||
let credentials = credentials.clone();
|
||||
let rpc_url = self.rpc_url(http, release_channel);
|
||||
cx.background_executor().spawn(async move {
|
||||
@@ -1198,7 +1201,7 @@ impl Client {
|
||||
.host_str()
|
||||
.zip(rpc_url.port_or_known_default())
|
||||
.ok_or_else(|| anyhow!("missing host in rpc url"))?;
|
||||
let stream = smol::net::TcpStream::connect(rpc_host).await?;
|
||||
let stream = connect_socks_proxy_stream(proxy.as_ref(), rpc_host).await?;
|
||||
|
||||
log::info!("connected to rpc endpoint {}", rpc_url);
|
||||
|
||||
@@ -1392,11 +1395,64 @@ impl Client {
|
||||
id: u64,
|
||||
}
|
||||
|
||||
let github_user = {
|
||||
#[derive(Deserialize)]
|
||||
struct GithubUser {
|
||||
id: i32,
|
||||
login: String,
|
||||
}
|
||||
|
||||
let request = {
|
||||
let mut request_builder =
|
||||
Request::get(&format!("https://api.github.com/users/{login}"));
|
||||
if let Ok(github_token) = std::env::var("GITHUB_TOKEN") {
|
||||
request_builder =
|
||||
request_builder.header("Authorization", format!("Bearer {}", github_token));
|
||||
}
|
||||
|
||||
request_builder.body(AsyncBody::empty())?
|
||||
};
|
||||
|
||||
let mut response = http
|
||||
.send(request)
|
||||
.await
|
||||
.context("error fetching GitHub user")?;
|
||||
|
||||
let mut body = Vec::new();
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_end(&mut body)
|
||||
.await
|
||||
.context("error reading GitHub user")?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let text = String::from_utf8_lossy(body.as_slice());
|
||||
bail!(
|
||||
"status error {}, response: {text:?}",
|
||||
response.status().as_u16()
|
||||
);
|
||||
}
|
||||
|
||||
let user = serde_json::from_slice::<GithubUser>(body.as_slice()).map_err(|err| {
|
||||
log::error!("Error deserializing: {:?}", err);
|
||||
log::error!(
|
||||
"GitHub API response text: {:?}",
|
||||
String::from_utf8_lossy(body.as_slice())
|
||||
);
|
||||
anyhow!("error deserializing GitHub user")
|
||||
})?;
|
||||
|
||||
user
|
||||
};
|
||||
|
||||
// Use the collab server's admin API to retrieve the id
|
||||
// of the impersonated user.
|
||||
let mut url = self.rpc_url(http.clone(), None).await?;
|
||||
url.set_path("/user");
|
||||
url.set_query(Some(&format!("github_login={login}")));
|
||||
url.set_query(Some(&format!(
|
||||
"github_login={}&github_user_id={}",
|
||||
github_user.login, github_user.id
|
||||
)));
|
||||
let request: http_client::Request<AsyncBody> = Request::get(url.as_str())
|
||||
.header("Authorization", format!("token {api_token}"))
|
||||
.body("".into())?;
|
||||
|
||||
crates/client/src/socks.rs (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
//! socks proxy
|
||||
use anyhow::{anyhow, Result};
|
||||
use futures::io::{AsyncRead, AsyncWrite};
|
||||
use http_client::Uri;
|
||||
use tokio_socks::{
|
||||
io::Compat,
|
||||
tcp::{Socks4Stream, Socks5Stream},
|
||||
};
|
||||
|
||||
pub(crate) async fn connect_socks_proxy_stream(
|
||||
proxy: Option<&Uri>,
|
||||
rpc_host: (&str, u16),
|
||||
) -> Result<Box<dyn AsyncReadWrite>> {
|
||||
let stream = match parse_socks_proxy(proxy) {
|
||||
Some((socks_proxy, SocksVersion::V4)) => {
|
||||
let stream = Socks4Stream::connect_with_socket(
|
||||
Compat::new(smol::net::TcpStream::connect(socks_proxy).await?),
|
||||
rpc_host,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| anyhow!("error connecting to socks {}", err))?;
|
||||
Box::new(stream) as Box<dyn AsyncReadWrite>
|
||||
}
|
||||
Some((socks_proxy, SocksVersion::V5)) => Box::new(
|
||||
Socks5Stream::connect_with_socket(
|
||||
Compat::new(smol::net::TcpStream::connect(socks_proxy).await?),
|
||||
rpc_host,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| anyhow!("error connecting to socks {}", err))?,
|
||||
) as Box<dyn AsyncReadWrite>,
|
||||
None => Box::new(smol::net::TcpStream::connect(rpc_host).await?) as Box<dyn AsyncReadWrite>,
|
||||
};
|
||||
Ok(stream)
|
||||
}
|
||||
|
||||
fn parse_socks_proxy(proxy: Option<&Uri>) -> Option<((String, u16), SocksVersion)> {
|
||||
let Some(proxy_uri) = proxy else {
|
||||
return None;
|
||||
};
|
||||
let Some(scheme) = proxy_uri.scheme_str() else {
|
||||
return None;
|
||||
};
|
||||
let socks_version = if scheme.starts_with("socks4") {
|
||||
// socks4
|
||||
SocksVersion::V4
|
||||
} else if scheme.starts_with("socks") {
|
||||
// socks, socks5
|
||||
SocksVersion::V5
|
||||
} else {
|
||||
return None;
|
||||
};
|
||||
if let (Some(host), Some(port)) = (proxy_uri.host(), proxy_uri.port_u16()) {
|
||||
Some(((host.to_string(), port), socks_version))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
// private helper structs and traits
|
||||
|
||||
enum SocksVersion {
|
||||
V4,
|
||||
V5,
|
||||
}
|
||||
|
||||
pub(crate) trait AsyncReadWrite: AsyncRead + AsyncWrite + Unpin + Send + 'static {}
|
||||
impl<T: AsyncRead + AsyncWrite + Unpin + Send + 'static> AsyncReadWrite for T {}
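Not part of the diff: a minimal test sketch of how the scheme-to-version mapping above behaves. It assumes http_client::Uri re-exports http::Uri (so it parses from a string) and that such a test would sit in this same module; the module and test names are invented.

#[cfg(test)]
mod proxy_parsing_sketch {
    use super::*;

    #[test]
    fn scheme_selects_socks_version() {
        // "socks4"-prefixed schemes map to SocksVersion::V4.
        let uri: http_client::Uri = "socks4://127.0.0.1:1080".parse().unwrap();
        let ((host, port), version) = parse_socks_proxy(Some(&uri)).unwrap();
        assert_eq!((host.as_str(), port), ("127.0.0.1", 1080));
        assert!(matches!(version, SocksVersion::V4));

        // "socks" and "socks5" map to SocksVersion::V5.
        let uri: http_client::Uri = "socks5://proxy.example.com:9050".parse().unwrap();
        let (_, version) = parse_socks_proxy(Some(&uri)).unwrap();
        assert!(matches!(version, SocksVersion::V5));

        // Any other scheme yields None, so the caller connects directly.
        let uri: http_client::Uri = "http://proxy.example.com:8080".parse().unwrap();
        assert!(parse_socks_proxy(Some(&uri)).is_none());
    }
}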
|
||||
@@ -216,6 +216,12 @@ spec:
|
||||
secretKeyRef:
|
||||
name: supermaven
|
||||
key: api_key
|
||||
- name: USER_BACKFILLER_GITHUB_ACCESS_TOKEN
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: user-backfiller
|
||||
key: github_access_token
|
||||
optional: true
|
||||
- name: INVITE_LINK_PREFIX
|
||||
value: ${INVITE_LINK_PREFIX}
|
||||
- name: RUST_BACKTRACE
|
||||
|
||||
@@ -9,14 +9,14 @@ CREATE TABLE "users" (
"connected_once" BOOLEAN NOT NULL DEFAULT false,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"metrics_id" TEXT,
- "github_user_id" INTEGER,
+ "github_user_id" INTEGER NOT NULL,
"accepted_tos_at" TIMESTAMP WITHOUT TIME ZONE,
"github_user_created_at" TIMESTAMP WITHOUT TIME ZONE
);
CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
- CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");
+ CREATE UNIQUE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");

CREATE TABLE "access_tokens" (
|
||||
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
@@ -86,6 +86,7 @@ CREATE TABLE "worktree_entries" (
|
||||
"is_ignored" BOOL NOT NULL,
|
||||
"is_deleted" BOOL NOT NULL,
|
||||
"git_status" INTEGER,
|
||||
"is_fifo" BOOL NOT NULL,
|
||||
PRIMARY KEY(project_id, worktree_id, id),
|
||||
FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
@@ -0,0 +1,4 @@
+ alter table users alter column github_user_id set not null;
+
+ drop index index_users_on_github_user_id;
+ create unique index uix_users_on_github_user_id on users (github_user_id);
@@ -0,0 +1,2 @@
+ ALTER TABLE "worktree_entries"
+ ADD "is_fifo" BOOL NOT NULL DEFAULT FALSE;
@@ -108,7 +108,7 @@ pub async fn validate_api_token<B>(req: Request<B>, next: Next<B>) -> impl IntoR
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct AuthenticatedUserParams {
|
||||
github_user_id: Option<i32>,
|
||||
github_user_id: i32,
|
||||
github_login: String,
|
||||
github_email: Option<String>,
|
||||
github_user_created_at: Option<chrono::DateTime<chrono::Utc>>,
|
||||
|
||||
@@ -63,7 +63,7 @@ impl Database {
|
||||
pub async fn add_contributor(
|
||||
&self,
|
||||
github_login: &str,
|
||||
github_user_id: Option<i32>,
|
||||
github_user_id: i32,
|
||||
github_email: Option<&str>,
|
||||
github_user_created_at: Option<DateTimeUtc>,
|
||||
initial_channel_id: Option<ChannelId>,
|
||||
|
||||
@@ -319,6 +319,7 @@ impl Database {
|
||||
git_status: ActiveValue::set(entry.git_status.map(|status| status as i64)),
|
||||
is_deleted: ActiveValue::set(false),
|
||||
scan_id: ActiveValue::set(update.scan_id as i64),
|
||||
is_fifo: ActiveValue::set(entry.is_fifo),
|
||||
}
|
||||
}))
|
||||
.on_conflict(
|
||||
@@ -727,6 +728,7 @@ impl Database {
|
||||
is_ignored: db_entry.is_ignored,
|
||||
is_external: db_entry.is_external,
|
||||
git_status: db_entry.git_status.map(|status| status as i32),
|
||||
is_fifo: db_entry.is_fifo,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -663,6 +663,7 @@ impl Database {
|
||||
is_ignored: db_entry.is_ignored,
|
||||
is_external: db_entry.is_external,
|
||||
git_status: db_entry.git_status.map(|status| status as i32),
|
||||
is_fifo: db_entry.is_fifo,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,17 +15,17 @@ impl Database {
|
||||
let user = user::Entity::insert(user::ActiveModel {
|
||||
email_address: ActiveValue::set(Some(email_address.into())),
|
||||
github_login: ActiveValue::set(params.github_login.clone()),
|
||||
github_user_id: ActiveValue::set(Some(params.github_user_id)),
|
||||
github_user_id: ActiveValue::set(params.github_user_id),
|
||||
admin: ActiveValue::set(admin),
|
||||
metrics_id: ActiveValue::set(Uuid::new_v4()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::column(user::Column::GithubLogin)
|
||||
OnConflict::column(user::Column::GithubUserId)
|
||||
.update_columns([
|
||||
user::Column::Admin,
|
||||
user::Column::EmailAddress,
|
||||
user::Column::GithubUserId,
|
||||
user::Column::GithubLogin,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
@@ -99,7 +99,7 @@ impl Database {
|
||||
pub async fn get_or_create_user_by_github_account(
|
||||
&self,
|
||||
github_login: &str,
|
||||
github_user_id: Option<i32>,
|
||||
github_user_id: i32,
|
||||
github_email: Option<&str>,
|
||||
github_user_created_at: Option<DateTimeUtc>,
|
||||
initial_channel_id: Option<ChannelId>,
|
||||
@@ -121,70 +121,61 @@ impl Database {
|
||||
pub async fn get_or_create_user_by_github_account_tx(
|
||||
&self,
|
||||
github_login: &str,
|
||||
github_user_id: Option<i32>,
|
||||
github_user_id: i32,
|
||||
github_email: Option<&str>,
|
||||
github_user_created_at: Option<NaiveDateTime>,
|
||||
initial_channel_id: Option<ChannelId>,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<User> {
|
||||
if let Some(github_user_id) = github_user_id {
|
||||
if let Some(user_by_github_user_id) = user::Entity::find()
|
||||
.filter(user::Column::GithubUserId.eq(github_user_id))
|
||||
.one(tx)
|
||||
.await?
|
||||
{
|
||||
let mut user_by_github_user_id = user_by_github_user_id.into_active_model();
|
||||
user_by_github_user_id.github_login = ActiveValue::set(github_login.into());
|
||||
if github_user_created_at.is_some() {
|
||||
user_by_github_user_id.github_user_created_at =
|
||||
ActiveValue::set(github_user_created_at);
|
||||
}
|
||||
Ok(user_by_github_user_id.update(tx).await?)
|
||||
} else if let Some(user_by_github_login) = user::Entity::find()
|
||||
.filter(user::Column::GithubLogin.eq(github_login))
|
||||
.one(tx)
|
||||
.await?
|
||||
{
|
||||
let mut user_by_github_login = user_by_github_login.into_active_model();
|
||||
user_by_github_login.github_user_id = ActiveValue::set(Some(github_user_id));
|
||||
if github_user_created_at.is_some() {
|
||||
user_by_github_login.github_user_created_at =
|
||||
ActiveValue::set(github_user_created_at);
|
||||
}
|
||||
Ok(user_by_github_login.update(tx).await?)
|
||||
} else {
|
||||
let user = user::Entity::insert(user::ActiveModel {
|
||||
email_address: ActiveValue::set(github_email.map(|email| email.into())),
|
||||
github_login: ActiveValue::set(github_login.into()),
|
||||
github_user_id: ActiveValue::set(Some(github_user_id)),
|
||||
github_user_created_at: ActiveValue::set(github_user_created_at),
|
||||
admin: ActiveValue::set(false),
|
||||
invite_count: ActiveValue::set(0),
|
||||
invite_code: ActiveValue::set(None),
|
||||
metrics_id: ActiveValue::set(Uuid::new_v4()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec_with_returning(tx)
|
||||
.await?;
|
||||
if let Some(channel_id) = initial_channel_id {
|
||||
channel_member::Entity::insert(channel_member::ActiveModel {
|
||||
id: ActiveValue::NotSet,
|
||||
channel_id: ActiveValue::Set(channel_id),
|
||||
user_id: ActiveValue::Set(user.id),
|
||||
accepted: ActiveValue::Set(true),
|
||||
role: ActiveValue::Set(ChannelRole::Guest),
|
||||
})
|
||||
.exec(tx)
|
||||
.await?;
|
||||
}
|
||||
Ok(user)
|
||||
if let Some(user_by_github_user_id) = user::Entity::find()
|
||||
.filter(user::Column::GithubUserId.eq(github_user_id))
|
||||
.one(tx)
|
||||
.await?
|
||||
{
|
||||
let mut user_by_github_user_id = user_by_github_user_id.into_active_model();
|
||||
user_by_github_user_id.github_login = ActiveValue::set(github_login.into());
|
||||
if github_user_created_at.is_some() {
|
||||
user_by_github_user_id.github_user_created_at =
|
||||
ActiveValue::set(github_user_created_at);
|
||||
}
|
||||
Ok(user_by_github_user_id.update(tx).await?)
|
||||
} else if let Some(user_by_github_login) = user::Entity::find()
|
||||
.filter(user::Column::GithubLogin.eq(github_login))
|
||||
.one(tx)
|
||||
.await?
|
||||
{
|
||||
let mut user_by_github_login = user_by_github_login.into_active_model();
|
||||
user_by_github_login.github_user_id = ActiveValue::set(github_user_id);
|
||||
if github_user_created_at.is_some() {
|
||||
user_by_github_login.github_user_created_at =
|
||||
ActiveValue::set(github_user_created_at);
|
||||
}
|
||||
Ok(user_by_github_login.update(tx).await?)
|
||||
} else {
|
||||
let user = user::Entity::find()
|
||||
.filter(user::Column::GithubLogin.eq(github_login))
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such user {}", github_login))?;
|
||||
let user = user::Entity::insert(user::ActiveModel {
|
||||
email_address: ActiveValue::set(github_email.map(|email| email.into())),
|
||||
github_login: ActiveValue::set(github_login.into()),
|
||||
github_user_id: ActiveValue::set(github_user_id),
|
||||
github_user_created_at: ActiveValue::set(github_user_created_at),
|
||||
admin: ActiveValue::set(false),
|
||||
invite_count: ActiveValue::set(0),
|
||||
invite_code: ActiveValue::set(None),
|
||||
metrics_id: ActiveValue::set(Uuid::new_v4()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec_with_returning(tx)
|
||||
.await?;
|
||||
if let Some(channel_id) = initial_channel_id {
|
||||
channel_member::Entity::insert(channel_member::ActiveModel {
|
||||
id: ActiveValue::NotSet,
|
||||
channel_id: ActiveValue::Set(channel_id),
|
||||
user_id: ActiveValue::Set(user.id),
|
||||
accepted: ActiveValue::Set(true),
|
||||
role: ActiveValue::Set(ChannelRole::Guest),
|
||||
})
|
||||
.exec(tx)
|
||||
.await?;
|
||||
}
|
||||
Ok(user)
|
||||
}
|
||||
}
|
||||
@@ -377,4 +368,14 @@ impl Database {
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_users_missing_github_user_created_at(&self) -> Result<Vec<user::Model>> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(user::Entity::find()
|
||||
.filter(user::Column::GithubUserCreatedAt.is_null())
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: UserId,
|
||||
pub github_login: String,
|
||||
pub github_user_id: Option<i32>,
|
||||
pub github_user_id: i32,
|
||||
pub github_user_created_at: Option<NaiveDateTime>,
|
||||
pub email_address: Option<String>,
|
||||
pub admin: bool,
|
||||
|
||||
@@ -21,6 +21,7 @@ pub struct Model {
|
||||
pub is_external: bool,
|
||||
pub is_deleted: bool,
|
||||
pub scan_id: i64,
|
||||
pub is_fifo: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
||||
@@ -42,7 +42,7 @@ async fn test_channel_buffers(db: &Arc<Database>) {
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "user_c".into(),
|
||||
github_user_id: 102,
|
||||
github_user_id: 103,
|
||||
},
|
||||
)
|
||||
.await
|
||||
|
||||
@@ -25,7 +25,7 @@ async fn test_contributors(db: &Arc<Database>) {
|
||||
assert_eq!(db.get_contributors().await.unwrap(), Vec::<String>::new());
|
||||
|
||||
let user1_created_at = Utc::now();
|
||||
db.add_contributor("user1", Some(1), None, Some(user1_created_at), None)
|
||||
db.add_contributor("user1", 1, None, Some(user1_created_at), None)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
@@ -34,7 +34,7 @@ async fn test_contributors(db: &Arc<Database>) {
|
||||
);
|
||||
|
||||
let user2_created_at = Utc::now();
|
||||
db.add_contributor("user2", Some(2), None, Some(user2_created_at), None)
|
||||
db.add_contributor("user2", 2, None, Some(user2_created_at), None)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
|
||||
@@ -45,25 +45,25 @@ async fn test_get_users(db: &Arc<Database>) {
|
||||
(
|
||||
user_ids[0],
|
||||
"user1".to_string(),
|
||||
Some(1),
|
||||
1,
|
||||
Some("user1@example.com".to_string()),
|
||||
),
|
||||
(
|
||||
user_ids[1],
|
||||
"user2".to_string(),
|
||||
Some(2),
|
||||
2,
|
||||
Some("user2@example.com".to_string()),
|
||||
),
|
||||
(
|
||||
user_ids[2],
|
||||
"user3".to_string(),
|
||||
Some(3),
|
||||
3,
|
||||
Some("user3@example.com".to_string()),
|
||||
),
|
||||
(
|
||||
user_ids[3],
|
||||
"user4".to_string(),
|
||||
Some(4),
|
||||
4,
|
||||
Some("user4@example.com".to_string()),
|
||||
)
|
||||
]
|
||||
@@ -101,23 +101,17 @@ async fn test_get_or_create_user_by_github_account(db: &Arc<Database>) {
|
||||
.user_id;
|
||||
|
||||
let user = db
|
||||
.get_or_create_user_by_github_account(
|
||||
"the-new-login2",
|
||||
Some(102),
|
||||
None,
|
||||
Some(Utc::now()),
|
||||
None,
|
||||
)
|
||||
.get_or_create_user_by_github_account("the-new-login2", 102, None, Some(Utc::now()), None)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(user.id, user_id2);
|
||||
assert_eq!(&user.github_login, "the-new-login2");
|
||||
assert_eq!(user.github_user_id, Some(102));
|
||||
assert_eq!(user.github_user_id, 102);
|
||||
|
||||
let user = db
|
||||
.get_or_create_user_by_github_account(
|
||||
"login3",
|
||||
Some(103),
|
||||
103,
|
||||
Some("user3@example.com"),
|
||||
Some(Utc::now()),
|
||||
None,
|
||||
@@ -125,7 +119,7 @@ async fn test_get_or_create_user_by_github_account(db: &Arc<Database>) {
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(&user.github_login, "login3");
|
||||
assert_eq!(user.github_user_id, Some(103));
|
||||
assert_eq!(user.github_user_id, 103);
|
||||
assert_eq!(user.email_address, Some("user3@example.com".into()));
|
||||
}
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ pub mod migrations;
|
||||
mod rate_limiter;
|
||||
pub mod rpc;
|
||||
pub mod seed;
|
||||
pub mod user_backfiller;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
@@ -177,6 +178,7 @@ pub struct Config {
|
||||
pub stripe_api_key: Option<String>,
|
||||
pub stripe_price_id: Option<Arc<str>>,
|
||||
pub supermaven_admin_api_key: Option<Arc<str>>,
|
||||
pub user_backfiller_github_access_token: Option<Arc<str>>,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
@@ -235,6 +237,7 @@ impl Config {
|
||||
supermaven_admin_api_key: None,
|
||||
qwen2_7b_api_key: None,
|
||||
qwen2_7b_api_url: None,
|
||||
user_backfiller_github_access_token: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ use axum::{
|
||||
Extension, Json, Router, TypedHeader,
|
||||
};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use collections::HashMap;
|
||||
use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase};
|
||||
use futures::{Stream, StreamExt as _};
|
||||
use http_client::IsahcHttpClient;
|
||||
@@ -29,6 +30,7 @@ use std::{
|
||||
sync::Arc,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
use strum::IntoEnumIterator;
|
||||
use telemetry::{report_llm_rate_limit, report_llm_usage, LlmRateLimitEventRow, LlmUsageEventRow};
|
||||
use tokio::sync::RwLock;
|
||||
use util::ResultExt;
|
||||
@@ -41,7 +43,8 @@ pub struct LlmState {
|
||||
pub db: Arc<LlmDatabase>,
|
||||
pub http_client: IsahcHttpClient,
|
||||
pub clickhouse_client: Option<clickhouse::Client>,
|
||||
active_user_count: RwLock<Option<(DateTime<Utc>, ActiveUserCount)>>,
|
||||
active_user_count_by_model:
|
||||
RwLock<HashMap<(LanguageModelProvider, String), (DateTime<Utc>, ActiveUserCount)>>,
|
||||
}
|
||||
|
||||
const ACTIVE_USER_COUNT_CACHE_DURATION: Duration = Duration::seconds(30);
|
||||
@@ -69,9 +72,6 @@ impl LlmState {
|
||||
.build()
|
||||
.context("failed to construct http client")?;
|
||||
|
||||
let initial_active_user_count =
|
||||
Some((Utc::now(), db.get_active_user_count(Utc::now()).await?));
|
||||
|
||||
let this = Self {
|
||||
executor,
|
||||
db,
|
||||
@@ -80,25 +80,34 @@ impl LlmState {
|
||||
.clickhouse_url
|
||||
.as_ref()
|
||||
.and_then(|_| build_clickhouse_client(&config).log_err()),
|
||||
active_user_count: RwLock::new(initial_active_user_count),
|
||||
active_user_count_by_model: RwLock::new(HashMap::default()),
|
||||
config,
|
||||
};
|
||||
|
||||
Ok(Arc::new(this))
|
||||
}
|
||||
|
||||
pub async fn get_active_user_count(&self) -> Result<ActiveUserCount> {
|
||||
pub async fn get_active_user_count(
|
||||
&self,
|
||||
provider: LanguageModelProvider,
|
||||
model: &str,
|
||||
) -> Result<ActiveUserCount> {
|
||||
let now = Utc::now();
|
||||
|
||||
if let Some((last_updated, count)) = self.active_user_count.read().await.as_ref() {
|
||||
if now - *last_updated < ACTIVE_USER_COUNT_CACHE_DURATION {
|
||||
return Ok(*count);
|
||||
{
|
||||
let active_user_count_by_model = self.active_user_count_by_model.read().await;
|
||||
if let Some((last_updated, count)) =
|
||||
active_user_count_by_model.get(&(provider, model.to_string()))
|
||||
{
|
||||
if now - *last_updated < ACTIVE_USER_COUNT_CACHE_DURATION {
|
||||
return Ok(*count);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut cache = self.active_user_count.write().await;
|
||||
let new_count = self.db.get_active_user_count(now).await?;
|
||||
*cache = Some((now, new_count));
|
||||
let mut cache = self.active_user_count_by_model.write().await;
|
||||
let new_count = self.db.get_active_user_count(provider, model, now).await?;
|
||||
cache.insert((provider, model.to_string()), (now, new_count));
|
||||
Ok(new_count)
|
||||
}
|
||||
}
|
||||
@@ -402,6 +411,11 @@ fn normalize_model_name(known_models: Vec<String>, name: String) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
/// The maximum lifetime spending an individual user can reach before being cut off.
|
||||
///
|
||||
/// Represented in cents.
|
||||
const LIFETIME_SPENDING_LIMIT_IN_CENTS: usize = 1_000 * 100;
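// (For reference: 1_000 * 100 cents = 100,000 cents, i.e. a $1,000 lifetime cap per user.)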
|
||||
|
||||
async fn check_usage_limit(
|
||||
state: &Arc<LlmState>,
|
||||
provider: LanguageModelProvider,
|
||||
@@ -419,7 +433,14 @@ async fn check_usage_limit(
|
||||
)
|
||||
.await?;
|
||||
|
||||
let active_users = state.get_active_user_count().await?;
|
||||
if usage.lifetime_spending >= LIFETIME_SPENDING_LIMIT_IN_CENTS {
|
||||
return Err(Error::http(
|
||||
StatusCode::FORBIDDEN,
|
||||
"Maximum spending limit reached.".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let active_users = state.get_active_user_count(provider, model_name).await?;
|
||||
|
||||
let users_in_recent_minutes = active_users.users_in_recent_minutes.max(1);
|
||||
let users_in_recent_days = active_users.users_in_recent_days.max(1);
|
||||
@@ -463,6 +484,24 @@ async fn check_usage_limit(
|
||||
};
|
||||
|
||||
if let Some(client) = state.clickhouse_client.as_ref() {
|
||||
tracing::info!(
|
||||
target: "user rate limit",
|
||||
user_id = claims.user_id,
|
||||
login = claims.github_user_login,
|
||||
authn.jti = claims.jti,
|
||||
is_staff = claims.is_staff,
|
||||
provider = provider.to_string(),
|
||||
model = model.name,
|
||||
requests_this_minute = usage.requests_this_minute,
|
||||
tokens_this_minute = usage.tokens_this_minute,
|
||||
tokens_this_day = usage.tokens_this_day,
|
||||
users_in_recent_minutes = users_in_recent_minutes,
|
||||
users_in_recent_days = users_in_recent_days,
|
||||
max_requests_per_minute = per_user_max_requests_per_minute,
|
||||
max_tokens_per_minute = per_user_max_tokens_per_minute,
|
||||
max_tokens_per_day = per_user_max_tokens_per_day,
|
||||
);
|
||||
|
||||
report_llm_rate_limit(
|
||||
client,
|
||||
LlmRateLimitEventRow {
|
||||
@@ -605,23 +644,39 @@ pub fn log_usage_periodically(state: Arc<LlmState>) {
|
||||
.sleep(std::time::Duration::from_secs(30))
|
||||
.await;
|
||||
|
||||
let Some(usages) = state
|
||||
for provider in LanguageModelProvider::iter() {
|
||||
for model in state.db.model_names_for_provider(provider) {
|
||||
if let Some(active_user_count) = state
|
||||
.get_active_user_count(provider, &model)
|
||||
.await
|
||||
.log_err()
|
||||
{
|
||||
tracing::info!(
|
||||
target: "active user counts",
|
||||
provider = provider.to_string(),
|
||||
model = model,
|
||||
users_in_recent_minutes = active_user_count.users_in_recent_minutes,
|
||||
users_in_recent_days = active_user_count.users_in_recent_days,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(usages) = state
|
||||
.db
|
||||
.get_application_wide_usages_by_model(Utc::now())
|
||||
.await
|
||||
.log_err()
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for usage in usages {
|
||||
tracing::info!(
|
||||
target: "computed usage",
|
||||
provider = usage.provider.to_string(),
|
||||
model = usage.model,
|
||||
requests_this_minute = usage.requests_this_minute,
|
||||
tokens_this_minute = usage.tokens_this_minute,
|
||||
);
|
||||
{
|
||||
for usage in usages {
|
||||
tracing::info!(
|
||||
target: "computed usage",
|
||||
provider = usage.provider.to_string(),
|
||||
model = usage.model,
|
||||
requests_this_minute = usage.requests_this_minute,
|
||||
tokens_this_minute = usage.tokens_this_minute,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
@@ -343,15 +343,30 @@ impl LlmDatabase {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_active_user_count(&self, now: DateTimeUtc) -> Result<ActiveUserCount> {
|
||||
/// Returns the active user count for the specified model.
|
||||
pub async fn get_active_user_count(
|
||||
&self,
|
||||
provider: LanguageModelProvider,
|
||||
model_name: &str,
|
||||
now: DateTimeUtc,
|
||||
) -> Result<ActiveUserCount> {
|
||||
self.transaction(|tx| async move {
|
||||
let minute_since = now - Duration::minutes(5);
|
||||
let day_since = now - Duration::days(5);
|
||||
|
||||
let model = self
|
||||
.models
|
||||
.get(&(provider, model_name.to_string()))
|
||||
.ok_or_else(|| anyhow!("unknown model {provider}:{model_name}"))?;
|
||||
|
||||
let tokens_per_minute = self.usage_measure_ids[&UsageMeasure::TokensPerMinute];
|
||||
|
||||
let users_in_recent_minutes = usage::Entity::find()
|
||||
.filter(
|
||||
usage::Column::Timestamp
|
||||
.gte(minute_since.naive_utc())
|
||||
usage::Column::ModelId
|
||||
.eq(model.id)
|
||||
.and(usage::Column::MeasureId.eq(tokens_per_minute))
|
||||
.and(usage::Column::Timestamp.gte(minute_since.naive_utc()))
|
||||
.and(usage::Column::IsStaff.eq(false)),
|
||||
)
|
||||
.select_only()
|
||||
@@ -362,8 +377,10 @@ impl LlmDatabase {
|
||||
|
||||
let users_in_recent_days = usage::Entity::find()
|
||||
.filter(
|
||||
usage::Column::Timestamp
|
||||
.gte(day_since.naive_utc())
|
||||
usage::Column::ModelId
|
||||
.eq(model.id)
|
||||
.and(usage::Column::MeasureId.eq(tokens_per_minute))
|
||||
.and(usage::Column::Timestamp.gte(day_since.naive_utc()))
|
||||
.and(usage::Column::IsStaff.eq(false)),
|
||||
)
|
||||
.select_only()
|
||||
|
||||
@@ -7,6 +7,7 @@ use axum::{
|
||||
};
|
||||
use collab::llm::{db::LlmDatabase, log_usage_periodically};
|
||||
use collab::migrations::run_database_migrations;
|
||||
use collab::user_backfiller::spawn_user_backfiller;
|
||||
use collab::{api::billing::poll_stripe_events_periodically, llm::LlmState, ServiceMode};
|
||||
use collab::{
|
||||
api::fetch_extensions_from_blob_store_periodically, db, env, executor::Executor,
|
||||
@@ -131,6 +132,7 @@ async fn main() -> Result<()> {
|
||||
if mode.is_api() {
|
||||
poll_stripe_events_periodically(state.clone());
|
||||
fetch_extensions_from_blob_store_periodically(state.clone());
|
||||
spawn_user_backfiller(state.clone());
|
||||
|
||||
app = app
|
||||
.merge(collab::api::events::router())
|
||||
@@ -300,10 +302,7 @@ async fn handle_liveness_probe(
|
||||
}
|
||||
|
||||
if let Some(llm_state) = llm_state {
|
||||
llm_state
|
||||
.db
|
||||
.get_active_user_count(chrono::Utc::now())
|
||||
.await?;
|
||||
llm_state.db.list_providers().await?;
|
||||
}
|
||||
|
||||
Ok("ok".to_string())
|
||||
|
||||
@@ -78,8 +78,6 @@ use tracing::{
|
||||
info_span, instrument, Instrument,
|
||||
};
|
||||
|
||||
use self::connection_pool::VersionedMessage;
|
||||
|
||||
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
|
||||
|
||||
// kubernetes gives terminated pods 10s to shutdown gracefully. After they're gone, we can clean up old resources.
|
||||
@@ -478,6 +476,7 @@ impl Server {
|
||||
.add_request_handler(user_handler(
|
||||
forward_read_only_project_request::<proto::SearchProject>,
|
||||
))
|
||||
.add_request_handler(user_handler(forward_find_search_candidates_request))
|
||||
.add_request_handler(user_handler(
|
||||
forward_read_only_project_request::<proto::GetDocumentHighlights>,
|
||||
))
|
||||
@@ -506,7 +505,7 @@ impl Server {
|
||||
forward_mutating_project_request::<proto::ApplyCompletionAdditionalEdits>,
|
||||
))
|
||||
.add_request_handler(user_handler(
|
||||
forward_versioned_mutating_project_request::<proto::OpenNewBuffer>,
|
||||
forward_mutating_project_request::<proto::OpenNewBuffer>,
|
||||
))
|
||||
.add_request_handler(user_handler(
|
||||
forward_mutating_project_request::<proto::ResolveCompletionDocumentation>,
|
||||
@@ -548,7 +547,7 @@ impl Server {
|
||||
forward_mutating_project_request::<proto::OnTypeFormatting>,
|
||||
))
|
||||
.add_request_handler(user_handler(
|
||||
forward_versioned_mutating_project_request::<proto::SaveBuffer>,
|
||||
forward_mutating_project_request::<proto::SaveBuffer>,
|
||||
))
|
||||
.add_request_handler(user_handler(
|
||||
forward_mutating_project_request::<proto::BlameBuffer>,
|
||||
@@ -2943,6 +2942,59 @@ where
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn forward_find_search_candidates_request(
|
||||
request: proto::FindSearchCandidates,
|
||||
response: Response<proto::FindSearchCandidates>,
|
||||
session: UserSession,
|
||||
) -> Result<()> {
|
||||
let project_id = ProjectId::from_proto(request.remote_entity_id());
|
||||
let host_connection_id = session
|
||||
.db()
|
||||
.await
|
||||
.host_for_read_only_project_request(project_id, session.connection_id, session.user_id())
|
||||
.await?;
|
||||
|
||||
let host_version = session
|
||||
.connection_pool()
|
||||
.await
|
||||
.connection(host_connection_id)
|
||||
.map(|c| c.zed_version);
|
||||
|
||||
if host_version.is_some_and(|host_version| host_version < ZedVersion::with_search_candidates())
|
||||
{
|
||||
let query = request.query.ok_or_else(|| anyhow!("missing query"))?;
|
||||
let search = proto::SearchProject {
|
||||
project_id: project_id.to_proto(),
|
||||
query: query.query,
|
||||
regex: query.regex,
|
||||
whole_word: query.whole_word,
|
||||
case_sensitive: query.case_sensitive,
|
||||
files_to_include: query.files_to_include,
|
||||
files_to_exclude: query.files_to_exclude,
|
||||
include_ignored: query.include_ignored,
|
||||
};
|
||||
|
||||
let payload = session
|
||||
.peer
|
||||
.forward_request(session.connection_id, host_connection_id, search)
|
||||
.await?;
|
||||
return response.send(proto::FindSearchCandidatesResponse {
|
||||
buffer_ids: payload
|
||||
.locations
|
||||
.into_iter()
|
||||
.map(|loc| loc.buffer_id)
|
||||
.collect(),
|
||||
});
|
||||
}
|
||||
|
||||
let payload = session
|
||||
.peer
|
||||
.forward_request(session.connection_id, host_connection_id, request)
|
||||
.await?;
|
||||
response.send(payload)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// forward a project request to the dev server. Only allowed
|
||||
/// if it's your dev server.
|
||||
async fn forward_project_request_for_owner<T>(
|
||||
@@ -2993,45 +3045,6 @@ where
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// forward a project request to the host. These requests are disallowed
|
||||
/// for guests.
|
||||
async fn forward_versioned_mutating_project_request<T>(
|
||||
request: T,
|
||||
response: Response<T>,
|
||||
session: UserSession,
|
||||
) -> Result<()>
|
||||
where
|
||||
T: EntityMessage + RequestMessage + VersionedMessage,
|
||||
{
|
||||
let project_id = ProjectId::from_proto(request.remote_entity_id());
|
||||
|
||||
let host_connection_id = session
|
||||
.db()
|
||||
.await
|
||||
.host_for_mutating_project_request(project_id, session.connection_id, session.user_id())
|
||||
.await?;
|
||||
if let Some(host_version) = session
|
||||
.connection_pool()
|
||||
.await
|
||||
.connection(host_connection_id)
|
||||
.map(|c| c.zed_version)
|
||||
{
|
||||
if let Some(min_required_version) = request.required_host_version() {
|
||||
if min_required_version > host_version {
|
||||
return Err(anyhow!(ErrorCode::RemoteUpgradeRequired
|
||||
.with_tag("required", &min_required_version.to_string())))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let payload = session
|
||||
.peer
|
||||
.forward_request(session.connection_id, host_connection_id, request)
|
||||
.await?;
|
||||
response.send(payload)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Notify other participants that a new buffer has been created
|
||||
async fn create_buffer_for_peer(
|
||||
request: proto::CreateBufferForPeer,
|
||||
|
||||
@@ -32,37 +32,15 @@ impl fmt::Display for ZedVersion {
|
||||
|
||||
impl ZedVersion {
|
||||
pub fn can_collaborate(&self) -> bool {
|
||||
self.0 >= SemanticVersion::new(0, 129, 2)
|
||||
}
|
||||
|
||||
pub fn with_save_as() -> ZedVersion {
|
||||
ZedVersion(SemanticVersion::new(0, 134, 0))
|
||||
self.0 >= SemanticVersion::new(0, 134, 0)
|
||||
}
|
||||
|
||||
pub fn with_list_directory() -> ZedVersion {
|
||||
ZedVersion(SemanticVersion::new(0, 145, 0))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait VersionedMessage {
|
||||
fn required_host_version(&self) -> Option<ZedVersion> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl VersionedMessage for proto::SaveBuffer {
|
||||
fn required_host_version(&self) -> Option<ZedVersion> {
|
||||
if self.new_path.is_some() {
|
||||
Some(ZedVersion::with_save_as())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl VersionedMessage for proto::OpenNewBuffer {
|
||||
fn required_host_version(&self) -> Option<ZedVersion> {
|
||||
Some(ZedVersion::with_save_as())
|
||||
pub fn with_search_candidates() -> ZedVersion {
|
||||
ZedVersion(SemanticVersion::new(0, 151, 0))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -127,7 +127,7 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result
|
||||
let user = db
|
||||
.get_or_create_user_by_github_account(
|
||||
&github_user.login,
|
||||
Some(github_user.id),
|
||||
github_user.id,
|
||||
github_user.email.as_deref(),
|
||||
None,
|
||||
None,
|
||||
|
||||
@@ -168,7 +168,7 @@ async fn test_channel_requires_zed_cla(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
server
|
||||
.app_state
|
||||
.db
|
||||
.get_or_create_user_by_github_account("user_b", Some(100), None, Some(Utc::now()), None)
|
||||
.get_or_create_user_by_github_account("user_b", 100, None, Some(Utc::now()), None)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -266,7 +266,7 @@ async fn test_channel_requires_zed_cla(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
server
|
||||
.app_state
|
||||
.db
|
||||
.add_contributor("user_b", Some(100), None, Some(Utc::now()), None)
|
||||
.add_contributor("user_b", 100, None, Some(Utc::now()), None)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
||||
@@ -28,8 +28,8 @@ use live_kit_client::MacOSDisplay;
|
||||
use lsp::LanguageServerId;
|
||||
use parking_lot::Mutex;
|
||||
use project::{
|
||||
search::SearchQuery, DiagnosticSummary, FormatTrigger, HoverBlockKind, Project, ProjectPath,
|
||||
SearchResult,
|
||||
search::SearchQuery, search::SearchResult, DiagnosticSummary, FormatTrigger, HoverBlockKind,
|
||||
Project, ProjectPath,
|
||||
};
|
||||
use rand::prelude::*;
|
||||
use serde_json::json;
|
||||
@@ -3178,7 +3178,7 @@ async fn test_fs_operations(
|
||||
|
||||
project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project.copy_entry(entry.id, Path::new("f.txt"), cx)
|
||||
project.copy_entry(entry.id, None, Path::new("f.txt"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
|
||||
@@ -15,7 +15,7 @@ use language::{
|
||||
use lsp::FakeLanguageServer;
|
||||
use pretty_assertions::assert_eq;
|
||||
use project::{
|
||||
search::SearchQuery, Project, ProjectPath, SearchResult, DEFAULT_COMPLETION_CONTEXT,
|
||||
search::SearchQuery, search::SearchResult, Project, ProjectPath, DEFAULT_COMPLETION_CONTEXT,
|
||||
};
|
||||
use rand::{
|
||||
distributions::{Alphanumeric, DistString},
|
||||
@@ -298,7 +298,8 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
continue;
|
||||
};
|
||||
let project_root_name = root_name_for_project(&project, cx);
|
||||
let is_local = project.read_with(cx, |project, _| project.is_local());
|
||||
let is_local =
|
||||
project.read_with(cx, |project, _| project.is_local_or_ssh());
|
||||
let worktree = project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.worktrees(cx)
|
||||
@@ -334,7 +335,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
continue;
|
||||
};
|
||||
let project_root_name = root_name_for_project(&project, cx);
|
||||
let is_local = project.read_with(cx, |project, _| project.is_local());
|
||||
let is_local = project.read_with(cx, |project, _| project.is_local_or_ssh());
|
||||
|
||||
match rng.gen_range(0..100_u32) {
|
||||
// Manipulate an existing buffer
|
||||
@@ -1254,7 +1255,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
let buffers = client.buffers().clone();
|
||||
for (guest_project, guest_buffers) in &buffers {
|
||||
let project_id = if guest_project.read_with(client_cx, |project, _| {
|
||||
project.is_local() || project.is_disconnected()
|
||||
project.is_local_or_ssh() || project.is_disconnected()
|
||||
}) {
|
||||
continue;
|
||||
} else {
|
||||
@@ -1558,7 +1559,9 @@ async fn ensure_project_shared(
|
||||
let first_root_name = root_name_for_project(project, cx);
|
||||
let active_call = cx.read(ActiveCall::global);
|
||||
if active_call.read_with(cx, |call, _| call.room().is_some())
|
||||
&& project.read_with(cx, |project, _| project.is_local() && !project.is_shared())
|
||||
&& project.read_with(cx, |project, _| {
|
||||
project.is_local_or_ssh() && !project.is_shared()
|
||||
})
|
||||
{
|
||||
match active_call
|
||||
.update(cx, |call, cx| call.share_project(project.clone(), cx))
|
||||
|
||||
@@ -682,6 +682,7 @@ impl TestServer {
|
||||
supermaven_admin_api_key: None,
|
||||
qwen2_7b_api_key: None,
|
||||
qwen2_7b_api_url: None,
|
||||
user_backfiller_github_access_token: None,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
crates/collab/src/user_backfiller.rs (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use chrono::{DateTime, Utc};
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::db::Database;
|
||||
use crate::executor::Executor;
|
||||
use crate::{AppState, Config};
|
||||
|
||||
pub fn spawn_user_backfiller(app_state: Arc<AppState>) {
|
||||
let Some(user_backfiller_github_access_token) =
|
||||
app_state.config.user_backfiller_github_access_token.clone()
|
||||
else {
|
||||
log::info!("no USER_BACKFILLER_GITHUB_ACCESS_TOKEN set; not spawning user backfiller");
|
||||
return;
|
||||
};
|
||||
|
||||
let executor = app_state.executor.clone();
|
||||
executor.spawn_detached({
|
||||
let executor = executor.clone();
|
||||
async move {
|
||||
let user_backfiller = UserBackfiller::new(
|
||||
app_state.config.clone(),
|
||||
user_backfiller_github_access_token,
|
||||
app_state.db.clone(),
|
||||
executor,
|
||||
);
|
||||
|
||||
log::info!("backfilling users");
|
||||
|
||||
user_backfiller
|
||||
.backfill_github_user_created_at()
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const GITHUB_REQUESTS_PER_HOUR_LIMIT: usize = 5_000;
|
||||
const SLEEP_DURATION_BETWEEN_USERS: std::time::Duration = std::time::Duration::from_millis(
|
||||
(GITHUB_REQUESTS_PER_HOUR_LIMIT as f64 / 60. / 60. * 1000.) as u64,
|
||||
);
|
||||
|
||||
struct UserBackfiller {
|
||||
config: Config,
|
||||
github_access_token: Arc<str>,
|
||||
db: Arc<Database>,
|
||||
http_client: reqwest::Client,
|
||||
executor: Executor,
|
||||
}
|
||||
|
||||
impl UserBackfiller {
|
||||
fn new(
|
||||
config: Config,
|
||||
github_access_token: Arc<str>,
|
||||
db: Arc<Database>,
|
||||
executor: Executor,
|
||||
) -> Self {
|
||||
Self {
|
||||
config,
|
||||
github_access_token,
|
||||
db,
|
||||
http_client: reqwest::Client::new(),
|
||||
executor,
|
||||
}
|
||||
}
|
||||
|
||||
async fn backfill_github_user_created_at(&self) -> Result<()> {
|
||||
let initial_channel_id = self.config.auto_join_channel_id;
|
||||
|
||||
let users_missing_github_user_created_at =
|
||||
self.db.get_users_missing_github_user_created_at().await?;
|
||||
|
||||
for user in users_missing_github_user_created_at {
|
||||
match self
|
||||
.fetch_github_user(&format!(
|
||||
"https://api.github.com/user/{}",
|
||||
user.github_user_id
|
||||
))
|
||||
.await
|
||||
{
|
||||
Ok(github_user) => {
|
||||
self.db
|
||||
.get_or_create_user_by_github_account(
|
||||
&user.github_login,
|
||||
github_user.id,
|
||||
user.email_address.as_deref(),
|
||||
Some(github_user.created_at),
|
||||
initial_channel_id,
|
||||
)
|
||||
.await?;
|
||||
|
||||
log::info!("backfilled user: {}", user.github_login);
|
||||
}
|
||||
Err(err) => {
|
||||
log::error!("failed to fetch GitHub user {}: {err}", user.github_login);
|
||||
}
|
||||
}
|
||||
|
||||
self.executor.sleep(SLEEP_DURATION_BETWEEN_USERS).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn fetch_github_user(&self, url: &str) -> Result<GithubUser> {
|
||||
let response = self
|
||||
.http_client
|
||||
.get(url)
|
||||
.header(
|
||||
"authorization",
|
||||
format!("Bearer {}", self.github_access_token),
|
||||
)
|
||||
.header("user-agent", "zed")
|
||||
.send()
|
||||
.await
|
||||
.with_context(|| format!("failed to fetch '{url}'"))?;
|
||||
|
||||
let rate_limit_remaining = response
|
||||
.headers()
|
||||
.get("x-ratelimit-remaining")
|
||||
.and_then(|value| value.to_str().ok())
|
||||
.and_then(|value| value.parse::<i32>().ok());
|
||||
let rate_limit_reset = response
|
||||
.headers()
|
||||
.get("x-ratelimit-reset")
|
||||
.and_then(|value| value.to_str().ok())
|
||||
.and_then(|value| value.parse::<i64>().ok())
|
||||
.and_then(|value| DateTime::from_timestamp(value, 0));
|
||||
|
||||
if rate_limit_remaining == Some(0) {
|
||||
if let Some(reset_at) = rate_limit_reset {
|
||||
let now = Utc::now();
|
||||
if reset_at > now {
|
||||
let sleep_duration = reset_at - now;
|
||||
log::info!(
|
||||
"rate limit reached. Sleeping for {} seconds",
|
||||
sleep_duration.num_seconds()
|
||||
);
|
||||
self.executor.sleep(sleep_duration.to_std().unwrap()).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let response = match response.error_for_status() {
|
||||
Ok(response) => response,
|
||||
Err(err) => return Err(anyhow!("failed to fetch GitHub user: {err}")),
|
||||
};
|
||||
|
||||
response
|
||||
.json()
|
||||
.await
|
||||
.with_context(|| format!("failed to deserialize GitHub user from '{url}'"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
struct GithubUser {
|
||||
id: i32,
|
||||
created_at: DateTime<Utc>,
|
||||
}
|
||||
@@ -1395,15 +1395,22 @@ impl CollabPanel {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn reset_filter_editor_text(&mut self, cx: &mut ViewContext<Self>) -> bool {
|
||||
self.filter_editor.update(cx, |editor, cx| {
|
||||
if editor.buffer().read(cx).len(cx) > 0 {
|
||||
editor.set_text("", cx);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
|
||||
if self.take_editing_state(cx) {
|
||||
cx.focus_view(&self.filter_editor);
|
||||
} else {
|
||||
self.filter_editor.update(cx, |editor, cx| {
|
||||
if editor.buffer().read(cx).len(cx) > 0 {
|
||||
editor.set_text("", cx);
|
||||
}
|
||||
});
|
||||
} else if !self.reset_filter_editor_text(cx) {
|
||||
self.focus_handle.focus(cx);
|
||||
}
|
||||
|
||||
if self.context_menu.is_some() {
|
||||
|
||||
@@ -1060,7 +1060,7 @@ mod tests {
|
||||
editor.change_selections(None, cx, |selections| {
|
||||
selections.select_ranges([Point::new(0, 0)..Point::new(0, 0)])
|
||||
});
|
||||
editor.next_inline_completion(&Default::default(), cx);
|
||||
editor.refresh_inline_completion(true, false, cx);
|
||||
});
|
||||
|
||||
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
|
||||
@@ -1070,7 +1070,7 @@ mod tests {
|
||||
editor.change_selections(None, cx, |s| {
|
||||
s.select_ranges([Point::new(2, 0)..Point::new(2, 0)])
|
||||
});
|
||||
editor.next_inline_completion(&Default::default(), cx);
|
||||
editor.refresh_inline_completion(true, false, cx);
|
||||
});
|
||||
|
||||
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
|
||||
|
||||
crates/docs_preprocessor/Cargo.toml (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
[package]
|
||||
name = "docs_preprocessor"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
clap.workspace = true
|
||||
mdbook = "0.4.40"
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
regex.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/docs_preprocessor.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "docs_preprocessor"
|
||||
path = "src/main.rs"
|
||||
crates/docs_preprocessor/src/docs_preprocessor.rs (new file, 93 lines)
@@ -0,0 +1,93 @@
|
||||
use anyhow::Result;
|
||||
use mdbook::book::{Book, BookItem};
|
||||
use mdbook::errors::Error;
|
||||
use mdbook::preprocess::{Preprocessor, PreprocessorContext as MdBookContext};
|
||||
use settings::KeymapFile;
|
||||
use std::sync::Arc;
|
||||
use util::asset_str;
|
||||
|
||||
mod templates;
|
||||
|
||||
use templates::{ActionTemplate, KeybindingTemplate, Template};
|
||||
|
||||
pub struct PreprocessorContext {
|
||||
macos_keymap: Arc<KeymapFile>,
|
||||
linux_keymap: Arc<KeymapFile>,
|
||||
}
|
||||
|
||||
impl PreprocessorContext {
|
||||
pub fn new() -> Result<Self> {
|
||||
let macos_keymap = Arc::new(load_keymap("keymaps/default-macos.json")?);
|
||||
let linux_keymap = Arc::new(load_keymap("keymaps/default-linux.json")?);
|
||||
Ok(Self {
|
||||
macos_keymap,
|
||||
linux_keymap,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn find_binding(&self, os: &str, action: &str) -> Option<String> {
|
||||
let keymap = match os {
|
||||
"macos" => &self.macos_keymap,
|
||||
"linux" => &self.linux_keymap,
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
keymap.blocks().iter().find_map(|block| {
|
||||
block.bindings().iter().find_map(|(keystroke, a)| {
|
||||
if a.to_string() == action {
|
||||
Some(keystroke.to_string())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn load_keymap(asset_path: &str) -> Result<KeymapFile> {
|
||||
let content = asset_str::<settings::SettingsAssets>(asset_path);
|
||||
KeymapFile::parse(content.as_ref())
|
||||
}
|
||||
|
||||
pub struct ZedDocsPreprocessor {
|
||||
context: PreprocessorContext,
|
||||
templates: Vec<Box<dyn Template>>,
|
||||
}
|
||||
|
||||
impl ZedDocsPreprocessor {
|
||||
pub fn new() -> Result<Self> {
|
||||
let context = PreprocessorContext::new()?;
|
||||
let templates: Vec<Box<dyn Template>> = vec![
|
||||
Box::new(KeybindingTemplate::new()),
|
||||
Box::new(ActionTemplate::new()),
|
||||
];
|
||||
Ok(Self { context, templates })
|
||||
}
|
||||
|
||||
fn process_content(&self, content: &str) -> String {
|
||||
let mut processed = content.to_string();
|
||||
for template in &self.templates {
|
||||
processed = template.process(&self.context, &processed);
|
||||
}
|
||||
processed
|
||||
}
|
||||
}
|
||||
|
||||
impl Preprocessor for ZedDocsPreprocessor {
|
||||
fn name(&self) -> &str {
|
||||
"zed-docs-preprocessor"
|
||||
}
|
||||
|
||||
fn run(&self, _ctx: &MdBookContext, mut book: Book) -> Result<Book, Error> {
|
||||
book.for_each_mut(|item| {
|
||||
if let BookItem::Chapter(chapter) = item {
|
||||
chapter.content = self.process_content(&chapter.content);
|
||||
}
|
||||
});
|
||||
Ok(book)
|
||||
}
|
||||
|
||||
fn supports_renderer(&self, renderer: &str) -> bool {
|
||||
renderer != "not-supported"
|
||||
}
|
||||
}
|
||||
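PreprocessorContext is the piece the templates query: it loads the bundled default keymaps once and answers "what is the first keystroke bound to this action on this OS". A minimal usage sketch, assuming it is called from another binary or test in the workspace; the action name and the keystroke in the comment are illustrative, not asserted by this change:

use docs_preprocessor::PreprocessorContext;

fn main() -> anyhow::Result<()> {
    let context = PreprocessorContext::new()?;
    // A Some("cmd-shift-p")-style result if the macOS default keymap binds the action; None otherwise.
    let binding = context.find_binding("macos", "command_palette::Toggle");
    println!("{binding:?}");
    Ok(())
}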
58 crates/docs_preprocessor/src/main.rs Normal file
@@ -0,0 +1,58 @@
use anyhow::{Context, Result};
use clap::{Arg, ArgMatches, Command};
use docs_preprocessor::ZedDocsPreprocessor;
use mdbook::preprocess::{CmdPreprocessor, Preprocessor};
use std::io::{self, Read};
use std::process;

pub fn make_app() -> Command {
    Command::new("zed-docs-preprocessor")
        .about("Preprocesses Zed Docs content to provide rich action & keybinding support and more")
        .subcommand(
            Command::new("supports")
                .arg(Arg::new("renderer").required(true))
                .about("Check whether a renderer is supported by this preprocessor"),
        )
}

fn main() -> Result<()> {
    let matches = make_app().get_matches();

    let preprocessor =
        ZedDocsPreprocessor::new().context("Failed to create ZedDocsPreprocessor")?;

    if let Some(sub_args) = matches.subcommand_matches("supports") {
        handle_supports(&preprocessor, sub_args);
    } else {
        handle_preprocessing(&preprocessor)?;
    }

    Ok(())
}

fn handle_preprocessing(pre: &dyn Preprocessor) -> Result<()> {
    let mut stdin = io::stdin();
    let mut input = String::new();
    stdin.read_to_string(&mut input)?;

    let (ctx, book) = CmdPreprocessor::parse_input(input.as_bytes())?;

    let processed_book = pre.run(&ctx, book)?;

    serde_json::to_writer(io::stdout(), &processed_book)?;

    Ok(())
}

fn handle_supports(pre: &dyn Preprocessor, sub_args: &ArgMatches) -> ! {
    let renderer = sub_args
        .get_one::<String>("renderer")
        .expect("Required argument");
    let supported = pre.supports_renderer(renderer);

    if supported {
        process::exit(0);
    } else {
        process::exit(1);
    }
}

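main.rs implements the usual mdbook preprocessor protocol: mdbook first runs the binary with a supports <renderer> subcommand and reads the exit code, then pipes a JSON (context, book) pair into stdin and expects the processed book as JSON on stdout. A hedged sketch of the first half of that handshake, assuming the docs_preprocessor binary is on PATH (an assumption of this example, not something the diff sets up):

use std::process::Command;

fn main() {
    // mdbook treats exit code 0 from `docs_preprocessor supports html` as "this renderer is supported".
    let status = Command::new("docs_preprocessor")
        .args(["supports", "html"])
        .status()
        .expect("docs_preprocessor binary not found; the invocation here is illustrative");
    println!("html supported: {}", status.success());
}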
25 crates/docs_preprocessor/src/templates.rs Normal file
@@ -0,0 +1,25 @@
use crate::PreprocessorContext;
use regex::Regex;
use std::collections::HashMap;

mod action;
mod keybinding;

pub use action::*;
pub use keybinding::*;

pub trait Template {
    fn key(&self) -> &'static str;
    fn regex(&self) -> Regex;
    fn parse_args(&self, args: &str) -> HashMap<String, String>;
    fn render(&self, context: &PreprocessorContext, args: &HashMap<String, String>) -> String;

    fn process(&self, context: &PreprocessorContext, content: &str) -> String {
        self.regex()
            .replace_all(content, |caps: &regex::Captures| {
                let args = self.parse_args(&caps[1]);
                self.render(context, &args)
            })
            .into_owned()
    }
}

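Template's default process method does all of the find-and-replace work, so adding a new documentation tag only requires the four required methods. A hypothetical extra template, sketched here purely to illustrate the trait (OsNameTemplate and the {#os ...} tag are not part of this change), would sit next to action.rs and keybinding.rs:

use std::collections::HashMap;

use regex::Regex;

use super::Template;
use crate::PreprocessorContext;

// Hypothetical template; shown only to illustrate the trait surface.
pub struct OsNameTemplate;

impl Template for OsNameTemplate {
    fn key(&self) -> &'static str {
        "os"
    }

    fn regex(&self) -> Regex {
        // Same non-greedy tag shape as the action/kb templates: matches `{#os linux}`.
        Regex::new(&format!(r"\{{#{}(.*?)\}}", self.key())).unwrap()
    }

    fn parse_args(&self, args: &str) -> HashMap<String, String> {
        let mut map = HashMap::new();
        map.insert("os".to_string(), args.trim().to_string());
        map
    }

    fn render(&self, _context: &PreprocessorContext, args: &HashMap<String, String>) -> String {
        match args.get("os").map(String::as_str) {
            Some("macos") => "macOS".to_string(),
            Some("linux") => "Linux".to_string(),
            other => other.unwrap_or_default().to_string(),
        }
    }
}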
50 crates/docs_preprocessor/src/templates/action.rs Normal file
@@ -0,0 +1,50 @@
use crate::PreprocessorContext;
use regex::Regex;
use std::collections::HashMap;

use super::Template;

pub struct ActionTemplate;

impl ActionTemplate {
    pub fn new() -> Self {
        ActionTemplate
    }
}

impl Template for ActionTemplate {
    fn key(&self) -> &'static str {
        "action"
    }

    fn regex(&self) -> Regex {
        Regex::new(&format!(r"\{{#{}(.*?)\}}", self.key())).unwrap()
    }

    fn parse_args(&self, args: &str) -> HashMap<String, String> {
        let mut map = HashMap::new();
        map.insert("name".to_string(), args.trim().to_string());
        map
    }

    fn render(&self, _context: &PreprocessorContext, args: &HashMap<String, String>) -> String {
        let name = args.get("name").map(String::as_str).unwrap_or_default();

        let formatted_name = name
            .chars()
            .enumerate()
            .map(|(i, c)| {
                if i > 0 && c.is_uppercase() {
                    format!(" {}", c.to_lowercase())
                } else {
                    c.to_string()
                }
            })
            .collect::<String>()
            .trim()
            .to_string()
            .replace("::", ":");

        format!("<code class=\"hljs\">{}</code>", formatted_name)
    }
}

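The render method humanizes an action name by inserting a space before each interior capital, lower-casing it, and collapsing the namespace separator. The same walk as a standalone sketch, with the two sample names chosen only for illustration:

fn humanize(name: &str) -> String {
    name.chars()
        .enumerate()
        .map(|(i, c)| {
            if i > 0 && c.is_uppercase() {
                format!(" {}", c.to_lowercase())
            } else {
                c.to_string()
            }
        })
        .collect::<String>()
        .trim()
        .to_string()
        .replace("::", ":")
}

fn main() {
    // "{#action ToggleSoftWrap}" renders as <code class="hljs">Toggle soft wrap</code>.
    assert_eq!(humanize("ToggleSoftWrap"), "Toggle soft wrap");
    // Namespaced names keep a single colon.
    assert_eq!(humanize("editor::OpenFile"), "editor: open file");
}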
36 crates/docs_preprocessor/src/templates/keybinding.rs Normal file
@@ -0,0 +1,36 @@
use crate::PreprocessorContext;
use regex::Regex;
use std::collections::HashMap;

use super::Template;

pub struct KeybindingTemplate;

impl KeybindingTemplate {
    pub fn new() -> Self {
        KeybindingTemplate
    }
}

impl Template for KeybindingTemplate {
    fn key(&self) -> &'static str {
        "kb"
    }

    fn regex(&self) -> Regex {
        Regex::new(&format!(r"\{{#{}(.*?)\}}", self.key())).unwrap()
    }

    fn parse_args(&self, args: &str) -> HashMap<String, String> {
        let mut map = HashMap::new();
        map.insert("action".to_string(), args.trim().to_string());
        map
    }

    fn render(&self, context: &PreprocessorContext, args: &HashMap<String, String>) -> String {
        let action = args.get("action").map(String::as_str).unwrap_or("");
        let macos_binding = context.find_binding("macos", action).unwrap_or_default();
        let linux_binding = context.find_binding("linux", action).unwrap_or_default();
        format!("<kbd class=\"keybinding\">{macos_binding}|{linux_binding}</kbd>")
    }
}

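KeybindingTemplate matches the same non-greedy tag shape as ActionTemplate, so everything between "#kb" and the closing brace is handed to parse_args and trimmed down to the action name. A small check of that capture behaviour using only the regex crate:

use regex::Regex;

fn main() {
    // Same shape as KeybindingTemplate::regex() with key() == "kb".
    let re = Regex::new(r"\{#kb(.*?)\}").unwrap();
    let caps = re
        .captures("Press {#kb editor::OpenFile} to open the file under the cursor.")
        .unwrap();
    // parse_args() trims the raw capture down to the action name.
    assert_eq!(caps[1].trim(), "editor::OpenFile");
}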
@@ -199,6 +199,7 @@ gpui::actions!(
    CopyHighlightJson,
    CopyPath,
    CopyPermalinkToLine,
    CopyFileLocation,
    CopyRelativePath,
    Cut,
    CutToEndOfLine,
@@ -262,6 +263,7 @@ gpui::actions!(
    OpenExcerptsSplit,
    OpenPermalinkToLine,
    OpenUrl,
    OpenFile,
    Outdent,
    PageDown,
    PageUp,
@@ -306,6 +308,7 @@ gpui::actions!(
    SortLinesCaseInsensitive,
    SortLinesCaseSensitive,
    SplitSelectionIntoLines,
    SwitchSourceHeader,
    Tab,
    TabPrev,
    ToggleAutoSignatureHelp,
@@ -314,7 +317,9 @@ gpui::actions!(
    ToggleSelectionMenu,
    ToggleHunkDiff,
    ToggleInlayHints,
    ToggleInlineCompletions,
    ToggleLineNumbers,
    ToggleRelativeLineNumbers,
    ToggleIndentGuides,
    ToggleSoftWrap,
    ToggleTabBar,

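These entries extend the gpui::actions! invocation in the editor crate, which turns each name into a zero-sized action type that keymaps and register_action can refer to. Roughly the shape of the call once the new names are merged in; the editor namespace and the trimmed list are assumptions of this sketch, not a verbatim copy of the file:

// Each name becomes a unit struct dispatched as an action, e.g. editor::OpenFile.
gpui::actions!(
    editor,
    [
        CopyFileLocation,
        OpenFile,
        SwitchSourceHeader,
        ToggleInlineCompletions,
        ToggleRelativeLineNumbers,
        // ...the rest of the existing action list...
    ]
);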
93 crates/editor/src/clangd_ext.rs Normal file
@@ -0,0 +1,93 @@
use std::path::PathBuf;

use anyhow::Context as _;
use gpui::{View, ViewContext, WindowContext};
use language::Language;
use url::Url;

use crate::lsp_ext::find_specific_language_server_in_selection;

use crate::{element::register_action, Editor, SwitchSourceHeader};

static CLANGD_SERVER_NAME: &str = "clangd";

fn is_c_language(language: &Language) -> bool {
    return language.name().as_ref() == "C++" || language.name().as_ref() == "C";
}

pub fn switch_source_header(
    editor: &mut Editor,
    _: &SwitchSourceHeader,
    cx: &mut ViewContext<'_, Editor>,
) {
    let Some(project) = &editor.project else {
        return;
    };
    let Some(workspace) = editor.workspace() else {
        return;
    };

    let Some((_, _, server_to_query, buffer)) =
        find_specific_language_server_in_selection(&editor, cx, &is_c_language, CLANGD_SERVER_NAME)
    else {
        return;
    };

    let project = project.clone();
    let buffer_snapshot = buffer.read(cx).snapshot();
    let source_file = buffer_snapshot
        .file()
        .unwrap()
        .file_name(cx)
        .to_str()
        .unwrap()
        .to_owned();

    let switch_source_header_task = project.update(cx, |project, cx| {
        project.request_lsp(
            buffer,
            project::LanguageServerToQuery::Other(server_to_query),
            project::lsp_ext_command::SwitchSourceHeader,
            cx,
        )
    });
    cx.spawn(|_editor, mut cx| async move {
        let switch_source_header = switch_source_header_task
            .await
            .with_context(|| format!("Switch source/header LSP request for path \"{}\" failed", source_file))?;
        if switch_source_header.0.is_empty() {
            log::info!("Clangd returned an empty string when requesting to switch source/header from \"{}\"", source_file);
            return Ok(());
        }

        let goto = Url::parse(&switch_source_header.0).with_context(|| {
            format!(
                "Parsing URL \"{}\" returned from switch source/header failed",
                switch_source_header.0
            )
        })?;

        workspace
            .update(&mut cx, |workspace, view_cx| {
                workspace.open_abs_path(PathBuf::from(goto.path()), false, view_cx)
            })
            .with_context(|| {
                format!(
                    "Switch source/header could not open \"{}\" in workspace",
                    goto.path()
                )
            })?
            .await
            .map(|_| ())
    })
    .detach_and_log_err(cx);
}

pub fn apply_related_actions(editor: &View<Editor>, cx: &mut WindowContext) {
    if editor.update(cx, |e, cx| {
        find_specific_language_server_in_selection(e, cx, &is_c_language, CLANGD_SERVER_NAME)
            .is_some()
    }) {
        register_action(editor, cx, switch_source_header);
    }
}

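clangd's textDocument/switchSourceHeader extension answers with a single file URI as a string (empty when there is no counterpart file), which is why the task above parses it with Url and then hands the plain path to workspace.open_abs_path. The conversion step in isolation, with a made-up URI rather than output from a real clangd session:

use url::Url;

fn main() {
    let reply = "file:///home/user/project/src/widget.h"; // illustrative URI
    let goto = Url::parse(reply).expect("clangd returns a valid file URI");
    // open_abs_path() receives only the path portion of the URI.
    assert_eq!(goto.path(), "/home/user/project/src/widget.h");
}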
@@ -783,11 +783,13 @@ impl<'a> BlockMapWriter<'a> {
        &mut self,
        blocks: impl IntoIterator<Item = BlockProperties<Anchor>>,
    ) -> Vec<CustomBlockId> {
        let mut ids = Vec::new();
        let blocks = blocks.into_iter();
        let mut ids = Vec::with_capacity(blocks.size_hint().1.unwrap_or(0));
        let mut edits = Patch::default();
        let wrap_snapshot = &*self.0.wrap_snapshot.borrow();
        let buffer = wrap_snapshot.buffer_snapshot();

        let mut previous_wrap_row_range: Option<Range<u32>> = None;
        for block in blocks {
            let id = CustomBlockId(self.0.next_block_id.fetch_add(1, SeqCst));
            ids.push(id);
@@ -797,11 +799,18 @@ impl<'a> BlockMapWriter<'a> {
            let wrap_row = wrap_snapshot
                .make_wrap_point(Point::new(point.row, 0), Bias::Left)
                .row();
            let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
            let end_row = wrap_snapshot
                .next_row_boundary(WrapPoint::new(wrap_row, 0))
                .unwrap_or(wrap_snapshot.max_point().row() + 1);

            let (start_row, end_row) = {
                previous_wrap_row_range.take_if(|range| !range.contains(&wrap_row));
                let range = previous_wrap_row_range.get_or_insert_with(|| {
                    let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
                    let end_row = wrap_snapshot
                        .next_row_boundary(WrapPoint::new(wrap_row, 0))
                        .unwrap_or(wrap_snapshot.max_point().row() + 1);
                    start_row..end_row
                });
                (range.start, range.end)
            };
            let block_ix = match self
                .0
                .custom_blocks
@@ -881,6 +890,7 @@ impl<'a> BlockMapWriter<'a> {
        let buffer = wrap_snapshot.buffer_snapshot();
        let mut edits = Patch::default();
        let mut last_block_buffer_row = None;
        let mut previous_wrap_row_range: Option<Range<u32>> = None;
        self.0.custom_blocks.retain(|block| {
            if block_ids.contains(&block.id) {
                let buffer_row = block.position.to_point(buffer).row;
@@ -889,21 +899,32 @@ impl<'a> BlockMapWriter<'a> {
                let wrap_row = wrap_snapshot
                    .make_wrap_point(Point::new(buffer_row, 0), Bias::Left)
                    .row();
                let start_row = wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
                let end_row = wrap_snapshot
                    .next_row_boundary(WrapPoint::new(wrap_row, 0))
                    .unwrap_or(wrap_snapshot.max_point().row() + 1);
                let (start_row, end_row) = {
                    previous_wrap_row_range.take_if(|range| !range.contains(&wrap_row));
                    let range = previous_wrap_row_range.get_or_insert_with(|| {
                        let start_row =
                            wrap_snapshot.prev_row_boundary(WrapPoint::new(wrap_row, 0));
                        let end_row = wrap_snapshot
                            .next_row_boundary(WrapPoint::new(wrap_row, 0))
                            .unwrap_or(wrap_snapshot.max_point().row() + 1);
                        start_row..end_row
                    });
                    (range.start, range.end)
                };

                edits.push(Edit {
                    old: start_row..end_row,
                    new: start_row..end_row,
                })
            }
            self.0.custom_blocks_by_id.remove(&block.id);
            false
        } else {
            true
        }
        });
        self.0
            .custom_blocks_by_id
            .retain(|id, _| !block_ids.contains(id));
        self.0.sync(wrap_snapshot, edits);
    }
}

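Both hunks introduce the same optimization: consecutive blocks that land on the same wrap row reuse the previously computed row boundary range instead of asking the wrap snapshot again. The caching pattern on its own, as a minimal sketch with a stand-in compute closure (requires Rust 1.80+ for Option::take_if, which the source already relies on):

use std::ops::Range;

/// Returns the row range containing `row`, recomputing only when `row`
/// falls outside the previously cached range.
fn cached_row_range(
    cache: &mut Option<Range<u32>>,
    row: u32,
    compute: impl FnOnce() -> Range<u32>,
) -> (u32, u32) {
    // Drop the cached range if it no longer contains this row.
    cache.take_if(|range| !range.contains(&row));
    let range = cache.get_or_insert_with(compute);
    (range.start, range.end)
}

fn main() {
    let mut cache = None;
    assert_eq!(cached_row_range(&mut cache, 3, || 0..10), (0, 10));
    // Same visual row range: the compute closure is not called again.
    assert_eq!(cached_row_range(&mut cache, 7, || unreachable!()), (0, 10));
}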
@@ -15,6 +15,7 @@
|
||||
pub mod actions;
|
||||
mod blame_entry_tooltip;
|
||||
mod blink_manager;
|
||||
mod clangd_ext;
|
||||
mod debounced_delay;
|
||||
pub mod display_map;
|
||||
mod editor_settings;
|
||||
@@ -30,6 +31,7 @@ mod inlay_hint_cache;
|
||||
mod inline_completion_provider;
|
||||
pub mod items;
|
||||
mod linked_editing_ranges;
|
||||
mod lsp_ext;
|
||||
mod mouse_context_menu;
|
||||
pub mod movement;
|
||||
mod persistence;
|
||||
@@ -57,7 +59,7 @@ use convert_case::{Case, Casing};
|
||||
use debounced_delay::DebouncedDelay;
|
||||
use display_map::*;
|
||||
pub use display_map::{DisplayPoint, FoldPlaceholder};
|
||||
pub use editor_settings::{CurrentLineHighlight, EditorSettings};
|
||||
pub use editor_settings::{CurrentLineHighlight, EditorSettings, ScrollBeyondLastLine};
|
||||
pub use editor_settings_controls::*;
|
||||
use element::LineWithInvisibles;
|
||||
pub use element::{
|
||||
@@ -97,7 +99,7 @@ use language::{point_to_lsp, BufferRow, Runnable, RunnableRange};
|
||||
use linked_editing_ranges::refresh_linked_ranges;
|
||||
use task::{ResolvedTask, TaskTemplate, TaskVariables};
|
||||
|
||||
use hover_links::{HoverLink, HoveredLinkState, InlayHighlight};
|
||||
use hover_links::{find_file, HoverLink, HoveredLinkState, InlayHighlight};
|
||||
pub use lsp::CompletionContext;
|
||||
use lsp::{
|
||||
CompletionItemKind, CompletionTriggerKind, DiagnosticSeverity, InsertTextFormat,
|
||||
@@ -296,7 +298,8 @@ pub fn init(cx: &mut AppContext) {
|
||||
cx.observe_new_views(
|
||||
|workspace: &mut Workspace, _cx: &mut ViewContext<Workspace>| {
|
||||
workspace.register_action(Editor::new_file);
|
||||
workspace.register_action(Editor::new_file_in_direction);
|
||||
workspace.register_action(Editor::new_file_vertical);
|
||||
workspace.register_action(Editor::new_file_horizontal);
|
||||
},
|
||||
)
|
||||
.detach();
|
||||
@@ -372,6 +375,7 @@ pub enum SoftWrap {
|
||||
PreferLine,
|
||||
EditorWidth,
|
||||
Column(u32),
|
||||
Bounded(u32),
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -462,6 +466,14 @@ struct ResolvedTasks {
|
||||
struct MultiBufferOffset(usize);
|
||||
#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)]
|
||||
struct BufferOffset(usize);
|
||||
|
||||
// Addons allow storing per-editor state in other crates (e.g. Vim)
|
||||
pub trait Addon: 'static {
|
||||
fn extend_key_context(&self, _: &mut KeyContext, _: &AppContext) {}
|
||||
|
||||
fn to_any(&self) -> &dyn std::any::Any;
|
||||
}
|
||||
|
||||
/// Zed's primary text input `View`, allowing users to edit a [`MultiBuffer`]
|
||||
///
|
||||
/// See the [module level documentation](self) for more information.
|
||||
@@ -500,6 +512,7 @@ pub struct Editor {
|
||||
show_breadcrumbs: bool,
|
||||
show_gutter: bool,
|
||||
show_line_numbers: Option<bool>,
|
||||
use_relative_line_numbers: Option<bool>,
|
||||
show_git_diff_gutter: Option<bool>,
|
||||
show_code_actions: Option<bool>,
|
||||
show_runnables: Option<bool>,
|
||||
@@ -533,7 +546,6 @@ pub struct Editor {
|
||||
collapse_matches: bool,
|
||||
autoindent_mode: Option<AutoindentMode>,
|
||||
workspace: Option<(WeakView<Workspace>, Option<WorkspaceId>)>,
|
||||
keymap_context_layers: BTreeMap<TypeId, KeyContext>,
|
||||
input_enabled: bool,
|
||||
use_modal_editing: bool,
|
||||
read_only: bool,
|
||||
@@ -544,14 +556,13 @@ pub struct Editor {
|
||||
hovered_link_state: Option<HoveredLinkState>,
|
||||
inline_completion_provider: Option<RegisteredInlineCompletionProvider>,
|
||||
active_inline_completion: Option<(Inlay, Option<Range<Anchor>>)>,
|
||||
show_inline_completions: bool,
|
||||
show_inline_completions_override: Option<bool>,
|
||||
inlay_hint_cache: InlayHintCache,
|
||||
expanded_hunks: ExpandedHunks,
|
||||
next_inlay_id: usize,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
pixel_position_of_newest_cursor: Option<gpui::Point<Pixels>>,
|
||||
gutter_dimensions: GutterDimensions,
|
||||
pub vim_replace_map: HashMap<Range<usize>, String>,
|
||||
style: Option<EditorStyle>,
|
||||
next_editor_action_id: EditorActionId,
|
||||
editor_actions: Rc<RefCell<BTreeMap<EditorActionId, Box<dyn Fn(&mut ViewContext<Self>)>>>>,
|
||||
@@ -581,6 +592,7 @@ pub struct Editor {
|
||||
breadcrumb_header: Option<String>,
|
||||
focused_block: Option<FocusedBlock>,
|
||||
next_scroll_position: NextScrollCursorCenterTopBottom,
|
||||
addons: HashMap<TypeId, Box<dyn Addon>>,
|
||||
_scroll_cursor_center_top_bottom_task: Task<()>,
|
||||
}
|
||||
|
||||
@@ -1842,6 +1854,7 @@ impl Editor {
|
||||
show_breadcrumbs: EditorSettings::get_global(cx).toolbar.breadcrumbs,
|
||||
show_gutter: mode == EditorMode::Full,
|
||||
show_line_numbers: None,
|
||||
use_relative_line_numbers: None,
|
||||
show_git_diff_gutter: None,
|
||||
show_code_actions: None,
|
||||
show_runnables: None,
|
||||
@@ -1875,7 +1888,6 @@ impl Editor {
|
||||
autoindent_mode: Some(AutoindentMode::EachLine),
|
||||
collapse_matches: false,
|
||||
workspace: None,
|
||||
keymap_context_layers: Default::default(),
|
||||
input_enabled: true,
|
||||
use_modal_editing: mode == EditorMode::Full,
|
||||
read_only: false,
|
||||
@@ -1900,8 +1912,7 @@ impl Editor {
|
||||
hovered_cursors: Default::default(),
|
||||
next_editor_action_id: EditorActionId::default(),
|
||||
editor_actions: Rc::default(),
|
||||
vim_replace_map: Default::default(),
|
||||
show_inline_completions: mode == EditorMode::Full,
|
||||
show_inline_completions_override: None,
|
||||
custom_context_menu: None,
|
||||
show_git_blame_gutter: false,
|
||||
show_git_blame_inline: false,
|
||||
@@ -1939,6 +1950,7 @@ impl Editor {
|
||||
breadcrumb_header: None,
|
||||
focused_block: None,
|
||||
next_scroll_position: NextScrollCursorCenterTopBottom::default(),
|
||||
addons: HashMap::default(),
|
||||
_scroll_cursor_center_top_bottom_task: Task::ready(()),
|
||||
};
|
||||
this.tasks_update_task = Some(this.refresh_runnables(cx));
|
||||
@@ -1961,13 +1973,13 @@ impl Editor {
|
||||
this
|
||||
}
|
||||
|
||||
pub fn mouse_menu_is_focused(&self, cx: &mut WindowContext) -> bool {
|
||||
pub fn mouse_menu_is_focused(&self, cx: &WindowContext) -> bool {
|
||||
self.mouse_context_menu
|
||||
.as_ref()
|
||||
.is_some_and(|menu| menu.context_menu.focus_handle(cx).is_focused(cx))
|
||||
}
|
||||
|
||||
fn key_context(&self, cx: &AppContext) -> KeyContext {
|
||||
fn key_context(&self, cx: &ViewContext<Self>) -> KeyContext {
|
||||
let mut key_context = KeyContext::new_with_defaults();
|
||||
key_context.add("Editor");
|
||||
let mode = match self.mode {
|
||||
@@ -1998,8 +2010,13 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
for layer in self.keymap_context_layers.values() {
|
||||
key_context.extend(layer);
|
||||
// Disable vim contexts when a sub-editor (e.g. rename/inline assistant) is focused.
|
||||
if !self.focus_handle(cx).contains_focused(cx)
|
||||
|| (self.is_focused(cx) || self.mouse_menu_is_focused(cx))
|
||||
{
|
||||
for addon in self.addons.values() {
|
||||
addon.extend_key_context(&mut key_context, cx)
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(extension) = self
|
||||
@@ -2055,14 +2072,29 @@ impl Editor {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn new_file_in_direction(
|
||||
fn new_file_vertical(
|
||||
workspace: &mut Workspace,
|
||||
action: &workspace::NewFileInDirection,
|
||||
_: &workspace::NewFileSplitVertical,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
Self::new_file_in_direction(workspace, SplitDirection::vertical(cx), cx)
|
||||
}
|
||||
|
||||
fn new_file_horizontal(
|
||||
workspace: &mut Workspace,
|
||||
_: &workspace::NewFileSplitHorizontal,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
Self::new_file_in_direction(workspace, SplitDirection::horizontal(cx), cx)
|
||||
}
|
||||
|
||||
fn new_file_in_direction(
|
||||
workspace: &mut Workspace,
|
||||
direction: SplitDirection,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
let project = workspace.project().clone();
|
||||
let create = project.update(cx, |project, cx| project.create_buffer(cx));
|
||||
let direction = action.0;
|
||||
|
||||
cx.spawn(|workspace, mut cx| async move {
|
||||
let buffer = create.await?;
|
||||
@@ -2188,7 +2220,7 @@ impl Editor {
|
||||
}),
|
||||
provider: Arc::new(provider),
|
||||
});
|
||||
self.refresh_inline_completion(false, cx);
|
||||
self.refresh_inline_completion(false, false, cx);
|
||||
}
|
||||
|
||||
pub fn placeholder_text(&self, _cx: &WindowContext) -> Option<&str> {
|
||||
@@ -2241,21 +2273,6 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_keymap_context_layer<Tag: 'static>(
|
||||
&mut self,
|
||||
context: KeyContext,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.keymap_context_layers
|
||||
.insert(TypeId::of::<Tag>(), context);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn remove_keymap_context_layer<Tag: 'static>(&mut self, cx: &mut ViewContext<Self>) {
|
||||
self.keymap_context_layers.remove(&TypeId::of::<Tag>());
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn set_input_enabled(&mut self, input_enabled: bool) {
|
||||
self.input_enabled = input_enabled;
|
||||
}
|
||||
@@ -2288,8 +2305,49 @@ impl Editor {
        self.auto_replace_emoji_shortcode = auto_replace;
    }

    pub fn set_show_inline_completions(&mut self, show_inline_completions: bool) {
        self.show_inline_completions = show_inline_completions;
    pub fn toggle_inline_completions(
        &mut self,
        _: &ToggleInlineCompletions,
        cx: &mut ViewContext<Self>,
    ) {
        if self.show_inline_completions_override.is_some() {
            self.set_show_inline_completions(None, cx);
        } else {
            let cursor = self.selections.newest_anchor().head();
            if let Some((buffer, cursor_buffer_position)) =
                self.buffer.read(cx).text_anchor_for_position(cursor, cx)
            {
                let show_inline_completions =
                    !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx);
                self.set_show_inline_completions(Some(show_inline_completions), cx);
            }
        }
    }

    pub fn set_show_inline_completions(
        &mut self,
        show_inline_completions: Option<bool>,
        cx: &mut ViewContext<Self>,
    ) {
        self.show_inline_completions_override = show_inline_completions;
        self.refresh_inline_completion(false, true, cx);
    }

    fn should_show_inline_completions(
        &self,
        buffer: &Model<Buffer>,
        buffer_position: language::Anchor,
        cx: &AppContext,
    ) -> bool {
        if let Some(provider) = self.inline_completion_provider() {
            if let Some(show_inline_completions) = self.show_inline_completions_override {
                show_inline_completions
            } else {
                self.mode == EditorMode::Full && provider.is_enabled(&buffer, buffer_position, cx)
            }
        } else {
            false
        }
    }

    pub fn set_use_modal_editing(&mut self, to: bool) {
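The hunk above replaces the old show_inline_completions boolean with a tri-state override: None means follow the editor mode and provider, while Some(bool) is an explicit user toggle that wins over both. A small sketch of that resolution rule, with provider_enabled standing in for the mode-and-provider check:

/// Mirrors should_show_inline_completions(): an explicit override wins,
/// otherwise fall back to what the provider and editor mode allow.
fn resolve_inline_completions(override_state: Option<bool>, provider_enabled: bool) -> bool {
    override_state.unwrap_or(provider_enabled)
}

fn main() {
    assert!(resolve_inline_completions(Some(true), false)); // forced on via ToggleInlineCompletions
    assert!(!resolve_inline_completions(Some(false), true)); // forced off
    assert!(resolve_inline_completions(None, true)); // follow the provider
}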
@@ -3001,6 +3059,17 @@ impl Editor {
|
||||
if start_offset > buffer_snapshot.len() || end_offset > buffer_snapshot.len() {
|
||||
continue;
|
||||
}
|
||||
if self.selections.disjoint_anchor_ranges().iter().any(|s| {
|
||||
if s.start.buffer_id != selection.start.buffer_id
|
||||
|| s.end.buffer_id != selection.end.buffer_id
|
||||
{
|
||||
return false;
|
||||
}
|
||||
TO::to_offset(&s.start.text_anchor, &buffer_snapshot) <= end_offset
|
||||
&& TO::to_offset(&s.end.text_anchor, &buffer_snapshot) >= start_offset
|
||||
}) {
|
||||
continue;
|
||||
}
|
||||
let start = buffer_snapshot.anchor_after(start_offset);
|
||||
let end = buffer_snapshot.anchor_after(end_offset);
|
||||
linked_edits
|
||||
@@ -3321,7 +3390,7 @@ impl Editor {
|
||||
let trigger_in_words = !had_active_inline_completion;
|
||||
this.trigger_completion_on_input(&text, trigger_in_words, cx);
|
||||
linked_editing_ranges::refresh_linked_ranges(this, cx);
|
||||
this.refresh_inline_completion(true, cx);
|
||||
this.refresh_inline_completion(true, false, cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -3507,7 +3576,7 @@ impl Editor {
|
||||
.collect();
|
||||
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(new_selections));
|
||||
this.refresh_inline_completion(true, cx);
|
||||
this.refresh_inline_completion(true, false, cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -4055,7 +4124,7 @@ impl Editor {
|
||||
// hence we do LSP request & edit on host side only — add formats to host's history.
|
||||
let push_to_lsp_host_history = true;
|
||||
// If this is not the host, append its history with new edits.
|
||||
let push_to_client_history = project.read(cx).is_remote();
|
||||
let push_to_client_history = project.read(cx).is_via_collab();
|
||||
|
||||
let on_type_formatting = project.update(cx, |project, cx| {
|
||||
project.on_type_format(
|
||||
@@ -4395,7 +4464,7 @@ impl Editor {
|
||||
})
|
||||
}
|
||||
|
||||
this.refresh_inline_completion(true, cx);
|
||||
this.refresh_inline_completion(true, false, cx);
|
||||
});
|
||||
|
||||
let show_new_completions_on_confirm = completion
|
||||
@@ -4895,17 +4964,18 @@ impl Editor {
|
||||
None
|
||||
}
|
||||
|
||||
fn refresh_inline_completion(
|
||||
pub fn refresh_inline_completion(
|
||||
&mut self,
|
||||
debounce: bool,
|
||||
user_requested: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<()> {
|
||||
let provider = self.inline_completion_provider()?;
|
||||
let cursor = self.selections.newest_anchor().head();
|
||||
let (buffer, cursor_buffer_position) =
|
||||
self.buffer.read(cx).text_anchor_for_position(cursor, cx)?;
|
||||
if !self.show_inline_completions
|
||||
|| !provider.is_enabled(&buffer, cursor_buffer_position, cx)
|
||||
if !user_requested
|
||||
&& !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx)
|
||||
{
|
||||
self.discard_inline_completion(false, cx);
|
||||
return None;
|
||||
@@ -4925,9 +4995,7 @@ impl Editor {
|
||||
let cursor = self.selections.newest_anchor().head();
|
||||
let (buffer, cursor_buffer_position) =
|
||||
self.buffer.read(cx).text_anchor_for_position(cursor, cx)?;
|
||||
if !self.show_inline_completions
|
||||
|| !provider.is_enabled(&buffer, cursor_buffer_position, cx)
|
||||
{
|
||||
if !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx) {
|
||||
return None;
|
||||
}
|
||||
|
||||
@@ -4939,7 +5007,7 @@ impl Editor {
|
||||
|
||||
pub fn show_inline_completion(&mut self, _: &ShowInlineCompletion, cx: &mut ViewContext<Self>) {
|
||||
if !self.has_active_inline_completion(cx) {
|
||||
self.refresh_inline_completion(false, cx);
|
||||
self.refresh_inline_completion(false, true, cx);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -4968,7 +5036,7 @@ impl Editor {
|
||||
if self.has_active_inline_completion(cx) {
|
||||
self.cycle_inline_completion(Direction::Next, cx);
|
||||
} else {
|
||||
let is_copilot_disabled = self.refresh_inline_completion(false, cx).is_none();
|
||||
let is_copilot_disabled = self.refresh_inline_completion(false, true, cx).is_none();
|
||||
if is_copilot_disabled {
|
||||
cx.propagate();
|
||||
}
|
||||
@@ -4983,7 +5051,7 @@ impl Editor {
|
||||
if self.has_active_inline_completion(cx) {
|
||||
self.cycle_inline_completion(Direction::Prev, cx);
|
||||
} else {
|
||||
let is_copilot_disabled = self.refresh_inline_completion(false, cx).is_none();
|
||||
let is_copilot_disabled = self.refresh_inline_completion(false, true, cx).is_none();
|
||||
if is_copilot_disabled {
|
||||
cx.propagate();
|
||||
}
|
||||
@@ -5011,7 +5079,7 @@ impl Editor {
|
||||
self.change_selections(None, cx, |s| s.select_ranges([range]))
|
||||
}
|
||||
self.insert_with_autoindent_mode(&completion.text.to_string(), None, cx);
|
||||
self.refresh_inline_completion(true, cx);
|
||||
self.refresh_inline_completion(true, true, cx);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -5047,7 +5115,7 @@ impl Editor {
|
||||
}
|
||||
self.insert_with_autoindent_mode(&partial_completion, None, cx);
|
||||
|
||||
self.refresh_inline_completion(true, cx);
|
||||
self.refresh_inline_completion(true, true, cx);
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
@@ -5513,7 +5581,7 @@ impl Editor {
|
||||
this.edit(edits, None, cx);
|
||||
})
|
||||
}
|
||||
this.refresh_inline_completion(true, cx);
|
||||
this.refresh_inline_completion(true, false, cx);
|
||||
linked_editing_ranges::refresh_linked_ranges(this, cx);
|
||||
});
|
||||
}
|
||||
@@ -5532,7 +5600,7 @@ impl Editor {
|
||||
})
|
||||
});
|
||||
this.insert("", cx);
|
||||
this.refresh_inline_completion(true, cx);
|
||||
this.refresh_inline_completion(true, false, cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -5619,7 +5687,7 @@ impl Editor {
|
||||
self.transact(cx, |this, cx| {
|
||||
this.buffer.update(cx, |b, cx| b.edit(edits, None, cx));
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(selections));
|
||||
this.refresh_inline_completion(true, cx);
|
||||
this.refresh_inline_completion(true, false, cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -6787,7 +6855,7 @@ impl Editor {
|
||||
}
|
||||
self.request_autoscroll(Autoscroll::fit(), cx);
|
||||
self.unmark_text(cx);
|
||||
self.refresh_inline_completion(true, cx);
|
||||
self.refresh_inline_completion(true, false, cx);
|
||||
cx.emit(EditorEvent::Edited { transaction_id });
|
||||
cx.emit(EditorEvent::TransactionUndone { transaction_id });
|
||||
}
|
||||
@@ -6808,7 +6876,7 @@ impl Editor {
|
||||
}
|
||||
self.request_autoscroll(Autoscroll::fit(), cx);
|
||||
self.unmark_text(cx);
|
||||
self.refresh_inline_completion(true, cx);
|
||||
self.refresh_inline_completion(true, false, cx);
|
||||
cx.emit(EditorEvent::Edited { transaction_id });
|
||||
}
|
||||
}
|
||||
@@ -8577,7 +8645,7 @@ impl Editor {
|
||||
let hide_runnables = project
|
||||
.update(&mut cx, |project, cx| {
|
||||
// Do not display any test indicators in non-dev server remote projects.
|
||||
project.is_remote() && project.ssh_connection_string(cx).is_none()
|
||||
project.is_via_collab() && project.ssh_connection_string(cx).is_none()
|
||||
})
|
||||
.unwrap_or(true);
|
||||
if hide_runnables {
|
||||
@@ -9179,6 +9247,38 @@ impl Editor {
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub fn open_file(&mut self, _: &OpenFile, cx: &mut ViewContext<Self>) {
|
||||
let Some(workspace) = self.workspace() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let position = self.selections.newest_anchor().head();
|
||||
|
||||
let Some((buffer, buffer_position)) =
|
||||
self.buffer.read(cx).text_anchor_for_position(position, cx)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(project) = self.project.clone() else {
|
||||
return;
|
||||
};
|
||||
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
let result = find_file(&buffer, project, buffer_position, &mut cx).await;
|
||||
|
||||
if let Some((_, path)) = result {
|
||||
workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
workspace.open_resolved_path(path, cx)
|
||||
})?
|
||||
.await?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub(crate) fn navigate_to_hover_links(
|
||||
&mut self,
|
||||
kind: Option<GotoDefinitionKind>,
|
||||
@@ -9189,21 +9289,49 @@ impl Editor {
|
||||
// If there is one definition, just open it directly
|
||||
if definitions.len() == 1 {
|
||||
let definition = definitions.pop().unwrap();
|
||||
|
||||
enum TargetTaskResult {
|
||||
Location(Option<Location>),
|
||||
AlreadyNavigated,
|
||||
}
|
||||
|
||||
let target_task = match definition {
|
||||
HoverLink::Text(link) => Task::Ready(Some(Ok(Some(link.target)))),
|
||||
HoverLink::Text(link) => {
|
||||
Task::ready(anyhow::Ok(TargetTaskResult::Location(Some(link.target))))
|
||||
}
|
||||
HoverLink::InlayHint(lsp_location, server_id) => {
|
||||
self.compute_target_location(lsp_location, server_id, cx)
|
||||
let computation = self.compute_target_location(lsp_location, server_id, cx);
|
||||
cx.background_executor().spawn(async move {
|
||||
let location = computation.await?;
|
||||
Ok(TargetTaskResult::Location(location))
|
||||
})
|
||||
}
|
||||
HoverLink::Url(url) => {
|
||||
cx.open_url(&url);
|
||||
Task::ready(Ok(None))
|
||||
Task::ready(Ok(TargetTaskResult::AlreadyNavigated))
|
||||
}
|
||||
HoverLink::File(path) => {
|
||||
if let Some(workspace) = self.workspace() {
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
workspace.open_resolved_path(path, cx)
|
||||
})?
|
||||
.await
|
||||
.map(|_| TargetTaskResult::AlreadyNavigated)
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(TargetTaskResult::Location(None)))
|
||||
}
|
||||
}
|
||||
};
|
||||
cx.spawn(|editor, mut cx| async move {
|
||||
let target = target_task.await.context("target resolution task")?;
|
||||
let Some(target) = target else {
|
||||
return Ok(Navigated::No);
|
||||
let target = match target_task.await.context("target resolution task")? {
|
||||
TargetTaskResult::AlreadyNavigated => return Ok(Navigated::Yes),
|
||||
TargetTaskResult::Location(None) => return Ok(Navigated::No),
|
||||
TargetTaskResult::Location(Some(target)) => target,
|
||||
};
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
let Some(workspace) = editor.workspace() else {
|
||||
return Navigated::No;
|
||||
@@ -9281,6 +9409,7 @@ impl Editor {
|
||||
}),
|
||||
HoverLink::InlayHint(_, _) => None,
|
||||
HoverLink::Url(_) => None,
|
||||
HoverLink::File(_) => None,
|
||||
})
|
||||
.unwrap_or(tab_kind.to_string());
|
||||
let location_tasks = definitions
|
||||
@@ -9291,6 +9420,7 @@ impl Editor {
|
||||
editor.compute_target_location(lsp_location, server_id, cx)
|
||||
}
|
||||
HoverLink::Url(_) => Task::ready(Ok(None)),
|
||||
HoverLink::File(_) => Task::ready(Ok(None)),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
(title, location_tasks, editor.workspace().clone())
|
||||
@@ -10413,6 +10543,8 @@ impl Editor {
|
||||
if settings.show_wrap_guides {
|
||||
if let SoftWrap::Column(soft_wrap) = self.soft_wrap_mode(cx) {
|
||||
wrap_guides.push((soft_wrap as usize, true));
|
||||
} else if let SoftWrap::Bounded(soft_wrap) = self.soft_wrap_mode(cx) {
|
||||
wrap_guides.push((soft_wrap as usize, true));
|
||||
}
|
||||
wrap_guides.extend(settings.wrap_guides.iter().map(|guide| (*guide, false)))
|
||||
}
|
||||
@@ -10432,6 +10564,9 @@ impl Editor {
|
||||
language_settings::SoftWrap::PreferredLineLength => {
|
||||
SoftWrap::Column(settings.preferred_line_length)
|
||||
}
|
||||
language_settings::SoftWrap::Bounded => {
|
||||
SoftWrap::Bounded(settings.preferred_line_length)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10473,7 +10608,7 @@ impl Editor {
|
||||
} else {
|
||||
let soft_wrap = match self.soft_wrap_mode(cx) {
|
||||
SoftWrap::None | SoftWrap::PreferLine => language_settings::SoftWrap::EditorWidth,
|
||||
SoftWrap::EditorWidth | SoftWrap::Column(_) => {
|
||||
SoftWrap::EditorWidth | SoftWrap::Column(_) | SoftWrap::Bounded(_) => {
|
||||
language_settings::SoftWrap::PreferLine
|
||||
}
|
||||
};
|
||||
@@ -10515,6 +10650,29 @@ impl Editor {
|
||||
EditorSettings::override_global(editor_settings, cx);
|
||||
}
|
||||
|
||||
pub fn should_use_relative_line_numbers(&self, cx: &WindowContext) -> bool {
|
||||
self.use_relative_line_numbers
|
||||
.unwrap_or(EditorSettings::get_global(cx).relative_line_numbers)
|
||||
}
|
||||
|
||||
pub fn toggle_relative_line_numbers(
|
||||
&mut self,
|
||||
_: &ToggleRelativeLineNumbers,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let is_relative = self.should_use_relative_line_numbers(cx);
|
||||
self.set_relative_line_number(Some(!is_relative), cx)
|
||||
}
|
||||
|
||||
pub fn set_relative_line_number(
|
||||
&mut self,
|
||||
is_relative: Option<bool>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.use_relative_line_numbers = is_relative;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn set_show_gutter(&mut self, show_gutter: bool, cx: &mut ViewContext<Self>) {
|
||||
self.show_gutter = show_gutter;
|
||||
cx.notify();
|
||||
@@ -10810,6 +10968,17 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn copy_file_location(&mut self, _: &CopyFileLocation, cx: &mut ViewContext<Self>) {
|
||||
if let Some(buffer) = self.buffer().read(cx).as_singleton() {
|
||||
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) {
|
||||
if let Some(path) = file.path().to_str() {
|
||||
let selection = self.selections.newest::<Point>(cx).start.row + 1;
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(format!("{path}:{selection}")));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn open_permalink_to_line(&mut self, _: &OpenPermalinkToLine, cx: &mut ViewContext<Self>) {
|
||||
let permalink = self.get_permalink_to_line(cx);
|
||||
|
||||
@@ -11335,7 +11504,7 @@ impl Editor {
|
||||
.filter_map(|buffer| {
|
||||
let buffer = buffer.read(cx);
|
||||
let language = buffer.language()?;
|
||||
if project.is_local()
|
||||
if project.is_local_or_ssh()
|
||||
&& project.language_servers_for_buffer(buffer, cx).count() == 0
|
||||
{
|
||||
None
|
||||
@@ -11422,7 +11591,7 @@ impl Editor {
|
||||
|
||||
fn settings_changed(&mut self, cx: &mut ViewContext<Self>) {
|
||||
self.tasks_update_task = Some(self.refresh_runnables(cx));
|
||||
self.refresh_inline_completion(true, cx);
|
||||
self.refresh_inline_completion(true, false, cx);
|
||||
self.refresh_inlay_hints(
|
||||
InlayHintRefreshReason::SettingsChange(inlay_hint_settings(
|
||||
self.selections.newest_anchor().head(),
|
||||
@@ -11864,7 +12033,6 @@ impl Editor {
|
||||
self.editor_actions.borrow_mut().insert(
|
||||
id,
|
||||
Box::new(move |cx| {
|
||||
let _view = cx.view().clone();
|
||||
let cx = cx.window_context();
|
||||
let listener = listener.clone();
|
||||
cx.on_action(TypeId::of::<A>(), move |action, phase, cx| {
|
||||
@@ -11950,6 +12118,22 @@ impl Editor {
|
||||
menu.visible() && matches!(menu, ContextMenu::Completions(_))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn register_addon<T: Addon>(&mut self, instance: T) {
|
||||
self.addons
|
||||
.insert(std::any::TypeId::of::<T>(), Box::new(instance));
|
||||
}
|
||||
|
||||
pub fn unregister_addon<T: Addon>(&mut self) {
|
||||
self.addons.remove(&std::any::TypeId::of::<T>());
|
||||
}
|
||||
|
||||
pub fn addon<T: Addon>(&self) -> Option<&T> {
|
||||
let type_id = std::any::TypeId::of::<T>();
|
||||
self.addons
|
||||
.get(&type_id)
|
||||
.and_then(|item| item.to_any().downcast_ref::<T>())
|
||||
}
|
||||
}
|
||||
|
||||
fn hunks_for_selections(
|
||||
|
||||
@@ -7507,6 +7507,7 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
|
||||
resolve_provider: Some(true),
|
||||
..Default::default()
|
||||
}),
|
||||
signature_help_provider: Some(lsp::SignatureHelpOptions::default()),
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
@@ -7535,6 +7536,37 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
|
||||
.await;
|
||||
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
|
||||
|
||||
let _handler = handle_signature_help_request(
|
||||
&mut cx,
|
||||
lsp::SignatureHelp {
|
||||
signatures: vec![lsp::SignatureInformation {
|
||||
label: "test signature".to_string(),
|
||||
documentation: None,
|
||||
parameters: Some(vec![lsp::ParameterInformation {
|
||||
label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
|
||||
documentation: None,
|
||||
}]),
|
||||
active_parameter: None,
|
||||
}],
|
||||
active_signature: None,
|
||||
active_parameter: None,
|
||||
},
|
||||
);
|
||||
cx.update_editor(|editor, cx| {
|
||||
assert!(
|
||||
!editor.signature_help_state.is_shown(),
|
||||
"No signature help was called for"
|
||||
);
|
||||
editor.show_signature_help(&ShowSignatureHelp, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
cx.update_editor(|editor, _| {
|
||||
assert!(
|
||||
!editor.signature_help_state.is_shown(),
|
||||
"No signature help should be shown when completions menu is open"
|
||||
);
|
||||
});
|
||||
|
||||
let apply_additional_edits = cx.update_editor(|editor, cx| {
|
||||
editor.context_menu_next(&Default::default(), cx);
|
||||
editor
|
||||
@@ -9090,6 +9122,43 @@ async fn go_to_prev_overlapping_diagnostic(
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_diagnostics_with_links(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.set_state(indoc! {"
|
||||
fn func(abˇc def: i32) -> u32 {
|
||||
}
|
||||
"});
|
||||
let project = cx.update_editor(|editor, _| editor.project.clone().unwrap());
|
||||
|
||||
cx.update(|cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
project.update_diagnostics(
|
||||
LanguageServerId(0),
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path("/root/file").unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 12)),
|
||||
severity: Some(lsp::DiagnosticSeverity::ERROR),
|
||||
message: "we've had problems with <https://link.one>, and <https://link.two> is broken".to_string(),
|
||||
..Default::default()
|
||||
}],
|
||||
},
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}).unwrap();
|
||||
cx.run_until_parked();
|
||||
cx.update_editor(|editor, cx| hover_popover::hover(editor, &Default::default(), cx));
|
||||
cx.run_until_parked();
|
||||
cx.update_editor(|editor, _| assert!(editor.hover_state.diagnostic_popover.is_some()))
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
@@ -165,6 +165,7 @@ impl EditorElement {
|
||||
});
|
||||
|
||||
crate::rust_analyzer_ext::apply_related_actions(view, cx);
|
||||
crate::clangd_ext::apply_related_actions(view, cx);
|
||||
register_action(view, cx, Editor::move_left);
|
||||
register_action(view, cx, Editor::move_right);
|
||||
register_action(view, cx, Editor::move_down);
|
||||
@@ -330,6 +331,7 @@ impl EditorElement {
|
||||
.detach_and_log_err(cx);
|
||||
});
|
||||
register_action(view, cx, Editor::open_url);
|
||||
register_action(view, cx, Editor::open_file);
|
||||
register_action(view, cx, Editor::fold);
|
||||
register_action(view, cx, Editor::fold_at);
|
||||
register_action(view, cx, Editor::unfold_lines);
|
||||
@@ -342,8 +344,10 @@ impl EditorElement {
|
||||
register_action(view, cx, Editor::toggle_soft_wrap);
|
||||
register_action(view, cx, Editor::toggle_tab_bar);
|
||||
register_action(view, cx, Editor::toggle_line_numbers);
|
||||
register_action(view, cx, Editor::toggle_relative_line_numbers);
|
||||
register_action(view, cx, Editor::toggle_indent_guides);
|
||||
register_action(view, cx, Editor::toggle_inlay_hints);
|
||||
register_action(view, cx, Editor::toggle_inline_completions);
|
||||
register_action(view, cx, hover_popover::hover);
|
||||
register_action(view, cx, Editor::reveal_in_finder);
|
||||
register_action(view, cx, Editor::copy_path);
|
||||
@@ -351,6 +355,7 @@ impl EditorElement {
|
||||
register_action(view, cx, Editor::copy_highlight_json);
|
||||
register_action(view, cx, Editor::copy_permalink_to_line);
|
||||
register_action(view, cx, Editor::open_permalink_to_line);
|
||||
register_action(view, cx, Editor::copy_file_location);
|
||||
register_action(view, cx, Editor::toggle_git_blame);
|
||||
register_action(view, cx, Editor::toggle_git_blame_inline);
|
||||
register_action(view, cx, Editor::toggle_hunk_diff);
|
||||
@@ -1767,7 +1772,7 @@ impl EditorElement {
|
||||
});
|
||||
let font_size = self.style.text.font_size.to_pixels(cx.rem_size());
|
||||
|
||||
let is_relative = EditorSettings::get_global(cx).relative_line_numbers;
|
||||
let is_relative = editor.should_use_relative_line_numbers(cx);
|
||||
let relative_to = if is_relative {
|
||||
Some(newest_selection_head.row())
|
||||
} else {
|
||||
@@ -4994,7 +4999,8 @@ impl Element for EditorElement {
|
||||
Some((MAX_LINE_LEN / 2) as f32 * em_advance)
|
||||
}
|
||||
SoftWrap::EditorWidth => Some(editor_width),
|
||||
SoftWrap::Column(column) => {
|
||||
SoftWrap::Column(column) => Some(column as f32 * em_advance),
|
||||
SoftWrap::Bounded(column) => {
|
||||
Some(editor_width.min(column as f32 * em_advance))
|
||||
}
|
||||
};
|
||||
@@ -5613,7 +5619,7 @@ impl Element for EditorElement {
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
let focus_handle = self.editor.focus_handle(cx);
|
||||
let key_context = self.editor.read(cx).key_context(cx);
|
||||
let key_context = self.editor.update(cx, |editor, cx| editor.key_context(cx));
|
||||
cx.set_key_context(key_context);
|
||||
cx.handle_input(
|
||||
&focus_handle,
|
||||
|
||||
@@ -9,8 +9,8 @@ use language::{Bias, ToOffset};
|
||||
use linkify::{LinkFinder, LinkKind};
|
||||
use lsp::LanguageServerId;
|
||||
use project::{
|
||||
HoverBlock, HoverBlockKind, InlayHintLabelPartTooltip, InlayHintTooltip, LocationLink,
|
||||
ResolveState,
|
||||
HoverBlock, HoverBlockKind, InlayHintLabelPartTooltip, InlayHintTooltip, LocationLink, Project,
|
||||
ResolveState, ResolvedPath,
|
||||
};
|
||||
use std::ops::Range;
|
||||
use theme::ActiveTheme as _;
|
||||
@@ -63,6 +63,7 @@ impl RangeInEditor {
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum HoverLink {
|
||||
Url(String),
|
||||
File(ResolvedPath),
|
||||
Text(LocationLink),
|
||||
InlayHint(lsp::Location, LanguageServerId),
|
||||
}
|
||||
@@ -522,35 +523,54 @@ pub fn show_link_definition(
|
||||
})
|
||||
.ok()
|
||||
} else if let Some(project) = project {
|
||||
// query the LSP for definition info
|
||||
project
|
||||
.update(&mut cx, |project, cx| match preferred_kind {
|
||||
LinkDefinitionKind::Symbol => {
|
||||
project.definition(&buffer, buffer_position, cx)
|
||||
}
|
||||
if let Some((filename_range, filename)) =
|
||||
find_file(&buffer, project.clone(), buffer_position, &mut cx).await
|
||||
{
|
||||
let range = maybe!({
|
||||
let start =
|
||||
snapshot.anchor_in_excerpt(excerpt_id, filename_range.start)?;
|
||||
let end =
|
||||
snapshot.anchor_in_excerpt(excerpt_id, filename_range.end)?;
|
||||
Some(RangeInEditor::Text(start..end))
|
||||
});
|
||||
|
||||
LinkDefinitionKind::Type => {
|
||||
project.type_definition(&buffer, buffer_position, cx)
|
||||
}
|
||||
})?
|
||||
.await
|
||||
.ok()
|
||||
.map(|definition_result| {
|
||||
(
|
||||
definition_result.iter().find_map(|link| {
|
||||
link.origin.as_ref().and_then(|origin| {
|
||||
let start = snapshot.anchor_in_excerpt(
|
||||
excerpt_id,
|
||||
origin.range.start,
|
||||
)?;
|
||||
let end = snapshot
|
||||
.anchor_in_excerpt(excerpt_id, origin.range.end)?;
|
||||
Some(RangeInEditor::Text(start..end))
|
||||
})
|
||||
}),
|
||||
definition_result.into_iter().map(HoverLink::Text).collect(),
|
||||
)
|
||||
})
|
||||
Some((range, vec![HoverLink::File(filename)]))
|
||||
} else {
|
||||
// query the LSP for definition info
|
||||
project
|
||||
.update(&mut cx, |project, cx| match preferred_kind {
|
||||
LinkDefinitionKind::Symbol => {
|
||||
project.definition(&buffer, buffer_position, cx)
|
||||
}
|
||||
|
||||
LinkDefinitionKind::Type => {
|
||||
project.type_definition(&buffer, buffer_position, cx)
|
||||
}
|
||||
})?
|
||||
.await
|
||||
.ok()
|
||||
.map(|definition_result| {
|
||||
(
|
||||
definition_result.iter().find_map(|link| {
|
||||
link.origin.as_ref().and_then(|origin| {
|
||||
let start = snapshot.anchor_in_excerpt(
|
||||
excerpt_id,
|
||||
origin.range.start,
|
||||
)?;
|
||||
let end = snapshot.anchor_in_excerpt(
|
||||
excerpt_id,
|
||||
origin.range.end,
|
||||
)?;
|
||||
Some(RangeInEditor::Text(start..end))
|
||||
})
|
||||
}),
|
||||
definition_result
|
||||
.into_iter()
|
||||
.map(HoverLink::Text)
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -686,6 +706,116 @@ pub(crate) fn find_url(
|
||||
None
|
||||
}
|
||||
|
||||
pub(crate) async fn find_file(
|
||||
buffer: &Model<language::Buffer>,
|
||||
project: Model<Project>,
|
||||
position: text::Anchor,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) -> Option<(Range<text::Anchor>, ResolvedPath)> {
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot()).ok()?;
|
||||
|
||||
let (range, candidate_file_path) = surrounding_filename(snapshot, position)?;
|
||||
|
||||
let existing_path = project
|
||||
.update(cx, |project, cx| {
|
||||
project.resolve_existing_file_path(&candidate_file_path, &buffer, cx)
|
||||
})
|
||||
.ok()?
|
||||
.await?;
|
||||
|
||||
Some((range, existing_path))
|
||||
}
|
||||
|
||||
fn surrounding_filename(
|
||||
snapshot: language::BufferSnapshot,
|
||||
position: text::Anchor,
|
||||
) -> Option<(Range<text::Anchor>, String)> {
|
||||
const LIMIT: usize = 2048;
|
||||
|
||||
let offset = position.to_offset(&snapshot);
|
||||
let mut token_start = offset;
|
||||
let mut token_end = offset;
|
||||
let mut found_start = false;
|
||||
let mut found_end = false;
|
||||
let mut inside_quotes = false;
|
||||
|
||||
let mut filename = String::new();
|
||||
|
||||
let mut backwards = snapshot.reversed_chars_at(offset).take(LIMIT).peekable();
|
||||
while let Some(ch) = backwards.next() {
|
||||
// Escaped whitespace
|
||||
if ch.is_whitespace() && backwards.peek() == Some(&'\\') {
|
||||
filename.push(ch);
|
||||
token_start -= ch.len_utf8();
|
||||
backwards.next();
|
||||
token_start -= '\\'.len_utf8();
|
||||
continue;
|
||||
}
|
||||
if ch.is_whitespace() {
|
||||
found_start = true;
|
||||
break;
|
||||
}
|
||||
if (ch == '"' || ch == '\'') && !inside_quotes {
|
||||
found_start = true;
|
||||
inside_quotes = true;
|
||||
break;
|
||||
}
|
||||
|
||||
filename.push(ch);
|
||||
token_start -= ch.len_utf8();
|
||||
}
|
||||
if !found_start && token_start != 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
filename = filename.chars().rev().collect();
|
||||
|
||||
let mut forwards = snapshot
|
||||
.chars_at(offset)
|
||||
.take(LIMIT - (offset - token_start))
|
||||
.peekable();
|
||||
while let Some(ch) = forwards.next() {
|
||||
// Skip escaped whitespace
|
||||
if ch == '\\' && forwards.peek().map_or(false, |ch| ch.is_whitespace()) {
|
||||
token_end += ch.len_utf8();
|
||||
let whitespace = forwards.next().unwrap();
|
||||
token_end += whitespace.len_utf8();
|
||||
filename.push(whitespace);
|
||||
continue;
|
||||
}
|
||||
|
||||
if ch.is_whitespace() {
|
||||
found_end = true;
|
||||
break;
|
||||
}
|
||||
if ch == '"' || ch == '\'' {
|
||||
// If we're inside quotes, we stop when we come across the next quote
|
||||
if inside_quotes {
|
||||
found_end = true;
|
||||
break;
|
||||
} else {
|
||||
// Otherwise, we skip the quote
|
||||
inside_quotes = true;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
filename.push(ch);
|
||||
token_end += ch.len_utf8();
|
||||
}
|
||||
|
||||
if !found_end && (token_end - token_start >= LIMIT) {
|
||||
return None;
|
||||
}
|
||||
|
||||
if filename.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let range = snapshot.anchor_before(token_start)..snapshot.anchor_after(token_end);
|
||||
|
||||
Some((range, filename))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -1268,4 +1398,184 @@ mod tests {
|
||||
cx.simulate_click(screen_coord, Modifiers::secondary_key());
|
||||
assert_eq!(cx.opened_url(), Some("https://zed.dev/releases".into()));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_surrounding_filename(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let test_cases = [
|
||||
("file ˇ name", None),
|
||||
("ˇfile name", Some("file")),
|
||||
("file ˇname", Some("name")),
|
||||
("fiˇle name", Some("file")),
|
||||
("filenˇame", Some("filename")),
|
||||
// Absolute path
|
||||
("foobar ˇ/home/user/f.txt", Some("/home/user/f.txt")),
|
||||
("foobar /home/useˇr/f.txt", Some("/home/user/f.txt")),
|
||||
// Windows
|
||||
("C:\\Useˇrs\\user\\f.txt", Some("C:\\Users\\user\\f.txt")),
|
||||
// Whitespace
|
||||
("ˇfile\\ -\\ name.txt", Some("file - name.txt")),
|
||||
("file\\ -\\ naˇme.txt", Some("file - name.txt")),
|
||||
// Tilde
|
||||
("ˇ~/file.txt", Some("~/file.txt")),
|
||||
("~/fiˇle.txt", Some("~/file.txt")),
|
||||
// Double quotes
|
||||
("\"fˇile.txt\"", Some("file.txt")),
|
||||
("ˇ\"file.txt\"", Some("file.txt")),
|
||||
("ˇ\"fi\\ le.txt\"", Some("fi le.txt")),
|
||||
// Single quotes
|
||||
("'fˇile.txt'", Some("file.txt")),
|
||||
("ˇ'file.txt'", Some("file.txt")),
|
||||
("ˇ'fi\\ le.txt'", Some("fi le.txt")),
|
||||
];
|
||||
|
||||
for (input, expected) in test_cases {
|
||||
cx.set_state(input);
|
||||
|
||||
let (position, snapshot) = cx.editor(|editor, cx| {
|
||||
let positions = editor.selections.newest_anchor().head().text_anchor;
|
||||
let snapshot = editor
|
||||
.buffer()
|
||||
.clone()
|
||||
.read(cx)
|
||||
.as_singleton()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.snapshot();
|
||||
(positions, snapshot)
|
||||
});
|
||||
|
||||
let result = surrounding_filename(snapshot, position);
|
||||
|
||||
if let Some(expected) = expected {
|
||||
assert!(result.is_some(), "Failed to find file path: {}", input);
|
||||
let (_, path) = result.unwrap();
|
||||
assert_eq!(&path, expected, "Incorrect file path for input: {}", input);
|
||||
} else {
|
||||
assert!(
|
||||
result.is_none(),
|
||||
"Expected no result, but got one: {:?}",
|
||||
result
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_hover_filenames(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
// Insert a new file
|
||||
let fs = cx.update_workspace(|workspace, cx| workspace.project().read(cx).fs().clone());
|
||||
fs.as_fake()
|
||||
.insert_file("/root/dir/file2.rs", "This is file2.rs".as_bytes().to_vec())
|
||||
.await;
|
||||
|
||||
cx.set_state(indoc! {"
|
||||
You can't go to a file that does_not_exist.txt.
|
||||
Go to file2.rs if you want.
|
||||
Or go to ../dir/file2.rs if you want.
|
||||
Or go to /root/dir/file2.rs if project is local.ˇ
|
||||
"});
|
||||
|
||||
// File does not exist
|
||||
let screen_coord = cx.pixel_position(indoc! {"
|
||||
You can't go to a file that dˇoes_not_exist.txt.
|
||||
Go to file2.rs if you want.
|
||||
Or go to ../dir/file2.rs if you want.
|
||||
Or go to /root/dir/file2.rs if project is local.
|
||||
"});
|
||||
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
|
||||
// No highlight
|
||||
cx.update_editor(|editor, cx| {
|
||||
assert!(editor
|
||||
.snapshot(cx)
|
||||
.text_highlight_ranges::<HoveredLinkState>()
|
||||
.unwrap_or_default()
|
||||
.1
|
||||
.is_empty());
|
||||
});
|
||||
|
||||
// Moving the mouse over a file that does exist should highlight it.
|
||||
let screen_coord = cx.pixel_position(indoc! {"
|
||||
You can't go to a file that does_not_exist.txt.
|
||||
Go to fˇile2.rs if you want.
|
||||
Or go to ../dir/file2.rs if you want.
|
||||
Or go to /root/dir/file2.rs if project is local.
|
||||
"});
|
||||
|
||||
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
|
||||
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
|
||||
You can't go to a file that does_not_exist.txt.
|
||||
Go to «file2.rsˇ» if you want.
|
||||
Or go to ../dir/file2.rs if you want.
|
||||
Or go to /root/dir/file2.rs if project is local.
|
||||
"});
|
||||
|
||||
// Moving the mouse over a relative path that does exist should highlight it
|
||||
let screen_coord = cx.pixel_position(indoc! {"
|
||||
You can't go to a file that does_not_exist.txt.
|
||||
Go to file2.rs if you want.
|
||||
Or go to ../dir/fˇile2.rs if you want.
|
||||
Or go to /root/dir/file2.rs if project is local.
|
||||
"});
|
||||
|
||||
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
|
||||
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
|
||||
You can't go to a file that does_not_exist.txt.
|
||||
Go to file2.rs if you want.
|
||||
Or go to «../dir/file2.rsˇ» if you want.
|
||||
Or go to /root/dir/file2.rs if project is local.
|
||||
"});
|
||||
|
||||
// Moving the mouse over an absolute path that does exist should highlight it
|
||||
let screen_coord = cx.pixel_position(indoc! {"
|
||||
You can't go to a file that does_not_exist.txt.
|
||||
Go to file2.rs if you want.
|
||||
Or go to ../dir/file2.rs if you want.
|
||||
Or go to /root/diˇr/file2.rs if project is local.
|
||||
"});
|
||||
|
||||
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
|
||||
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
|
||||
You can't go to a file that does_not_exist.txt.
|
||||
Go to file2.rs if you want.
|
||||
Or go to ../dir/file2.rs if you want.
|
||||
Or go to «/root/dir/file2.rsˇ» if project is local.
|
||||
"});
|
||||
|
||||
cx.simulate_click(screen_coord, Modifiers::secondary_key());
|
||||
|
||||
cx.update_workspace(|workspace, cx| assert_eq!(workspace.items(cx).count(), 2));
|
||||
cx.update_workspace(|workspace, cx| {
|
||||
let active_editor = workspace.active_item_as::<Editor>(cx).unwrap();
|
||||
|
||||
let buffer = active_editor
|
||||
.read(cx)
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.as_singleton()
|
||||
.unwrap();
|
||||
|
||||
let file = buffer.read(cx).file().unwrap();
|
||||
let file_path = file.as_local().unwrap().abs_path(cx);
|
||||
|
||||
assert_eq!(file_path.to_str().unwrap(), "/root/dir/file2.rs");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -782,7 +782,7 @@ fn editor_with_deleted_text(
editor.set_show_gutter(false, cx);
editor.scroll_manager.set_forbid_vertical_scroll(true);
editor.set_read_only(true);
editor.set_show_inline_completions(false);
editor.set_show_inline_completions(Some(false), cx);
editor.highlight_rows::<DiffRowHighlight>(
Anchor::min()..=Anchor::max(),
Some(deleted_color),

@@ -1144,16 +1144,37 @@ pub(crate) enum BufferSearchHighlights {}
impl SearchableItem for Editor {
type Match = Range<Anchor>;

fn get_matches(&self, _: &mut WindowContext) -> Vec<Range<Anchor>> {
self.background_highlights
.get(&TypeId::of::<BufferSearchHighlights>())
.map_or(Vec::new(), |(_color, ranges)| {
ranges.iter().map(|range| range.clone()).collect()
})
}

fn clear_matches(&mut self, cx: &mut ViewContext<Self>) {
self.clear_background_highlights::<BufferSearchHighlights>(cx);
if self
.clear_background_highlights::<BufferSearchHighlights>(cx)
.is_some()
{
cx.emit(SearchEvent::MatchesInvalidated);
}
}

fn update_matches(&mut self, matches: &[Range<Anchor>], cx: &mut ViewContext<Self>) {
let existing_range = self
.background_highlights
.get(&TypeId::of::<BufferSearchHighlights>())
.map(|(_, range)| range.as_ref());
let updated = existing_range != Some(matches);
self.highlight_background::<BufferSearchHighlights>(
matches,
|theme| theme.search_match_background,
cx,
);
if updated {
cx.emit(SearchEvent::MatchesInvalidated);
}
}

fn has_filtered_search_ranges(&mut self) -> bool {

crates/editor/src/lsp_ext.rs (new file, 54 lines)
@@ -0,0 +1,54 @@
use std::sync::Arc;

use crate::Editor;
use gpui::{Model, WindowContext};
use language::Buffer;
use language::Language;
use lsp::LanguageServerId;
use multi_buffer::Anchor;

pub(crate) fn find_specific_language_server_in_selection<F>(
editor: &Editor,
cx: &WindowContext,
filter_language: F,
language_server_name: &str,
) -> Option<(Anchor, Arc<Language>, LanguageServerId, Model<Buffer>)>
where
F: Fn(&Language) -> bool,
{
let Some(project) = &editor.project else {
return None;
};
let multibuffer = editor.buffer().read(cx);
editor
.selections
.disjoint_anchors()
.into_iter()
.filter(|selection| selection.start == selection.end)
.filter_map(|selection| Some((selection.start.buffer_id?, selection.start)))
.filter_map(|(buffer_id, trigger_anchor)| {
let buffer = multibuffer.buffer(buffer_id)?;
let language = buffer.read(cx).language_at(trigger_anchor.text_anchor)?;
if !filter_language(&language) {
return None;
}
Some((trigger_anchor, language, buffer))
})
.find_map(|(trigger_anchor, language, buffer)| {
project
.read(cx)
.language_servers_for_buffer(buffer.read(cx), cx)
.find_map(|(adapter, server)| {
if adapter.name.0.as_ref() == language_server_name {
Some((
trigger_anchor,
Arc::clone(&language),
server.server_id(),
buffer.clone(),
))
} else {
None
}
})
})
}

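For orientation, a hedged sketch of how another crate-internal feature could consume the helper introduced above; the `TAPLO_NAME` constant and `is_toml_language` filter are invented placeholders for this illustration, and the real call site (for rust-analyzer) appears in the expand-macro hunk further below.

// Illustrative only: not part of the diff. Assumes this lives inside
// crates/editor next to the helper above, since the function is pub(crate).
use crate::lsp_ext::find_specific_language_server_in_selection;
use crate::Editor;
use gpui::WindowContext;
use language::Language;

const TAPLO_NAME: &str = "taplo"; // hypothetical language server name

fn is_toml_language(language: &Language) -> bool {
    language.name().as_ref() == "TOML"
}

fn has_toml_server_in_selection(editor: &Editor, cx: &WindowContext) -> bool {
    // True when some empty selection sits in a TOML buffer that has a running
    // "taplo" server; the returned tuple also carries the anchor, language,
    // server id, and buffer handle needed to issue a server-specific request.
    find_specific_language_server_in_selection(editor, cx, is_toml_language, TAPLO_NAME).is_some()
}
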
@@ -1,11 +1,10 @@
use std::ops::Range;

use crate::GoToDeclaration;
use crate::{
selections_collection::SelectionsCollection, Copy, CopyPermalinkToLine, Cut, DisplayPoint,
DisplaySnapshot, Editor, EditorMode, FindAllReferences, GoToDefinition, GoToImplementation,
GoToTypeDefinition, Paste, Rename, RevealInFileManager, SelectMode, ToDisplayPoint,
ToggleCodeActions,
actions::Format, selections_collection::SelectionsCollection, Copy, CopyPermalinkToLine, Cut,
DisplayPoint, DisplaySnapshot, Editor, EditorMode, FindAllReferences, GoToDeclaration,
GoToDefinition, GoToImplementation, GoToTypeDefinition, Paste, Rename, RevealInFileManager,
SelectMode, ToDisplayPoint, ToggleCodeActions,
};
use gpui::prelude::FluentBuilder;
use gpui::{DismissEvent, Pixels, Point, Subscription, View, ViewContext};
@@ -162,12 +161,14 @@ pub fn deploy_context_menu(
ui::ContextMenu::build(cx, |menu, _cx| {
let builder = menu
.on_blur_subscription(Subscription::new(|| {}))
.action("Rename Symbol", Box::new(Rename))
.action("Go to Definition", Box::new(GoToDefinition))
.action("Go to Declaration", Box::new(GoToDeclaration))
.action("Go to Type Definition", Box::new(GoToTypeDefinition))
.action("Go to Implementation", Box::new(GoToImplementation))
.action("Find All References", Box::new(FindAllReferences))
.separator()
.action("Rename Symbol", Box::new(Rename))
.action("Format Buffer", Box::new(Format))
.action(
"Code Actions",
Box::new(ToggleCodeActions {

@@ -1,5 +1,3 @@
use std::sync::Arc;

use anyhow::Context as _;
use gpui::{Context, View, ViewContext, VisualContext, WindowContext};
use language::Language;
@@ -7,22 +5,24 @@ use multi_buffer::MultiBuffer;
use project::lsp_ext_command::ExpandMacro;
use text::ToPointUtf16;

use crate::{element::register_action, Editor, ExpandMacroRecursively};
use crate::{
element::register_action, lsp_ext::find_specific_language_server_in_selection, Editor,
ExpandMacroRecursively,
};

static RUST_ANALYZER_NAME: &str = "rust-analyzer";

fn is_rust_language(language: &Language) -> bool {
language.name().as_ref() == "Rust"
}

pub fn apply_related_actions(editor: &View<Editor>, cx: &mut WindowContext) {
let is_rust_related = editor.update(cx, |editor, cx| {
editor
.buffer()
.read(cx)
.all_buffers()
.iter()
.any(|b| match b.read(cx).language() {
Some(l) => is_rust_language(l),
None => false,
})
});

if is_rust_related {
if editor
.update(cx, |e, cx| {
find_specific_language_server_in_selection(e, cx, &is_rust_language, RUST_ANALYZER_NAME)
})
.is_some()
{
register_action(editor, cx, expand_macro_recursively);
}
}
@@ -42,39 +42,13 @@ pub fn expand_macro_recursively(
return;
};

let multibuffer = editor.buffer().read(cx);

let Some((trigger_anchor, rust_language, server_to_query, buffer)) = editor
.selections
.disjoint_anchors()
.into_iter()
.filter(|selection| selection.start == selection.end)
.filter_map(|selection| Some((selection.start.buffer_id?, selection.start)))
.filter_map(|(buffer_id, trigger_anchor)| {
let buffer = multibuffer.buffer(buffer_id)?;
let rust_language = buffer.read(cx).language_at(trigger_anchor.text_anchor)?;
if !is_rust_language(&rust_language) {
return None;
}
Some((trigger_anchor, rust_language, buffer))
})
.find_map(|(trigger_anchor, rust_language, buffer)| {
project
.read(cx)
.language_servers_for_buffer(buffer.read(cx), cx)
.find_map(|(adapter, server)| {
if adapter.name.0.as_ref() == "rust-analyzer" {
Some((
trigger_anchor,
Arc::clone(&rust_language),
server.server_id(),
buffer.clone(),
))
} else {
None
}
})
})
let Some((trigger_anchor, rust_language, server_to_query, buffer)) =
find_specific_language_server_in_selection(
&editor,
cx,
&is_rust_language,
RUST_ANALYZER_NAME,
)
else {
return;
};
@@ -120,7 +94,3 @@ pub fn expand_macro_recursively(
})
.detach_and_log_err(cx);
}

fn is_rust_language(language: &Language) -> bool {
language.name().as_ref() == "Rust"
}

@@ -505,7 +505,7 @@ impl Editor {
}

if let Some(visible_lines) = self.visible_line_count() {
if newest_head.row() < DisplayRow(screen_top.row().0 + visible_lines as u32) {
if newest_head.row() <= DisplayRow(screen_top.row().0 + visible_lines as u32) {
return Ordering::Equal;
}
}

@@ -149,7 +149,7 @@ impl Editor {
}

pub fn show_signature_help(&mut self, _: &ShowSignatureHelp, cx: &mut ViewContext<Self>) {
if self.pending_rename.is_some() {
if self.pending_rename.is_some() || self.has_active_completions_menu() {
return;
}

@@ -93,12 +93,21 @@ impl ExtensionBuilder {
self.compile_rust_extension(extension_dir, extension_manifest, options)
.await
.context("failed to compile Rust extension")?;
log::info!("compiled Rust extension {}", extension_dir.display());
}

for (grammar_name, grammar_metadata) in &extension_manifest.grammars {
log::info!(
"compiling grammar {grammar_name} for extension {}",
extension_dir.display()
);
self.compile_grammar(extension_dir, grammar_name.as_ref(), grammar_metadata)
.await
.with_context(|| format!("failed to compile grammar '{grammar_name}'"))?;
log::info!(
"compiled grammar {grammar_name} for extension {}",
extension_dir.display()
);
}

log::info!("finished compiling extension {}", extension_dir.display());
@@ -117,7 +126,10 @@ impl ExtensionBuilder {
let cargo_toml_content = fs::read_to_string(&extension_dir.join("Cargo.toml"))?;
let cargo_toml: CargoToml = toml::from_str(&cargo_toml_content)?;

log::info!("compiling rust extension {}", extension_dir.display());
log::info!(
"compiling Rust crate for extension {}",
extension_dir.display()
);
let output = Command::new("cargo")
.args(["build", "--target", RUST_TARGET])
.args(options.release.then_some("--release"))
@@ -133,6 +145,11 @@ impl ExtensionBuilder {
);
}

log::info!(
"compiled Rust crate for extension {}",
extension_dir.display()
);

let mut wasm_path = PathBuf::from(extension_dir);
wasm_path.extend([
"target",
@@ -155,6 +172,11 @@ impl ExtensionBuilder {
.context("failed to load adapter module")?
.validate(true);

log::info!(
"encoding wasm component for extension {}",
extension_dir.display()
);

let component_bytes = encoder
.encode()
.context("failed to encode wasm component")?;
@@ -168,9 +190,16 @@ impl ExtensionBuilder {
.context("compiled wasm did not contain a valid zed extension api version")?;
manifest.lib.version = Some(wasm_extension_api_version);

fs::write(extension_dir.join("extension.wasm"), &component_bytes)
let extension_file = extension_dir.join("extension.wasm");
fs::write(extension_file.clone(), &component_bytes)
.context("failed to write extension.wasm")?;

log::info!(
"extension {} written to {}",
extension_dir.display(),
extension_file.display()
);

Ok(())
}

@@ -452,28 +452,34 @@ impl ExtensionsPage {
)
.child(
h_flex()
.gap_2()
.justify_between()
.child(
Label::new(format!(
"{}: {}",
if extension.authors.len() > 1 {
"Authors"
} else {
"Author"
},
extension.authors.join(", ")
))
.size(LabelSize::Small),
div().overflow_x_hidden().text_ellipsis().child(
Label::new(format!(
"{}: {}",
if extension.authors.len() > 1 {
"Authors"
} else {
"Author"
},
extension.authors.join(", ")
))
.size(LabelSize::Small),
),
)
.child(Label::new("<>").size(LabelSize::Small)),
)
.child(
h_flex()
.gap_2()
.justify_between()
.children(extension.description.as_ref().map(|description| {
Label::new(description.clone())
.size(LabelSize::Small)
.color(Color::Default)
div().overflow_x_hidden().text_ellipsis().child(
Label::new(description.clone())
.size(LabelSize::Small)
.color(Color::Default),
)
}))
.children(repository_url.map(|repository_url| {
IconButton::new(
@@ -547,18 +553,21 @@ impl ExtensionsPage {
)
.child(
h_flex()
.gap_2()
.justify_between()
.child(
Label::new(format!(
"{}: {}",
if extension.manifest.authors.len() > 1 {
"Authors"
} else {
"Author"
},
extension.manifest.authors.join(", ")
))
.size(LabelSize::Small),
div().overflow_x_hidden().text_ellipsis().child(
Label::new(format!(
"{}: {}",
if extension.manifest.authors.len() > 1 {
"Authors"
} else {
"Author"
},
extension.manifest.authors.join(", ")
))
.size(LabelSize::Small),
),
)
.child(
Label::new(format!(
@@ -573,7 +582,7 @@ impl ExtensionsPage {
.gap_2()
.justify_between()
.children(extension.manifest.description.as_ref().map(|description| {
h_flex().overflow_x_hidden().child(
div().overflow_x_hidden().text_ellipsis().child(
Label::new(description.clone())
.size(LabelSize::Small)
.color(Color::Default),

@@ -126,7 +126,7 @@ impl FeedbackModal {
.language_for_name("Markdown");

let project = workspace.project().clone();
let is_local_project = project.read(cx).is_local();
let is_local_project = project.read(cx).is_local_or_ssh();

if !is_local_project {
struct FeedbackInRemoteProject;
@@ -186,7 +186,7 @@ impl FeedbackModal {
);
editor.set_show_gutter(false, cx);
editor.set_show_indent_guides(false, cx);
editor.set_show_inline_completions(false);
editor.set_show_inline_completions(Some(false), cx);
editor.set_vertical_scroll_margin(5, cx);
editor.set_use_modal_editing(false);
editor

@@ -19,7 +19,6 @@ editor.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
itertools = "0.11"
menu.workspace = true
picker.workspace = true
project.workspace = true

@@ -4,7 +4,7 @@ mod file_finder_tests;
mod new_path_prompt;
mod open_path_prompt;

use collections::{BTreeSet, HashMap};
use collections::HashMap;
use editor::{scroll::Autoscroll, Bias, Editor};
use fuzzy::{CharBag, PathMatch, PathMatchCandidate};
use gpui::{
@@ -12,7 +12,6 @@ use gpui::{
FocusableView, Model, Modifiers, ModifiersChangedEvent, ParentElement, Render, Styled, Task,
View, ViewContext, VisualContext, WeakView,
};
use itertools::Itertools;
use new_path_prompt::NewPathPrompt;
use open_path_prompt::OpenPathPrompt;
use picker::{Picker, PickerDelegate};
@@ -166,6 +165,7 @@ pub struct FileFinderDelegate {
cancel_flag: Arc<AtomicBool>,
history_items: Vec<FoundPath>,
separate_history: bool,
first_update: bool,
}

/// Use a custom ordering for file finder: the regular one
@@ -209,10 +209,29 @@ struct Matches {

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
enum Match {
History(FoundPath, Option<ProjectPanelOrdMatch>),
History {
path: FoundPath,
panel_match: Option<ProjectPanelOrdMatch>,
},
Search(ProjectPanelOrdMatch),
}

impl Match {
fn path(&self) -> &Arc<Path> {
match self {
Match::History { path, .. } => &path.project.path,
Match::Search(panel_match) => &panel_match.0.path,
}
}

fn panel_match(&self) -> Option<&ProjectPanelOrdMatch> {
match self {
Match::History { panel_match, .. } => panel_match.as_ref(),
Match::Search(panel_match) => Some(&panel_match),
}
}
}

impl Matches {
fn len(&self) -> usize {
self.matches.len()
@@ -222,6 +241,33 @@ impl Matches {
self.matches.get(index)
}

fn position(
&self,
entry: &Match,
currently_opened: Option<&FoundPath>,
) -> Result<usize, usize> {
if let Match::History {
path,
panel_match: None,
} = entry
{
// Slow case: linear search by path. Should not happen actually,
// since we call `position` only if matches set changed, but the query has not changed.
// And History entries do not have panel_match if query is empty, so there's no
// reason for the matches set to change.
self.matches
.iter()
.position(|m| path.project.path == *m.path())
.ok_or(0)
} else {
self.matches.binary_search_by(|m| {
// `reverse()` since if cmp_matches(a, b) == Ordering::Greater, then a is better than b.
// And we want the better entries go first.
Self::cmp_matches(self.separate_history, currently_opened, &m, &entry).reverse()
})
}
}

fn push_new_matches<'a>(
&'a mut self,
history_items: impl IntoIterator<Item = &'a FoundPath> + Clone,
@@ -230,88 +276,95 @@ impl Matches {
new_search_matches: impl Iterator<Item = ProjectPanelOrdMatch>,
extend_old_matches: bool,
) {
let no_history_score = 0;
let matching_history_paths =
matching_history_item_paths(history_items.clone(), currently_opened, query);
let new_search_matches = new_search_matches
.filter(|path_match| !matching_history_paths.contains_key(&path_match.0.path))
let Some(query) = query else {
// assuming that if there's no query, then there's no search matches.
self.matches.clear();
let path_to_entry = |found_path: &FoundPath| Match::History {
path: found_path.clone(),
panel_match: None,
};
self.matches
.extend(currently_opened.into_iter().map(path_to_entry));

self.matches.extend(
history_items
.into_iter()
.filter(|found_path| Some(*found_path) != currently_opened)
.map(path_to_entry),
);
return;
};

let new_history_matches = matching_history_items(history_items, currently_opened, query);
let new_search_matches: Vec<Match> = new_search_matches
.filter(|path_match| !new_history_matches.contains_key(&path_match.0.path))
.map(Match::Search)
.map(|m| (no_history_score, m));
let old_search_matches = self
.matches
.drain(..)
.filter(|_| extend_old_matches)
.filter(|m| matches!(m, Match::Search(_)))
.map(|m| (no_history_score, m));
let history_matches = history_items
.into_iter()
.chain(currently_opened)
.enumerate()
.filter_map(|(i, history_item)| {
let query_match = matching_history_paths
.get(&history_item.project.path)
.cloned();
let query_match = if query.is_some() {
query_match?
} else {
query_match.flatten()
};
Some((i + 1, Match::History(history_item.clone(), query_match)))
});

let mut unique_matches = BTreeSet::new();
self.matches = old_search_matches
.chain(history_matches)
.chain(new_search_matches)
.filter(|(_, m)| unique_matches.insert(m.clone()))
.sorted_by(|(history_score_a, a), (history_score_b, b)| {
match (a, b) {
// bubble currently opened files to the top
(Match::History(path, _), _) if Some(path) == currently_opened => {
cmp::Ordering::Less
}
(_, Match::History(path, _)) if Some(path) == currently_opened => {
cmp::Ordering::Greater
}

(Match::History(_, _), Match::Search(_)) if self.separate_history => {
cmp::Ordering::Less
}
(Match::Search(_), Match::History(_, _)) if self.separate_history => {
cmp::Ordering::Greater
}

(Match::History(_, match_a), Match::History(_, match_b)) => {
match_b.cmp(match_a)
}
(Match::History(_, match_a), Match::Search(match_b)) => {
Some(match_b).cmp(&match_a.as_ref())
}
(Match::Search(match_a), Match::History(_, match_b)) => {
match_b.as_ref().cmp(&Some(match_a))
}
(Match::Search(match_a), Match::Search(match_b)) => match_b.cmp(match_a),
}
.then(history_score_a.cmp(history_score_b))
})
.take(100)
.map(|(_, m)| m)
.collect();

if extend_old_matches {
// since we take history matches instead of new search matches
// and history matches has not changed(since the query has not changed and we do not extend old matches otherwise),
// old matches can't contain paths present in history_matches as well.
self.matches.retain(|m| matches!(m, Match::Search(_)));
} else {
self.matches.clear();
}

// At this point we have an unsorted set of new history matches, an unsorted set of new search matches
// and a sorted set of old search matches.
// It is possible that the new search matches' paths contain some of the old search matches' paths.
// History matches' paths are unique, since store in a HashMap by path.
// We build a sorted Vec<Match>, eliminating duplicate search matches.
// Search matches with the same paths should have equal `ProjectPanelOrdMatch`, so we should
// not have any duplicates after building the final list.
for new_match in new_history_matches
.into_values()
.chain(new_search_matches.into_iter())
{
match self.position(&new_match, currently_opened) {
Ok(_duplicate) => continue,
Err(i) => {
self.matches.insert(i, new_match);
if self.matches.len() == 100 {
break;
}
}
}
}
}

/// If a < b, then a is a worse match, aligning with the `ProjectPanelOrdMatch` ordering.
fn cmp_matches(
separate_history: bool,
currently_opened: Option<&FoundPath>,
a: &Match,
b: &Match,
) -> cmp::Ordering {
debug_assert!(a.panel_match().is_some() && b.panel_match().is_some());

match (&a, &b) {
// bubble currently opened files to the top
(Match::History { path, .. }, _) if Some(path) == currently_opened => {
cmp::Ordering::Greater
}
(_, Match::History { path, .. }) if Some(path) == currently_opened => {
cmp::Ordering::Less
}

(Match::History { .. }, Match::Search(_)) if separate_history => cmp::Ordering::Greater,
(Match::Search(_), Match::History { .. }) if separate_history => cmp::Ordering::Less,

_ => a.panel_match().cmp(&b.panel_match()),
}
}
}

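The `position` and `push_new_matches` pair above keeps `matches` as a sorted Vec and relies on `binary_search_by` returning `Err(index)` for the insertion slot and `Ok` for a duplicate. A minimal, self-contained sketch of that pattern, with plain integers standing in for `Match` and its ordering:

// Illustrative only: integers stand in for `Match`, and the comparator plays
// the role of `cmp_matches(..).reverse()` (best entries first).
fn insert_sorted(matches: &mut Vec<i32>, new_entry: i32, cap: usize) {
    match matches.binary_search_by(|existing| new_entry.cmp(existing)) {
        // `Ok` means an equal entry is already present, so skip the duplicate.
        Ok(_duplicate) => {}
        // `Err` is the index that keeps the descending order intact.
        Err(index) => {
            matches.insert(index, new_entry);
            matches.truncate(cap); // the real code stops at 100 matches
        }
    }
}

fn main() {
    let mut matches = vec![9, 7, 4, 1]; // sorted descending: best score first
    for score in [8, 4, 3] {
        insert_sorted(&mut matches, score, 5);
    }
    assert_eq!(matches, vec![9, 8, 7, 4, 3]);
}
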
fn matching_history_item_paths<'a>(
fn matching_history_items<'a>(
history_items: impl IntoIterator<Item = &'a FoundPath>,
currently_opened: Option<&'a FoundPath>,
query: Option<&FileSearchQuery>,
) -> HashMap<Arc<Path>, Option<ProjectPanelOrdMatch>> {
let Some(query) = query else {
return history_items
.into_iter()
.chain(currently_opened)
.map(|found_path| (Arc::clone(&found_path.project.path), None))
.collect();
};
query: &FileSearchQuery,
) -> HashMap<Arc<Path>, Match> {
let mut candidates_paths = HashMap::default();

let history_items_by_worktrees = history_items
.into_iter()
@@ -333,6 +386,7 @@ fn matching_history_item_paths<'a>(
.chars(),
),
};
candidates_paths.insert(Arc::clone(&found_path.project.path), found_path);
Some((found_path.project.worktree_id, candidate))
})
.fold(
@@ -358,9 +412,15 @@ fn matching_history_item_paths<'a>(
)
.into_iter()
.map(|path_match| {
let (_, found_path) = candidates_paths
.remove_entry(&path_match.path)
.expect("candidate info not found");
(
Arc::clone(&path_match.path),
Some(ProjectPanelOrdMatch(path_match)),
Match::History {
path: found_path.clone(),
panel_match: Some(ProjectPanelOrdMatch(path_match)),
},
)
}),
);

@@ -439,6 +499,7 @@ impl FileFinderDelegate {
cancel_flag: Arc::new(AtomicBool::new(false)),
history_items,
separate_history,
first_update: true,
}
}

@@ -524,12 +585,19 @@ impl FileFinderDelegate {
) {
if search_id >= self.latest_search_id {
self.latest_search_id = search_id;
let extend_old_matches = self.latest_search_did_cancel
&& Some(query.path_query())
== self
.latest_search_query
.as_ref()
.map(|query| query.path_query());
let query_changed = Some(query.path_query())
!= self
.latest_search_query
.as_ref()
.map(|query| query.path_query());
let extend_old_matches = self.latest_search_did_cancel && !query_changed;

let selected_match = if query_changed {
None
} else {
self.matches.get(self.selected_index).cloned()
};

self.matches.push_new_matches(
&self.history_items,
self.currently_opened_path.as_ref(),
@@ -537,9 +605,19 @@ impl FileFinderDelegate {
matches.into_iter(),
extend_old_matches,
);

self.selected_index = selected_match.map_or_else(
|| self.calculate_selected_index(),
|m| {
self.matches
.position(&m, self.currently_opened_path.as_ref())
.unwrap_or(0)
},
);

self.latest_search_query = Some(query);
self.latest_search_did_cancel = did_cancel;
self.selected_index = self.calculate_selected_index();

cx.notify();
}
}

@@ -550,10 +628,13 @@ impl FileFinderDelegate {
cx: &AppContext,
ix: usize,
) -> (String, Vec<usize>, String, Vec<usize>) {
let (file_name, file_name_positions, full_path, full_path_positions) = match path_match {
Match::History(found_path, found_path_match) => {
let worktree_id = found_path.project.worktree_id;
let project_relative_path = &found_path.project.path;
let (file_name, file_name_positions, full_path, full_path_positions) = match &path_match {
Match::History {
path: entry_path,
panel_match,
} => {
let worktree_id = entry_path.project.worktree_id;
let project_relative_path = &entry_path.project.path;
let has_worktree = self
.project
.read(cx)
@@ -561,7 +642,7 @@ impl FileFinderDelegate {
.is_some();

if !has_worktree {
if let Some(absolute_path) = &found_path.absolute {
if let Some(absolute_path) = &entry_path.absolute {
return (
absolute_path
.file_name()
@@ -579,7 +660,7 @@ impl FileFinderDelegate {

let mut path = Arc::clone(project_relative_path);
if project_relative_path.as_ref() == Path::new("") {
if let Some(absolute_path) = &found_path.absolute {
if let Some(absolute_path) = &entry_path.absolute {
path = Arc::from(absolute_path.as_path());
}
}
@@ -593,7 +674,7 @@ impl FileFinderDelegate {
path_prefix: "".into(),
distance_to_relative_ancestor: usize::MAX,
};
if let Some(found_path_match) = found_path_match {
if let Some(found_path_match) = &panel_match {
path_match
.positions
.extend(found_path_match.0.positions.iter())
@@ -718,7 +799,7 @@ impl FileFinderDelegate {

/// Skips first history match (that is displayed topmost) if it's currently opened.
fn calculate_selected_index(&self) -> usize {
if let Some(Match::History(path, _)) = self.matches.get(0) {
if let Some(Match::History { path, .. }) = self.matches.get(0) {
if Some(path) == self.currently_opened_path.as_ref() {
let elements_after_first = self.matches.len() - 1;
if elements_after_first > 0 {
@@ -726,6 +807,7 @@ impl FileFinderDelegate {
}
}
}

0
}
}

@@ -758,7 +840,7 @@ impl PickerDelegate for FileFinderDelegate {
.matches
.iter()
.enumerate()
.find(|(_, m)| !matches!(m, Match::History(_, _)))
.find(|(_, m)| !matches!(m, Match::History { .. }))
.map(|(i, _)| i);
if let Some(first_non_history_index) = first_non_history_index {
if first_non_history_index > 0 {
@@ -777,26 +859,34 @@ impl PickerDelegate for FileFinderDelegate {
let raw_query = raw_query.replace(' ', "");
let raw_query = raw_query.trim();
if raw_query.is_empty() {
let project = self.project.read(cx);
self.latest_search_id = post_inc(&mut self.search_count);
self.matches = Matches {
separate_history: self.separate_history,
..Matches::default()
};
self.matches.push_new_matches(
self.history_items.iter().filter(|history_item| {
project
.worktree_for_id(history_item.project.worktree_id, cx)
.is_some()
|| (project.is_local() && history_item.absolute.is_some())
}),
self.currently_opened_path.as_ref(),
None,
None.into_iter(),
false,
);
// if there was no query before, and we already have some (history) matches
// there's no need to update anything, since nothing has changed.
// We also want to populate matches set from history entries on the first update.
if self.latest_search_query.is_some() || self.first_update {
let project = self.project.read(cx);

self.selected_index = 0;
self.latest_search_id = post_inc(&mut self.search_count);
self.latest_search_query = None;
self.matches = Matches {
separate_history: self.separate_history,
..Matches::default()
};
self.matches.push_new_matches(
self.history_items.iter().filter(|history_item| {
project
.worktree_for_id(history_item.project.worktree_id, cx)
.is_some()
|| (project.is_local_or_ssh() && history_item.absolute.is_some())
}),
self.currently_opened_path.as_ref(),
None,
None.into_iter(),
false,
);

self.first_update = false;
self.selected_index = 0;
}
cx.notify();
Task::ready(())
} else {

@@ -843,9 +933,9 @@ impl PickerDelegate for FileFinderDelegate {
)
}
};
match m {
Match::History(history_match, _) => {
let worktree_id = history_match.project.worktree_id;
match &m {
Match::History { path, .. } => {
let worktree_id = path.project.worktree_id;
if workspace
.project()
.read(cx)
@@ -856,12 +946,12 @@ impl PickerDelegate for FileFinderDelegate {
workspace,
ProjectPath {
worktree_id,
path: Arc::clone(&history_match.project.path),
path: Arc::clone(&path.project.path),
},
cx,
)
} else {
match history_match.absolute.as_ref() {
match path.absolute.as_ref() {
Some(abs_path) => {
if secondary {
workspace.split_abs_path(
@@ -881,7 +971,7 @@ impl PickerDelegate for FileFinderDelegate {
workspace,
ProjectPath {
worktree_id,
path: Arc::clone(&history_match.project.path),
path: Arc::clone(&path.project.path),
},
cx,
),
@@ -957,7 +1047,7 @@ impl PickerDelegate for FileFinderDelegate {
.expect("Invalid matches state: no element for index {ix}");

let icon = match &path_match {
Match::History(_, _) => Icon::new(IconName::HistoryRerun)
Match::History { .. } => Icon::new(IconName::HistoryRerun)
.color(Color::Muted)
.size(IconSize::Small)
.into_any_element(),

@@ -1323,6 +1323,62 @@ async fn test_history_items_shown_in_order_of_open(cx: &mut TestAppContext) {
});
}

#[gpui::test]
async fn test_selected_history_item_stays_selected_on_worktree_updated(cx: &mut TestAppContext) {
let app_state = init_test(cx);

app_state
.fs
.as_fake()
.insert_tree(
"/test",
json!({
"test": {
"1.txt": "// One",
"2.txt": "// Two",
"3.txt": "// Three",
}
}),
)
.await;

let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project, cx));

open_close_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
open_close_queried_buffer("2", 1, "2.txt", &workspace, cx).await;
open_close_queried_buffer("3", 1, "3.txt", &workspace, cx).await;

let picker = open_file_picker(&workspace, cx);
picker.update(cx, |finder, _| {
assert_eq!(finder.delegate.matches.len(), 3);
assert_match_selection(finder, 0, "3.txt");
assert_match_at_position(finder, 1, "2.txt");
assert_match_at_position(finder, 2, "1.txt");
});

cx.dispatch_action(SelectNext);

// Add more files to the worktree to trigger update matches
for i in 0..5 {
let filename = format!("/test/{}.txt", 4 + i);
app_state
.fs
.create_file(Path::new(&filename), Default::default())
.await
.expect("unable to create file");
}

cx.executor().advance_clock(FS_WATCH_LATENCY);

picker.update(cx, |finder, _| {
assert_eq!(finder.delegate.matches.len(), 3);
assert_match_at_position(finder, 0, "3.txt");
assert_match_selection(finder, 1, "2.txt");
assert_match_at_position(finder, 2, "1.txt");
});
}

#[gpui::test]
async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppContext) {
let app_state = init_test(cx);
@@ -1541,6 +1597,107 @@ async fn test_search_results_refreshed_on_adding_and_removing_worktrees(
});
}

#[gpui::test]
async fn test_selected_match_stays_selected_after_matches_refreshed(cx: &mut gpui::TestAppContext) {
let app_state = init_test(cx);

app_state.fs.as_fake().insert_tree("/src", json!({})).await;

app_state
.fs
.create_dir("/src/even".as_ref())
.await
.expect("unable to create dir");

let initial_files_num = 5;
for i in 0..initial_files_num {
let filename = format!("/src/even/file_{}.txt", 10 + i);
app_state
.fs
.create_file(Path::new(&filename), Default::default())
.await
.expect("unable to create file");
}

let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));

// Initial state
let picker = open_file_picker(&workspace, cx);
cx.simulate_input("file");
let selected_index = 3;
// Checking only the filename, not the whole path
let selected_file = format!("file_{}.txt", 10 + selected_index);
// Select even/file_13.txt
for _ in 0..selected_index {
cx.dispatch_action(SelectNext);
}

picker.update(cx, |finder, _| {
assert_match_selection(finder, selected_index, &selected_file)
});

// Add more matches to the search results
let files_to_add = 10;
for i in 0..files_to_add {
let filename = format!("/src/file_{}.txt", 20 + i);
app_state
.fs
.create_file(Path::new(&filename), Default::default())
.await
.expect("unable to create file");
}
cx.executor().advance_clock(FS_WATCH_LATENCY);

// file_13.txt is still selected
picker.update(cx, |finder, _| {
let expected_selected_index = selected_index + files_to_add;
assert_match_selection(finder, expected_selected_index, &selected_file);
});
}

#[gpui::test]
async fn test_first_match_selected_if_previous_one_is_not_in_the_match_list(
cx: &mut gpui::TestAppContext,
) {
let app_state = init_test(cx);

app_state
.fs
.as_fake()
.insert_tree(
"/src",
json!({
"file_1.txt": "// file_1",
"file_2.txt": "// file_2",
"file_3.txt": "// file_3",
}),
)
.await;

let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));

// Initial state
let picker = open_file_picker(&workspace, cx);
cx.simulate_input("file");
// Select even/file_2.txt
cx.dispatch_action(SelectNext);

// Remove the selected entry
app_state
.fs
.remove_file("/src/file_2.txt".as_ref(), Default::default())
.await
.expect("unable to remove file");
cx.executor().advance_clock(FS_WATCH_LATENCY);

// file_1.txt is now selected
picker.update(cx, |finder, _| {
assert_match_selection(finder, 0, "file_1.txt");
});
}

#[gpui::test]
async fn test_keeps_file_finder_open_after_modifier_keys_release(cx: &mut gpui::TestAppContext) {
let app_state = init_test(cx);
@@ -1940,8 +2097,11 @@ impl SearchEntries {
fn collect_search_matches(picker: &Picker<FileFinderDelegate>) -> SearchEntries {
let mut search_entries = SearchEntries::default();
for m in &picker.delegate.matches.matches {
match m {
Match::History(history_path, path_match) => {
match &m {
Match::History {
path: history_path,
panel_match: path_match,
} => {
search_entries.history.push(
path_match
.as_ref()
@@ -1996,8 +2156,8 @@ fn assert_match_at_position(
.matches
.get(match_index)
.unwrap_or_else(|| panic!("Finder has no match for index {match_index}"));
let match_file_name = match match_item {
Match::History(found_path, _) => found_path.absolute.as_deref().unwrap().file_name(),
let match_file_name = match &match_item {
Match::History { path, .. } => path.absolute.as_deref().unwrap().file_name(),
Match::Search(path_match) => path_match.0.path.file_name(),
}
.unwrap()

@@ -107,8 +107,10 @@ impl Match {

if let Some(path_match) = &self.path_match {
text.push_str(&path_match.path.to_string_lossy());
let mut whole_path = PathBuf::from(path_match.path_prefix.to_string());
whole_path = whole_path.join(path_match.path.clone());
for (range, style) in highlight_ranges(
&path_match.path.to_string_lossy(),
&whole_path.to_string_lossy(),
&path_match.positions,
gpui::HighlightStyle::color(Color::Accent.color(cx)),
) {

@@ -9,6 +9,9 @@ use std::{fs::File, os::fd::AsFd};
#[cfg(unix)]
use std::os::unix::fs::MetadataExt;

#[cfg(unix)]
use std::os::unix::fs::FileTypeExt;

use async_tar::Archive;
use futures::{future::BoxFuture, AsyncRead, Stream, StreamExt};
use git::repository::{GitRepository, RealGitRepository};
@@ -149,6 +152,7 @@ pub struct Metadata {
pub mtime: SystemTime,
pub is_symlink: bool,
pub is_dir: bool,
pub is_fifo: bool,
}

#[derive(Default)]
@@ -351,6 +355,16 @@ impl Fs for RealFs {
// invalid cross-device link error, and XDG_CACHE_DIR for fallback.
// See https://github.com/zed-industries/zed/pull/8437 for more details.
NamedTempFile::new_in(path.parent().unwrap_or(&paths::temp_dir()))
} else if cfg!(target_os = "windows") {
// If temp dir is set to a different drive than the destination,
// we receive error:
//
// failed to persist temporary file:
// The system cannot move the file to a different disk drive. (os error 17)
//
// So we use the directory of the destination as a temp dir to avoid it.
// https://github.com/zed-industries/zed/issues/16571
NamedTempFile::new_in(path.parent().unwrap_or(&paths::temp_dir()))
} else {
NamedTempFile::new()
}?;
@@ -418,11 +432,18 @@ impl Fs for RealFs {
#[cfg(windows)]
let inode = file_id(path).await?;

#[cfg(windows)]
let is_fifo = false;

#[cfg(unix)]
let is_fifo = metadata.file_type().is_fifo();

Ok(Some(Metadata {
inode,
mtime: metadata.modified().unwrap(),
is_symlink,
is_dir: metadata.file_type().is_dir(),
is_fifo,
}))
}

@@ -825,6 +846,35 @@ impl FakeFs {
state.next_mtime = next_mtime;
}

pub async fn touch_path(&self, path: impl AsRef<Path>) {
let mut state = self.state.lock();
let path = path.as_ref();
let new_mtime = state.next_mtime;
let new_inode = state.next_inode;
state.next_inode += 1;
state.next_mtime += Duration::from_nanos(1);
state
.write_path(path, move |entry| {
match entry {
btree_map::Entry::Vacant(e) => {
e.insert(Arc::new(Mutex::new(FakeFsEntry::File {
inode: new_inode,
mtime: new_mtime,
content: Vec::new(),
})));
}
btree_map::Entry::Occupied(mut e) => match &mut *e.get_mut().lock() {
FakeFsEntry::File { mtime, .. } => *mtime = new_mtime,
FakeFsEntry::Dir { mtime, .. } => *mtime = new_mtime,
FakeFsEntry::Symlink { .. } => {}
},
}
Ok(())
})
.unwrap();
state.emit_event([path.to_path_buf()]);
}

pub async fn insert_file(&self, path: impl AsRef<Path>, content: Vec<u8>) {
self.write_file_internal(path, content).unwrap()
}
@@ -1527,12 +1577,14 @@ impl Fs for FakeFs {
mtime: *mtime,
is_dir: false,
is_symlink,
is_fifo: false,
},
FakeFsEntry::Dir { inode, mtime, .. } => Metadata {
inode: *inode,
mtime: *mtime,
is_dir: true,
is_symlink,
is_fifo: false,
},
FakeFsEntry::Symlink { .. } => unreachable!(),
}))

@@ -43,7 +43,7 @@ pub fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result<HashMap<Oid, String>>
String::from_utf8_lossy(&output.stdout)
.trim()
.split_terminator(MARKER)
.map(|str| String::from(str.trim())),
.map(|str| str.trim().replace("<", "&lt;").replace(">", "&gt;")),
)
.collect::<HashMap<Oid, String>>())
}

@@ -36,11 +36,7 @@ pub trait GitRepository: Send + Sync {
/// Returns the SHA of the current HEAD.
fn head_sha(&self) -> Option<String>;

fn statuses(&self, path_prefix: &Path) -> Result<GitStatus>;

fn status(&self, path: &Path) -> Option<GitFileStatus> {
Some(self.statuses(path).ok()?.entries.first()?.1)
}
fn status(&self, path_prefixes: &[PathBuf]) -> Result<GitStatus>;

fn branches(&self) -> Result<Vec<Branch>>;
fn change_branch(&self, _: &str) -> Result<()>;
@@ -126,14 +122,14 @@ impl GitRepository for RealGitRepository {
Some(self.repository.lock().head().ok()?.target()?.to_string())
}

fn statuses(&self, path_prefix: &Path) -> Result<GitStatus> {
fn status(&self, path_prefixes: &[PathBuf]) -> Result<GitStatus> {
let working_directory = self
.repository
.lock()
.workdir()
.context("failed to read git work directory")?
.to_path_buf();
GitStatus::new(&self.git_binary_path, &working_directory, path_prefix)
GitStatus::new(&self.git_binary_path, &working_directory, path_prefixes)
}

fn branches(&self) -> Result<Vec<Branch>> {
@@ -245,13 +241,16 @@ impl GitRepository for FakeGitRepository {
None
}

fn statuses(&self, path_prefix: &Path) -> Result<GitStatus> {
fn status(&self, path_prefixes: &[PathBuf]) -> Result<GitStatus> {
let state = self.state.lock();
let mut entries = state
.worktree_statuses
.iter()
.filter_map(|(repo_path, status)| {
if repo_path.0.starts_with(path_prefix) {
if path_prefixes
.iter()
.any(|path_prefix| repo_path.0.starts_with(path_prefix))
{
Some((repo_path.to_owned(), *status))
} else {
None

@@ -15,14 +15,10 @@ impl GitStatus {
pub(crate) fn new(
git_binary: &Path,
working_directory: &Path,
mut path_prefix: &Path,
path_prefixes: &[PathBuf],
) -> Result<Self> {
let mut child = Command::new(git_binary);

if path_prefix == Path::new("") {
path_prefix = Path::new(".");
}

child
.current_dir(working_directory)
.args([
@@ -32,7 +28,13 @@ impl GitStatus {
"--untracked-files=all",
"-z",
])
.arg(path_prefix)
.args(path_prefixes.iter().map(|path_prefix| {
if *path_prefix == Path::new("") {
Path::new(".")
} else {
path_prefix
}
}))
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped());

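As a standalone illustration (not the actual Zed code), the pathspec handling above boils down to this normalization: an empty prefix becomes "." so git still scans the whole repository, and every other prefix is forwarded untouched.

// Illustrative sketch of the prefix normalization applied before the
// prefixes are appended to the `git status` command line above.
use std::path::{Path, PathBuf};

fn normalize_prefixes(path_prefixes: &[PathBuf]) -> Vec<&Path> {
    path_prefixes
        .iter()
        .map(|path_prefix| {
            if *path_prefix == Path::new("") {
                Path::new(".") // empty prefix means "the whole repository"
            } else {
                path_prefix.as_path()
            }
        })
        .collect()
}

fn main() {
    let prefixes = vec![PathBuf::new(), PathBuf::from("crates/editor")];
    let normalized = normalize_prefixes(&prefixes);
    assert_eq!(normalized, vec![Path::new("."), Path::new("crates/editor")]);
}
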
@@ -15,6 +15,7 @@ default = []
test-support = [
"backtrace",
"collections/test-support",
"rand",
"util/test-support",
"http_client/test-support",
]
@@ -36,7 +37,6 @@ bytemuck = { version = "1", optional = true }
collections.workspace = true
ctor.workspace = true
derive_more.workspace = true
env_logger.workspace = true
etagere = "0.2"
futures.workspace = true
gpui_macros.workspace = true
@@ -50,7 +50,7 @@ parking = "2.0.0"
parking_lot.workspace = true
postage.workspace = true
profiling.workspace = true
rand.workspace = true
rand = { optional = true, workspace = true}
raw-window-handle = "0.6"
refineable.workspace = true
resvg = { version = "0.41.0", default-features = false }
@@ -68,7 +68,6 @@ strum.workspace = true
sum_tree.workspace = true
taffy = "0.4.3"
thiserror.workspace = true
time.workspace = true
util.workspace = true
uuid.workspace = true
waker-fn = "1.2.0"
@@ -76,6 +75,8 @@ waker-fn = "1.2.0"
[dev-dependencies]
backtrace = "0.3"
collections = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
rand.workspace = true
util = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
unicode-segmentation.workspace = true
@@ -152,6 +153,7 @@ font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "40391b7"
x11-clipboard = "0.9.2"

[target.'cfg(windows)'.dependencies]
rand.workspace = true
windows.workspace = true
windows-core = "0.58"

@@ -178,3 +180,7 @@ path = "examples/input.rs"
[[example]]
name = "shadow"
path = "examples/shadow.rs"

[[example]]
name = "text_wrapper"
path = "examples/text_wrapper.rs"

crates/gpui/examples/text_wrapper.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
use gpui::*;

struct HelloWorld {}

impl Render for HelloWorld {
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
let text = "The longest word in any of the major English language 以及中文的测试 dictionaries is pneumonoultramicroscopicsilicovolcanoconiosis, a word that refers to a lung disease contracted from the inhalation of very fine silica particles, specifically from a volcano; medically, it is the same as silicosis.";
div()
.id("page")
.size_full()
.flex()
.flex_col()
.p_2()
.gap_2()
.bg(gpui::white())
.child(
div()
.text_xl()
.overflow_hidden()
.text_ellipsis()
.border_1()
.border_color(gpui::red())
.child("ELLIPSIS: ".to_owned() + text),
)
.child(
div()
.text_xl()
.overflow_hidden()
.truncate()
.border_1()
.border_color(gpui::green())
.child("TRUNCATE: ".to_owned() + text),
)
.child(
div()
.text_xl()
.whitespace_nowrap()
.overflow_hidden()
.border_1()
.border_color(gpui::blue())
.child("NOWRAP: ".to_owned() + text),
)
.child(div().text_xl().w_full().child(text))
}
}

fn main() {
App::new().run(|cx: &mut AppContext| {
let bounds = Bounds::centered(None, size(px(600.0), px(480.0)), cx);
cx.open_window(
WindowOptions {
window_bounds: Some(WindowBounds::Windowed(bounds)),
..Default::default()
},
|cx| cx.new_view(|_cx| HelloWorld {}),
)
.unwrap();
});
}

@@ -6,7 +6,7 @@ use std::{
path::{Path, PathBuf},
rc::{Rc, Weak},
sync::{atomic::Ordering::SeqCst, Arc},
time::{Duration, Instant},
time::Duration,
};

use anyhow::{anyhow, Result};
@@ -142,12 +142,6 @@ impl App {
self
}

/// Sets a start time for tracking time to first window draw.
pub fn measure_time_to_first_window_draw(self, start: Instant) -> Self {
self.0.borrow_mut().time_to_first_window_draw = Some(TimeToFirstWindowDraw::Pending(start));
self
}

/// Start the application. The provided callback will be called once the
/// app is fully launched.
pub fn run<F>(self, on_finish_launching: F)
@@ -253,7 +247,6 @@ pub struct AppContext {
pub(crate) layout_id_buffer: Vec<LayoutId>, // We recycle this memory across layout requests.
pub(crate) propagate_event: bool,
pub(crate) prompt_builder: Option<PromptBuilder>,
pub(crate) time_to_first_window_draw: Option<TimeToFirstWindowDraw>,
}

impl AppContext {
@@ -307,7 +300,6 @@ impl AppContext {
layout_id_buffer: Default::default(),
propagate_event: true,
prompt_builder: Some(PromptBuilder::Default),
time_to_first_window_draw: None,
}),
});

@@ -1310,14 +1302,6 @@ impl AppContext {

(task, is_first)
}

/// Returns the time to first window draw, if available.
pub fn time_to_first_window_draw(&self) -> Option<Duration> {
match self.time_to_first_window_draw {
Some(TimeToFirstWindowDraw::Done(duration)) => Some(duration),
_ => None,
}
}
}

impl Context for AppContext {
@@ -1481,15 +1465,6 @@ impl<G: Global> DerefMut for GlobalLease<G> {
}
}

/// Represents the initialization duration of the application.
#[derive(Clone, Copy)]
pub enum TimeToFirstWindowDraw {
/// The application is still initializing, and contains the start time.
Pending(Instant),
/// The application has finished initializing, and contains the total duration.
Done(Duration),
}

/// Contains state associated with an active drag operation, started by dragging an element
/// within the window or by dragging into the app from the underlying platform.
pub struct AnyDrag {

@@ -30,7 +30,7 @@ impl AssetSource for () {

/// A unique identifier for the image cache
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct ImageId(usize);
pub struct ImageId(pub usize);

#[derive(PartialEq, Eq, Hash, Clone)]
pub(crate) struct RenderImageParams {

@@ -7,7 +7,7 @@
//!
//! # Element Basics
//!
//! Elements are constructed by calling [`Render::render()`] on the root view of the window, which
//! Elements are constructed by calling [`Render::render()`] on the root view of the window,
//! which recursively constructs the element tree from the current state of the application,.
//! These elements are then laid out by Taffy, and painted to the screen according to their own
//! implementation of [`Element::paint()`]. Before the start of the next frame, the entire element

Some files were not shown because too many files have changed in this diff.