Compare commits
165 commits: fix-more-l ... buffer-fon
| SHA1 |
|---|
| ef7b9653a2 |
| 98533079e4 |
| 27ba165046 |
| 32806b8320 |
| 3ab9700155 |
| 9d96ae6e78 |
| 8d7f5eab79 |
| f6c85b28d5 |
| ea4419076e |
| edb1ea2433 |
| 86aa352ad9 |
| 253aa28375 |
| 165d6b9edb |
| 0ac31302d3 |
| 176f440158 |
| c6028f6651 |
| c38f72d194 |
| 47f698d5a3 |
| bcd2ca6196 |
| 78d6beee80 |
| 2d21f6debf |
| 837b7111b3 |
| ea165e134d |
| 15758c10bf |
| 2f616fe8eb |
| fef0516f5b |
| eb6f7c1240 |
| 36a87d0f5c |
| 43c115a747 |
| 859c5279c4 |
| 13c14d9b96 |
| 3b68665277 |
| 339b29ef17 |
| d1ad96782c |
| b0eda77d73 |
| fd3ee5a9d0 |
| 8cbdd9e0fa |
| 322f68f3d6 |
| 195f9d9b24 |
| 3a6e0bb9b6 |
| fdddbfc179 |
| 3648d79ddb |
| 081e9b9a60 |
| 3cf93dfcf6 |
| 53d0cc6146 |
| 03d853d344 |
| d03f1c4cab |
| fc10201ce2 |
| 7abb63cfda |
| 664efef76b |
| 26299fb8c9 |
| 39e0e26d1d |
| 4151ba13a1 |
| 6ac343123d |
| c763c8c64b |
| bfd9bb8a7c |
| 8f69eac402 |
| 3fc08a0610 |
| fff197b227 |
| 6b320b9efe |
| a0ee29a806 |
| 0331fdebd3 |
| 7dfc7184b1 |
| 759c65d4bd |
| 0533923f91 |
| b6857ca469 |
| 132b8aa5c7 |
| 414058379b |
| 8205c52d2b |
| 4fb9f41e69 |
| 935e0d547e |
| cc367d43d6 |
| a4566c36a3 |
| 843aad80c6 |
| def87a8d76 |
| ee1642a50f |
| 7c5bc3c26f |
| 4a3032c5e5 |
| f327118e06 |
| f9bf60f017 |
| 0390df27d4 |
| cf5a113751 |
| 7dccbd8e3b |
| d009d84ead |
| 5e44748677 |
| d2bf80ca3d |
| 44aed4a0cb |
| e826ef83e2 |
| 56c0345cf3 |
| f1428fea4e |
| 9b88259b1f |
| 4d68bf2fa6 |
| 87c282d8f1 |
| 134decb75e |
| f0d4d71e97 |
| bcdae9fefa |
| 7aef447f47 |
| 4bdfc12b79 |
| 4ce5b22989 |
| ce5bc399df |
| 4f9ad300a7 |
| 3e6a9f6890 |
| 4944dc9d78 |
| c7961b9054 |
| c64c2758c0 |
| 0325bda89a |
| 3aa242e076 |
| 518cfdbd56 |
| bf9b443b4a |
| fe4b345603 |
| 7b636d9774 |
| c851e6edba |
| 4aaf3459c4 |
| b05aa381aa |
| ec6efe262f |
| 6c45bc2b3d |
| 83364c709b |
| 4cab4e8a10 |
| 1737329e84 |
| 3ae6463869 |
| 773a3e83ad |
| cedbfac844 |
| 73d8a43c81 |
| 4a325614f0 |
| 5d88d9c0d7 |
| dde87f6468 |
| d306b531c7 |
| 0f1c2e6f2b |
| 0861ceaac2 |
| 1c485a0d05 |
| 7d1a5d2ddf |
| 27165e9927 |
| 1085642c88 |
| ee1b1779f1 |
| 5b4ff74dca |
| 8e9543aefe |
| c0d117182f |
| 9cbde74274 |
| 879f361966 |
| 79272b75e3 |
| 0ddec2753a |
| ccb2d02ce0 |
| fc08ea9b0d |
| 49c53bc0ec |
| 256b446bdf |
| ef3d04efe6 |
| 469be39a32 |
| db5d53d1d1 |
| b118b76272 |
| 57a1b9b2cd |
| 8eeecdafec |
| eb231d0449 |
| 654504d5ee |
| 08e8ffcef2 |
| 027897e003 |
| c4ceeb715a |
| 58aec1de75 |
| 9aad30a559 |
| 3a0d3cee87 |
| 7dbcace839 |
| 463c16a402 |
| 5a2a85a7db |
| 754547f349 |
| fe7b12c444 |
| 8958c9e10f |
.github/workflows/danger.yml (vendored, 8 changes)

@@ -32,4 +32,10 @@ jobs:
- name: Run Danger
run: pnpm run --dir script/danger danger ci
env:
GITHUB_TOKEN: ${{ github.token }}
# This GitHub token is not used, but the value needs to be here to prevent
# Danger from throwing an error.
GITHUB_TOKEN: "not_a_real_token"
# All requests are instead proxied through an instance of
# https://github.com/maxdeviant/danger-proxy that allows Danger to securely
# authenticate with GitHub while still being able to run on PRs from forks.
DANGER_GITHUB_API_BASE_URL: "https://danger-proxy.fly.dev/github"
@@ -9,10 +9,10 @@ jobs:
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: "3.10.5"
python-version: "3.11"
architecture: "x64"
cache: "pip"
- run: pip install -r script/update_top_ranking_issues/requirements.txt
- run: python script/update_top_ranking_issues/main.py 5393 --github-token ${{ secrets.GITHUB_TOKEN }} --prod
- run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393

@@ -9,10 +9,10 @@ jobs:
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: "3.10.5"
python-version: "3.11"
architecture: "x64"
cache: "pip"
- run: pip install -r script/update_top_ranking_issues/requirements.txt
- run: python script/update_top_ranking_issues/main.py 6952 --github-token ${{ secrets.GITHUB_TOKEN }} --prod --query-day-interval 7
- run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7
@@ -0,0 +1,7 @@
[
{
"label": "clippy",
"command": "cargo",
"args": ["xtask", "clippy"]
}
]
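The new task file above wires a `clippy` task to `cargo xtask clippy`. As a rough, hypothetical sketch of the xtask pattern that command implies (the real `xtask` crate in the repository may be organized quite differently), a minimal dispatcher could look like this:

```rust
// Hypothetical xtask/src/main.rs sketch; names and flags here are assumptions,
// not copied from this diff.
use std::process::{exit, Command};

fn main() {
    let task = std::env::args().nth(1).unwrap_or_default();
    match task.as_str() {
        "clippy" => {
            // Run clippy across the workspace and treat warnings as errors.
            let status = Command::new("cargo")
                .args(["clippy", "--workspace", "--all-targets", "--", "-D", "warnings"])
                .status()
                .expect("failed to run cargo clippy");
            exit(status.code().unwrap_or(1));
        }
        other => {
            eprintln!("unknown xtask: {other:?}");
            exit(1);
        }
    }
}
```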
@@ -2,8 +2,6 @@

Thanks for your interest in contributing to Zed, the collaborative platform that is also a code editor!

We want to avoid anyone spending time on a pull request that may not be accepted, so we suggest you discuss your ideas with the team and community before starting on major changes. Bug fixes, however, are almost always welcome.

All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/docs/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged.

## Contribution ideas
Cargo.lock (generated, 788 changes)

Cargo.toml (24 changes)
@@ -38,6 +38,7 @@ members = [
"crates/google_ai",
"crates/gpui",
"crates/gpui_macros",
"crates/headless",
"crates/image_viewer",
"crates/install_cli",
"crates/journal",

@@ -90,6 +91,7 @@ members = [
"crates/telemetry_events",
"crates/time_format",
"crates/ui",
"crates/ui_text_field",
"crates/util",
"crates/vcs_menu",
"crates/vim",

@@ -102,9 +104,15 @@ members = [
"extensions/astro",
"extensions/clojure",
"extensions/csharp",
"extensions/dart",
"extensions/elm",
"extensions/emmet",
"extensions/erlang",
"extensions/gleam",
"extensions/haskell",
"extensions/html",
"extensions/lua",
"extensions/ocaml",
"extensions/php",
"extensions/prisma",
"extensions/purescript",

@@ -158,6 +166,7 @@ go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui" }
gpui_macros = { path = "crates/gpui_macros" }
headless = { path = "crates/headless" }
install_cli = { path = "crates/install_cli" }
image_viewer = { path = "crates/image_viewer" }
journal = { path = "crates/journal" }

@@ -211,6 +220,7 @@ theme_selector = { path = "crates/theme_selector" }
telemetry_events = { path = "crates/telemetry_events" }
time_format = { path = "crates/time_format" }
ui = { path = "crates/ui" }
ui_text_field = { path = "crates/ui_text_field" }
util = { path = "crates/util" }
vcs_menu = { path = "crates/vcs_menu" }
vim = { path = "crates/vim" }

@@ -220,20 +230,22 @@ zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }

anyhow = "1.0.57"
any_vec = "0.13"
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-fs = "1.6"
async-recursion = "1.0.0"
async-tar = "0.4.2"
async-trait = "0.1"
bitflags = "2.4.2"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "61cbd6b2c224791d52b150fe535cee665cc91bb2" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "61cbd6b2c224791d52b150fe535cee665cc91bb2" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "810ec594358aafea29a4a3d8ab601d25292b2ce4" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "810ec594358aafea29a4a3d8ab601d25292b2ce4" }
blade-rwh = { package = "raw-window-handle", version = "0.5" }
cap-std = "3.0"
chrono = { version = "0.4", features = ["serde"] }
clap = { version = "4.4", features = ["derive"] }
clickhouse = { version = "0.11.6" }
ctor = "0.2.6"
ctrlc = "3.4.4"
core-foundation = { version = "0.9.3" }
core-foundation-sys = "0.8.6"
derive_more = "0.99.17"

@@ -305,11 +317,8 @@ tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", re
tree-sitter-c = "0.20.1"
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "f44509141e7e483323d2ec178f2d2e6c0fc041c1" }
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-dart = { git = "https://github.com/agent3bood/tree-sitter-dart", rev = "48934e3bf757a9b78f17bdfaa3e2b4284656fdc7" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "a2861e88a730287a60c11ea9299c033c7d076e30" }
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40" }
tree-sitter-embedded-template = "0.20.0"
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-gomod = { git = "https://github.com/camdencheek/tree-sitter-go-mod" }
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work" }

@@ -319,14 +328,10 @@ tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex
tree-sitter-html = "0.19.0"
tree-sitter-jsdoc = { git = "https://github.com/tree-sitter/tree-sitter-jsdoc", ref = "6a6cf9e7341af32d8e2b2e24a37fbfebefc3dc55" }
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
tree-sitter-lua = "0.0.14"
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "7dd29f9616822e5fc259f5b4ae6c4ded9a71a132" }
tree-sitter-ocaml = { git = "https://github.com/tree-sitter/tree-sitter-ocaml", rev = "4abfdc1c7af2c6c77a370aee974627be1c285b3b" }
tree-sitter-proto = { git = "https://github.com/rewinfrey/tree-sitter-proto", rev = "36d54f288aee112f13a67b550ad32634d0c2cb52" }
tree-sitter-python = "0.20.2"
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a" }
tree-sitter-regex = "0.20.0"
tree-sitter-ruby = "0.20.0"
tree-sitter-rust = "0.20.3"

@@ -336,6 +341,7 @@ tree-sitter-vue = { git = "https://github.com/zed-industries/tree-sitter-vue", r
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930" }
unindent = "0.1.7"
unicase = "2.6"
unicode-segmentation = "1.10"
url = "2.2"
uuid = { version = "1.1.2", features = ["v4"] }
wasmparser = "0.201"
@@ -1,6 +1,6 @@
# Zed

[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)
[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)

Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).

@@ -17,7 +17,7 @@ Support for additional platforms is on our [roadmap](https://zed.dev/roadmap):
For macOS users, you can also install Zed using [Homebrew](https://brew.sh/):

```sh
brew install zed
brew install --cask zed
```

Alternatively, to install the Preview release:
@@ -1,4 +1,3 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M18 10L21 7L17 3L14 6M18 10L8 20H4V16L14 6M18 10L14 6" stroke="#000000" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="m12 6.668 2-2L11.332 2l-2 2M12 6.668l-6.668 6.664H2.668v-2.664L9.332 4M12 6.668 9.332 4" stroke="black" stroke-width="1" stroke-linejoin="round"/>
</svg>

Before size: 379 B | After size: 239 B
assets/icons/regex.svg (new file, 4 changes)

@@ -0,0 +1,4 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="4" cy="11" r="1" fill="#787D87"/>
<path d="M9 2.5V5M9 5V7.5M9 5H11.5M9 5H6.5M9 5L10.6667 3.33333M9 5L7.33333 6.6667M9 5L10.6667 6.6667M9 5L7.33333 3.33333" stroke="#787D87" stroke-width="1.25" stroke-linecap="round"/>
</svg>

After size: 333 B
@@ -1,4 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M20 17V15.8C20 14.1198 20 13.2798 19.673 12.638C19.3854 12.0735 18.9265 11.6146 18.362 11.327C17.7202 11 16.8802 11 15.2 11H4M4 11L8 7M4 11L8 15" stroke="#000000" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before size: 468 B
@@ -1,56 +1,3 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Transformed by: SVG Repo Mixer Tools -->

<svg
width="800px"
height="800px"
viewBox="0 0 24 24"
fill="none"
version="1.1"
id="svg1"
sodipodi:docname="reply-svgrepo-com.svg"
inkscape:version="1.3.2 (091e20e, 2023-11-25)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs1" />
<sodipodi:namedview
id="namedview1"
pagecolor="#505050"
bordercolor="#ffffff"
borderopacity="1"
inkscape:showpageshadow="0"
inkscape:pageopacity="0"
inkscape:pagecheckerboard="1"
inkscape:deskcolor="#505050"
showgrid="false"
inkscape:zoom="0.39996789"
inkscape:cx="435.03492"
inkscape:cy="417.53351"
inkscape:window-width="1440"
inkscape:window-height="847"
inkscape:window-x="0"
inkscape:window-y="25"
inkscape:window-maximized="1"
inkscape:current-layer="svg1" />
<g
id="SVGRepo_bgCarrier"
stroke-width="0" />
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round" />
<g
id="SVGRepo_iconCarrier"
transform="matrix(-1,0,0,1,24.001548,0)">
<path
d="M 20,17 V 15.8 C 20,14.1198 20,13.2798 19.673,12.638 19.3854,12.0735 18.9265,11.6146 18.362,11.327 17.7202,11 16.8802,11 15.2,11 H 4 m 0,0 4,-4 m -4,4 4,4"
stroke="#000000"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
id="path1" />
</g>
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2.668 11.332v-.797c0-1.12 0-1.683.219-2.11.191-.374.496-.683.87-.874.43-.219.99-.219 2.11-.219h7.469m0 0-2.668-2.664m2.668 2.664L10.668 10" stroke="black" stroke-width="1.33334" stroke-linecap="round"/>
</svg>

Before size: 1.7 KiB | After size: 296 B
@@ -1,5 +1,5 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9.5 7V9.5M9.5 12V9.5M12 9.5H9.5M7 9.5H9.5M9.5 9.5L11.1667 7.83333M9.5 9.5L7.83333 11.1667M9.5 9.5L11.1667 11.1667M9.5 9.5L7.83333 7.83333" stroke="#11181C" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2.19366 3.84943C2.19188 4.26418 2.32864 4.59864 2.60673 4.84707C2.88052 5.09166 3.25136 5.26933 3.71609 5.3824C3.71616 5.38242 3.71623 5.38243 3.7163 5.38245L4.30919 5.53134L4.30919 5.53134L4.30965 5.53145C4.50649 5.57891 4.67124 5.63133 4.80447 5.68843L4.80469 5.68852C4.93838 5.74508 5.03564 5.81206 5.10001 5.8877L5.10001 5.8877L5.10041 5.88816C5.16432 5.96142 5.19716 6.05222 5.19716 6.16389C5.19716 6.28412 5.1609 6.38933 5.0882 6.48141C5.01496 6.57418 4.91031 6.64838 4.77141 6.70259L4.77121 6.70266C4.63472 6.75659 4.47185 6.7843 4.28146 6.7843C4.08801 6.7843 3.91607 6.75496 3.76491 6.69726C3.61654 6.6382 3.49924 6.55209 3.41132 6.43942C3.3502 6.35821 3.30747 6.26204 3.28375 6.14992C3.26238 6.04888 3.1772 5.96225 3.06518 5.96225H2.26366C2.14682 5.96225 2.04842 6.05919 2.0592 6.18012C2.08842 6.50802 2.1826 6.79102 2.34331 7.02735L2.34352 7.02767C2.53217 7.30057 2.79377 7.50587 3.12633 7.64399L3.12642 7.64402C3.46009 7.78185 3.84993 7.85 4.29476 7.85C4.74293 7.85 5.12859 7.7828 5.45023 7.64651L5.45036 7.64646C5.77328 7.50857 6.02259 7.31417 6.19551 7.06217C6.37037 6.80817 6.4579 6.50901 6.45972 6.16682L6.45972 6.16616C6.4579 5.9333 6.41513 5.72482 6.33012 5.54178C6.2474 5.35987 6.13061 5.20175 5.98007 5.06773C5.83038 4.93448 5.65389 4.82273 5.4511 4.7322C5.24919 4.64206 5.02795 4.57016 4.78757 4.51632L4.29841 4.39935L4.29841 4.39934L4.29771 4.39919C4.18081 4.37301 4.07116 4.34168 3.9687 4.30523C3.86715 4.26734 3.77847 4.22375 3.70232 4.17471C3.62796 4.12508 3.57037 4.06717 3.52849 4.00124C3.49012 3.93815 3.47157 3.86312 3.47481 3.77407L3.47484 3.77407V3.77225C3.47484 3.66563 3.50527 3.57146 3.56612 3.48808C3.6287 3.40475 3.71977 3.33801 3.84235 3.28931L3.84235 3.28932L3.84289 3.28909C3.96465 3.23906 4.1165 3.21304 4.30008 3.21304C4.57006 3.21304 4.77746 3.27105 4.92754 3.38154C5.04235 3.46608 5.11838 3.57594 5.15673 3.71259C5.18352 3.80802 5.26636 3.89142 5.37611 3.89142H6.17259C6.28852 3.89142 6.38806 3.7953 6.37515 3.67382C6.34686 3.4077 6.26051 3.16831 6.1158 2.95658C5.94159 2.70169 5.6982 2.50368 5.38762 2.36201L5.36687 2.4075M2.19366 3.84943C2.19187 3.51004 2.28242 3.21139 2.46644 2.9556L2.46658 2.9554C2.65148 2.70093 2.90447 2.50326 3.22368 2.36179C3.54316 2.2202 3.90494 2.15 4.30807 2.15C4.71809 2.15 5.07841 2.22014 5.38773 2.36206L5.36687 2.4075M2.19366 3.84943C2.19366 3.84951 2.19366 3.84959 2.19366 3.84967L2.24366 3.8494L2.19366 3.84918C2.19366 3.84926 2.19366 3.84935 2.19366 3.84943ZM5.36687 2.4075C5.06537 2.26917 4.71244 2.2 4.30807 2.2C3.91079 2.2 3.55608 2.26917 3.24394 2.4075C2.93179 2.54584 2.68616 2.73827 2.50703 2.9848L3.82389 3.24285L3.82389 3.24285C3.95336 3.18964 4.11209 3.16304 4.30008 3.16304C4.57676 3.16304 4.79579 3.22245 4.95718 3.34128C5.08094 3.43239 5.1635 3.55166 5.20487 3.69908C5.2271 3.77827 5.29386 3.84142 5.37611 3.84142H6.17259C6.26198 3.84142 6.33488 3.76799 6.32543 3.6791C6.29797 3.4208 6.21433 3.18936 6.07452 2.9848C5.90603 2.73827 5.67015 2.54584 5.36687 2.4075ZM4.78958 6.74917C4.64593 6.80592 4.47655 6.8343 4.28146 6.8343C4.08283 6.8343 3.90458 6.80415 3.74674 6.74384C3.59067 6.68177 3.46563 6.59043 3.37163 6.46983L4.78958 6.74917ZM4.78958 6.74917C4.93502 6.69241 5.04764 6.61349 5.12745 6.5124M4.78958 6.74917L5.12745 6.5124M5.12745 6.5124C5.20726 6.4113 5.24716 6.29514 5.24716 6.16389M5.12745 6.5124L5.24716 6.16389M5.24716 6.16389C5.24716 6.04152 5.2108 5.93865 5.13809 5.85529L5.24716 6.16389Z" fill="#687076" stroke="#687076" stroke-width="0.1"/>
<path d="M9.5 7V9.5M9.5 9.5V12M9.5 9.5H12M9.5 9.5H7M9.5 9.5L11.1667 7.83333M9.5 9.5L7.83333 11.1667M9.5 9.5L11.1667 11.1667M9.5 9.5L7.83333 7.83333" stroke="#687076" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2.19368 3.84945C2.1919 4.2642 2.32866 4.59866 2.60675 4.84709C2.88054 5.09168 3.25138 5.26935 3.71611 5.38242L4.30921 5.53136C4.50605 5.57882 4.67126 5.63135 4.80449 5.68845C4.93818 5.74501 5.03566 5.81208 5.10003 5.88772C5.16394 5.96098 5.19718 6.05224 5.19718 6.16391C5.19718 6.28414 5.16092 6.38935 5.08822 6.48143C5.01498 6.5742 4.91033 6.6484 4.77143 6.70261C4.63494 6.75654 4.47187 6.78432 4.28148 6.78432C4.08803 6.78432 3.91609 6.75498 3.76493 6.69728C3.61656 6.63822 3.49926 6.55211 3.41134 6.43944C3.35022 6.35823 3.30749 6.26206 3.28377 6.14994C3.2624 6.0489 3.17722 5.96227 3.0652 5.96227H2.26368C2.14684 5.96227 2.04844 6.05921 2.05922 6.18014C2.08844 6.50804 2.18262 6.79104 2.34333 7.02737C2.53198 7.30027 2.79379 7.50589 3.12635 7.64401C3.46002 7.78184 3.84995 7.85002 4.29478 7.85002C4.74295 7.85002 5.12861 7.78282 5.45025 7.64653C5.77317 7.50864 6.02261 7.31419 6.19553 7.06219C6.37039 6.80819 6.45792 6.50903 6.45974 6.16684C6.45792 5.93398 6.41515 5.72484 6.33014 5.5418C6.24742 5.35989 6.13063 5.20177 5.98009 5.06775C5.8304 4.9345 5.65391 4.82275 5.45112 4.73222C5.24921 4.64208 5.02797 4.57018 4.78759 4.51634L4.29843 4.39937C4.18153 4.37319 4.07118 4.3417 3.96872 4.30525C3.86717 4.26736 3.77849 4.22377 3.70234 4.17473C3.62798 4.1251 3.57039 4.06719 3.52851 4.00126C3.49014 3.93817 3.47159 3.86314 3.47483 3.77409L3.47486 3.77227C3.47486 3.66565 3.50529 3.57148 3.56614 3.4881C3.62872 3.40477 3.71979 3.33803 3.84237 3.28933C3.96413 3.2393 4.11652 3.21306 4.3001 3.21306C4.57008 3.21306 4.77748 3.27107 4.92756 3.38156C5.04237 3.4661 5.1184 3.57596 5.15675 3.71261C5.18354 3.80804 5.26638 3.89144 5.37613 3.89144H6.17261C6.28854 3.89144 6.38808 3.79532 6.37517 3.67384C6.34688 3.40772 6.26053 3.16833 6.11582 2.9566C5.94161 2.70171 5.69822 2.5037 5.38764 2.36203L5.36689 2.40752M2.19368 3.84945C2.19189 3.51006 2.28244 3.21141 2.46646 2.95562C2.65136 2.70115 2.90449 2.50328 3.2237 2.36181C3.54318 2.22022 3.90496 2.15002 4.30809 2.15002C4.71811 2.15002 5.07832 2.22011 5.38764 2.36203L5.36689 2.40752M4.7896 6.74919C4.93504 6.69243 5.04766 6.61351 5.12747 6.51242ZM4.7896 6.74919L5.12747 6.51242ZM5.12747 6.51242C5.20728 6.41132 5.24718 6.29516 5.24718 6.16391ZM5.12747 6.51242L5.24718 6.16391ZM5.24718 6.16391C5.24718 6.04154 5.21082 5.93867 5.13811 5.85531L5.24718 6.16391Z" fill="#687076"/>
<path d="M2.19368 3.84945C2.1919 4.2642 2.32866 4.59866 2.60675 4.84709C2.88054 5.09168 3.25138 5.26935 3.71611 5.38242L4.30921 5.53136C4.50605 5.57882 4.67126 5.63135 4.80449 5.68845C4.93818 5.74501 5.03566 5.81208 5.10003 5.88772C5.16394 5.96098 5.19718 6.05224 5.19718 6.16391C5.19718 6.28414 5.16092 6.38935 5.08822 6.48143C5.01498 6.5742 4.91033 6.6484 4.77143 6.70261C4.63494 6.75654 4.47187 6.78432 4.28148 6.78432C4.08803 6.78432 3.91609 6.75498 3.76493 6.69728C3.61656 6.63822 3.49926 6.55211 3.41134 6.43944C3.35022 6.35823 3.30749 6.26206 3.28377 6.14994C3.2624 6.0489 3.17722 5.96227 3.0652 5.96227H2.26368C2.14684 5.96227 2.04844 6.05921 2.05922 6.18014C2.08844 6.50804 2.18262 6.79104 2.34333 7.02737C2.53198 7.30027 2.79379 7.50589 3.12635 7.64401C3.46002 7.78184 3.84995 7.85002 4.29478 7.85002C4.74295 7.85002 5.12861 7.78282 5.45025 7.64653C5.77317 7.50864 6.02261 7.31419 6.19553 7.06219C6.37039 6.80819 6.45792 6.50903 6.45974 6.16684C6.45792 5.93398 6.41515 5.72484 6.33014 5.5418C6.24742 5.35989 6.13063 5.20177 5.98009 5.06775C5.8304 4.9345 5.65391 4.82275 5.45112 4.73222C5.24921 4.64208 5.02797 4.57018 4.78759 4.51634L4.29843 4.39937C4.18153 4.37319 4.07118 4.3417 3.96872 4.30525C3.86717 4.26736 3.77849 4.22377 3.70234 4.17473C3.62798 4.1251 3.57039 4.06719 3.52851 4.00126C3.49014 3.93817 3.47159 3.86314 3.47483 3.77409L3.47486 3.77227C3.47486 3.66565 3.50529 3.57148 3.56614 3.4881C3.62872 3.40477 3.71979 3.33803 3.84237 3.28933C3.96413 3.2393 4.11652 3.21306 4.3001 3.21306C4.57008 3.21306 4.77748 3.27107 4.92756 3.38156C5.04237 3.4661 5.1184 3.57596 5.15675 3.71261C5.18354 3.80804 5.26638 3.89144 5.37613 3.89144H6.17261C6.28854 3.89144 6.38808 3.79532 6.37517 3.67384C6.34688 3.40772 6.26053 3.16833 6.11582 2.9566C5.94161 2.70171 5.69822 2.5037 5.38764 2.36203M2.19368 3.84945C2.19189 3.51006 2.28244 3.21141 2.46646 2.95562C2.65136 2.70115 2.90449 2.50328 3.2237 2.36181C3.54318 2.22022 3.90496 2.15002 4.30809 2.15002C4.71811 2.15002 5.07832 2.22011 5.38764 2.36203M2.19368 3.84945L2.24368 3.84942M5.38764 2.36203L5.36689 2.40752M5.36689 2.40752C5.06539 2.26919 4.71246 2.20002 4.30809 2.20002C3.91081 2.20002 3.5561 2.26919 3.24396 2.40752C2.93181 2.54586 2.68618 2.73829 2.50705 2.98482M5.36689 2.40752C5.67017 2.54586 5.90605 2.73829 6.07454 2.98482C6.21435 3.18938 6.29799 3.42082 6.32545 3.67912C6.3349 3.76801 6.262 3.84144 6.17261 3.84144H5.37613C5.29388 3.84144 5.22712 3.77829 5.20489 3.6991C5.16352 3.55168 5.08096 3.43241 4.9572 3.3413C4.79581 3.22247 4.57678 3.16306 4.3001 3.16306M4.7896 6.74919C4.64595 6.80594 4.47657 6.83432 4.28148 6.83432C4.08285 6.83432 3.9046 6.80417 3.74676 6.74386C3.59069 6.68179 3.46565 6.59045 3.37165 6.46985M4.7896 6.74919C4.93504 6.69243 5.04766 6.61351 5.12747 6.51242M4.7896 6.74919L5.12747 6.51242M5.12747 6.51242C5.20728 6.41132 5.24718 6.29516 5.24718 6.16391M5.12747 6.51242L5.24718 6.16391M5.24718 6.16391C5.24718 6.04154 5.21082 5.93867 5.13811 5.85531L5.24718 6.16391Z" stroke="#687076" stroke-width="0.1"/>
</svg>
Before size: 3.7 KiB | After size: 5.5 KiB
assets/icons/server.svg (new file, 5 changes)

@@ -0,0 +1,5 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.99993 6.85713C11.1558 6.85713 13.7142 5.83379 13.7142 4.57142C13.7142 3.30905 11.1558 2.28571 7.99993 2.28571C4.84402 2.28571 2.28564 3.30905 2.28564 4.57142C2.28564 5.83379 4.84402 6.85713 7.99993 6.85713Z" fill="black" stroke="black" stroke-width="1.5"/>
<path d="M13.7142 4.57141V11.4286C13.7142 12.691 11.1558 13.7143 7.99993 13.7143C4.84402 13.7143 2.28564 12.691 2.28564 11.4286V4.57141" stroke="black" stroke-width="1.5"/>
<path d="M13.7142 8C13.7142 9.26237 11.1558 10.2857 7.99993 10.2857C4.84402 10.2857 2.28564 9.26237 2.28564 8" stroke="black" stroke-width="1.5"/>
</svg>

After size: 692 B
assets/icons/trash.svg (new file, 1 change)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-trash-2"><path d="M3 6h18"/><path d="M19 6v14c0 1-1 2-2 2H7c-1 0-2-1-2-2V6"/><path d="M8 6V4c0-1 1-2 2-2h4c1 0 2 1 2 2v2"/><line x1="10" x2="10" y1="11" y2="17"/><line x1="14" x2="14" y1="11" y2="17"/></svg>

After size: 409 B
@@ -28,7 +28,7 @@
"ctrl-0": "zed::ResetBufferFontSize",
"ctrl-,": "zed::OpenSettings",
"ctrl-q": "zed::Quit",
"ctrl-h": "zed::Hide",
"alt-f9": "zed::Hide",
"f11": "zed::ToggleFullScreen"
}
},

@@ -38,7 +38,6 @@
"escape": "editor::Cancel",
"backspace": "editor::Backspace",
"shift-backspace": "editor::Backspace",
"ctrl-h": "editor::Backspace",
"delete": "editor::Delete",
"ctrl-d": "editor::Delete",
"tab": "editor::Tab",

@@ -150,10 +149,11 @@
"ctrl-shift-enter": "editor::NewlineBelow",
"ctrl-enter": "editor::NewlineAbove",
"alt-z": "editor::ToggleSoftWrap",
"ctrl-f": [
"ctrl-f": "buffer_search::Deploy",
"ctrl-h": [
"buffer_search::Deploy",
{
"focus": true
"replace_enabled": true
}
],
// "cmd-e": [

@@ -212,7 +212,8 @@
"enter": "search::SelectNextMatch",
"shift-enter": "search::SelectPrevMatch",
"alt-enter": "search::SelectAllMatches",
"alt-tab": "search::CycleMode"
"ctrl-f": "search::FocusSearch",
"ctrl-h": "search::ToggleReplace"
}
},
{

@@ -233,10 +234,10 @@
"context": "ProjectSearchBar",
"bindings": {
"escape": "project_search::ToggleFocus",
"alt-tab": "search::CycleMode",
"ctrl-shift-f": "search::FocusSearch",
"ctrl-shift-h": "search::ToggleReplace",
"alt-ctrl-g": "search::ActivateRegexMode",
"alt-ctrl-x": "search::ActivateTextMode"
"alt-ctrl-g": "search::ToggleRegex",
"alt-ctrl-x": "search::ToggleRegex"
}
},
{

@@ -257,10 +258,9 @@
"context": "ProjectSearchView",
"bindings": {
"escape": "project_search::ToggleFocus",
"alt-tab": "search::CycleMode",
"ctrl-shift-h": "search::ToggleReplace",
"alt-ctrl-g": "search::ActivateRegexMode",
"alt-ctrl-x": "search::ActivateTextMode"
"alt-ctrl-g": "search::ToggleRegex",
"alt-ctrl-x": "search::ToggleRegex"
}
},
{

@@ -280,10 +280,10 @@
"alt-enter": "search::SelectAllMatches",
"alt-c": "search::ToggleCaseSensitive",
"alt-w": "search::ToggleWholeWord",
"alt-r": "search::CycleMode",
"alt-r": "search::ToggleRegex",
"alt-ctrl-f": "project_search::ToggleFilters",
"ctrl-alt-shift-r": "search::ActivateRegexMode",
"ctrl-alt-shift-x": "search::ActivateTextMode"
"ctrl-alt-shift-r": "search::ToggleRegex",
"ctrl-alt-shift-x": "search::ToggleRegex"
}
},
// Bindings from VS Code

@@ -419,6 +419,12 @@
"ctrl-j": "workspace::ToggleBottomDock",
"ctrl-alt-y": "workspace::CloseAllDocks",
"ctrl-shift-f": "pane::DeploySearch",
"ctrl-shift-h": [
"pane::DeploySearch",
{
"replace_enabled": true
}
],
"ctrl-k ctrl-s": "zed::OpenKeymap",
"ctrl-k ctrl-t": "theme_selector::Toggle",
"ctrl-shift-t": "project_symbols::Toggle",

@@ -598,7 +604,12 @@
},
{
"context": "TabSwitcher",
"bindings": { "ctrl-shift-tab": "menu::SelectPrev" }
"bindings": {
"ctrl-up": "menu::SelectPrev",
"ctrl-down": "menu::SelectNext",
"ctrl-shift-tab": "menu::SelectPrev",
"ctrl-backspace": "tab_switcher::CloseSelectedItem"
}
},
{
"context": "Terminal",
@@ -170,10 +170,11 @@
"cmd-shift-enter": "editor::NewlineAbove",
"cmd-enter": "editor::NewlineBelow",
"alt-z": "editor::ToggleSoftWrap",
"cmd-f": [
"cmd-f": "buffer_search::Deploy",
"cmd-alt-f": [
"buffer_search::Deploy",
{
"focus": true
"replace_enabled": true
}
],
"cmd-e": [

@@ -232,7 +233,8 @@
"enter": "search::SelectNextMatch",
"shift-enter": "search::SelectPrevMatch",
"alt-enter": "search::SelectAllMatches",
"alt-tab": "search::CycleMode"
"cmd-f": "search::FocusSearch",
"cmd-alt-f": "search::ToggleReplace"
}
},
{

@@ -253,10 +255,10 @@
"context": "ProjectSearchBar",
"bindings": {
"escape": "project_search::ToggleFocus",
"alt-tab": "search::CycleMode",
"cmd-shift-f": "search::FocusSearch",
"cmd-shift-h": "search::ToggleReplace",
"alt-cmd-g": "search::ActivateRegexMode",
"alt-cmd-x": "search::ActivateTextMode"
"alt-cmd-g": "search::ToggleRegex",
"alt-cmd-x": "search::ToggleRegex"
}
},
{

@@ -277,10 +279,9 @@
"context": "ProjectSearchView",
"bindings": {
"escape": "project_search::ToggleFocus",
"alt-tab": "search::CycleMode",
"cmd-shift-h": "search::ToggleReplace",
"alt-cmd-g": "search::ActivateRegexMode",
"alt-cmd-x": "search::ActivateTextMode"
"alt-cmd-g": "search::ToggleRegex",
"alt-cmd-x": "search::ToggleRegex"
}
},
{

@@ -302,10 +303,9 @@
"alt-enter": "search::SelectAllMatches",
"alt-cmd-c": "search::ToggleCaseSensitive",
"alt-cmd-w": "search::ToggleWholeWord",
"alt-tab": "search::CycleMode",
"alt-cmd-f": "project_search::ToggleFilters",
"alt-cmd-g": "search::ActivateRegexMode",
"alt-cmd-x": "search::ActivateTextMode"
"alt-cmd-g": "search::ToggleRegex",
"alt-cmd-x": "search::ToggleRegex"
}
},
// Bindings from VS Code

@@ -436,6 +436,12 @@
"cmd-j": "workspace::ToggleBottomDock",
"alt-cmd-y": "workspace::CloseAllDocks",
"cmd-shift-f": "pane::DeploySearch",
"cmd-shift-h": [
"pane::DeploySearch",
{
"replace_enabled": true
}
],
"cmd-k cmd-s": "zed::OpenKeymap",
"cmd-k cmd-t": "theme_selector::Toggle",
"cmd-t": "project_symbols::Toggle",

@@ -606,7 +612,12 @@
},
{
"context": "TabSwitcher",
"bindings": { "ctrl-shift-tab": "menu::SelectPrev" }
"bindings": {
"ctrl-up": "menu::SelectPrev",
"ctrl-down": "menu::SelectNext",
"ctrl-shift-tab": "menu::SelectPrev",
"ctrl-backspace": "tab_switcher::CloseSelectedItem"
}
},
{
"context": "Terminal",
@@ -73,8 +73,17 @@
],
"g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }],

"n": "search::SelectNextMatch",
"shift-n": "search::SelectPrevMatch",
"/": "vim::Search",
"?": [
"vim::Search",
{
"backwards": true
}
],
"*": "vim::MoveToNext",
"#": "vim::MoveToPrev",
"n": "vim::MoveToNextMatch",
"shift-n": "vim::MoveToPrevMatch",
"%": "vim::Matching",
"f": [
"vim::PushOperator",

@@ -137,8 +146,10 @@
"g d": "editor::GoToDefinition",
"g shift-d": "editor::GoToTypeDefinition",
"g x": "editor::OpenUrl",
"g n": "vim::SelectNext",
"g shift-n": "vim::SelectPrevious",
"g n": "vim::SelectNextMatch",
"g shift-n": "vim::SelectPreviousMatch",
"g l": "vim::SelectNext",
"g shift-l": "vim::SelectPrevious",
"g >": [
"editor::SelectNext",
{

@@ -349,15 +360,6 @@
],
"u": "editor::Undo",
"ctrl-r": "editor::Redo",
"/": "vim::Search",
"?": [
"vim::Search",
{
"backwards": true
}
],
"*": "vim::MoveToNext",
"#": "vim::MoveToPrev",
"r": ["vim::PushOperator", "Replace"],
"s": "vim::Substitute",
"shift-s": "vim::SubstituteLine",

@@ -382,18 +384,46 @@
"d": "editor::Rename" // zed specific
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == c",
"bindings": {
"s": [
"vim::PushOperator",
{
"ChangeSurrounds": {}
}
]
}
},
{
"context": "Editor && vim_operator == d",
"bindings": {
"d": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == d",
"bindings": {
"s": ["vim::PushOperator", "DeleteSurrounds"]
}
},
{
"context": "Editor && vim_operator == y",
"bindings": {
"y": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == y",
"bindings": {
"s": [
"vim::PushOperator",
{
"AddSurrounds": {}
}
]
}
},
{
"context": "Editor && VimObject",
"bindings": {

@@ -546,6 +576,12 @@
"escape": "buffer_search::Dismiss"
}
},
{
"context": "EmptyPane || SharedScreen",
"bindings": {
":": "command_palette::Toggle"
}
},
{
// netrw compatibility
"context": "ProjectPanel && not_editing",
@@ -36,7 +36,7 @@
// },
"buffer_line_height": "comfortable",
// The name of a font to use for rendering text in the UI
"ui_font_family": "Zed Sans",
"ui_font_family": ".SystemUIFont",
// The OpenType features to enable for text in the UI
"ui_font_features": {
// Disable ligatures:

@@ -58,6 +58,8 @@
"hover_popover_enabled": true,
// Whether to confirm before quitting Zed.
"confirm_quit": false,
// Whether to restore last closed project when fresh Zed instance is opened.
"restore_on_startup": "last_workspace",
// Whether the cursor blinks in the editor.
"cursor_blink": true,
// Whether to pop the completions menu while typing in an editor without

@@ -70,7 +72,7 @@
// documentation when not included in original completion list.
"completion_documentation_secondary_query_debounce": 300,
// Whether to show wrap guides in the editor. Setting this to true will
// show a guide at the 'preferred_line_length' value if softwrap is set to
// show a guide at the 'preferred_line_length' value if 'soft_wrap' is set to
// 'preferred_line_length', and will show any additional guides as specified
// by the 'wrap_guides' setting.
"show_wrap_guides": true,

@@ -284,6 +286,11 @@
// 4. Save when idle for a certain amount of time:
// "autosave": { "after_delay": {"milliseconds": 500} },
"autosave": "off",
// Settings related to the editor's tab bar.
"tab_bar": {
// Whether or not to show the navigation history buttons.
"show_nav_history_buttons": true
},
// Settings related to the editor's tabs
"tabs": {
// Show git status colors in the editor tabs.

@@ -291,6 +298,16 @@
// Position of the close button on the editor tabs.
"close_position": "right"
},
// Settings related to preview tabs.
"preview_tabs": {
// Whether preview tabs should be enabled.
// Preview tabs allow you to open files in preview mode, where they close automatically
// when you switch to another file unless you explicitly pin them.
// This is useful for quickly viewing files without cluttering your workspace.
"enabled": true,
// Whether to open files in preview mode when selected from the file finder.
"enable_preview_from_file_finder": false
},
// Whether or not to remove any trailing whitespace from lines of a buffer
// before saving it.
"remove_trailing_whitespace_on_save": true,

@@ -545,18 +562,16 @@
"file_types": {},
// Different settings for specific languages.
"languages": {
"Plain Text": {
"soft_wrap": "preferred_line_length"
"C++": {
"format_on_save": "off"
},
"Elixir": {
"tab_size": 2
"C": {
"format_on_save": "off"
},
"Gleam": {
"tab_size": 2
},
"Go": {
"tab_size": 4,
"hard_tabs": true,
"code_actions_on_format": {
"source.organizeImports": true
}

@@ -564,40 +579,12 @@
"Make": {
"hard_tabs": true
},
"Markdown": {
"tab_size": 2,
"soft_wrap": "preferred_line_length"
},
"JavaScript": {
"tab_size": 2
},
"Terraform": {
"tab_size": 2
},
"TypeScript": {
"tab_size": 2
},
"TSX": {
"tab_size": 2
},
"YAML": {
"tab_size": 2
},
"JSON": {
"tab_size": 2
},
"OCaml": {
"tab_size": 2
},
"OCaml Interface": {
"tab_size": 2
},
"Prisma": {
"tab_size": 2
}
},
// Zed's Prettier integration settings.
// If Prettier is enabled, Zed will use this its Prettier instance for any applicable file, if
// If Prettier is enabled, Zed will use this for its Prettier instance for any applicable file, if
// project has no other Prettier installed.
"prettier": {
// Use regular Prettier json configuration:

@@ -646,5 +633,17 @@
// Mostly useful for developers who are managing multiple instances of Zed.
"dev": {
// "theme": "Andromeda"
}
},
// Task-related settings.
"task": {
// Whether to show task status indicator in the status bar. Default: true
"show_status_indicator": true
},
// Whether to show full labels in line indicator or short ones
//
// Values:
// - `short`: "2 s, 15 l, 32 c"
// - `long`: "2 selections, 15 lines, 32 characters"
// Default: long
"line_indicator_format": "long"
}
@@ -31,7 +31,7 @@ use gpui::{
StatefulInteractiveElement, Styled, Subscription, Task, TextStyle, UniformListScrollHandle,
View, ViewContext, VisualContext, WeakModel, WeakView, WhiteSpace, WindowContext,
};
use language::{language_settings::SoftWrap, Buffer, BufferId, LanguageRegistry, ToOffset as _};
use language::{language_settings::SoftWrap, Buffer, LanguageRegistry, ToOffset as _};
use parking_lot::Mutex;
use project::Project;
use search::{buffer_search::DivRegistrar, BufferSearchBar};

@@ -46,6 +46,7 @@ use ui::{
};
use util::{paths::CONVERSATIONS_DIR, post_inc, ResultExt, TryFutureExt};
use uuid::Uuid;
use workspace::notifications::NotificationId;
use workspace::{
dock::{DockPosition, Panel, PanelEvent},
searchable::Direction,

@@ -345,7 +346,7 @@ impl AssistantPanel {
style: BlockStyle::Flex,
position: snapshot.anchor_before(point_selection.head()),
height: 2,
render: Arc::new({
render: Box::new({
let inline_assistant = inline_assistant.clone();
move |cx: &mut BlockContext| {
*measurements.lock() = BlockMeasurements {

@@ -418,10 +419,14 @@ impl AssistantPanel {
if pending_assist.inline_assistant.is_none() {
if let Some(workspace) = this.workspace.upgrade() {
workspace.update(cx, |workspace, cx| {
workspace.show_toast(
Toast::new(inline_assist_id, error),
cx,
);
struct InlineAssistantError;

let id =
NotificationId::identified::<InlineAssistantError>(
inline_assist_id,
);

workspace.show_toast(Toast::new(id, error), cx);
})
}

@@ -620,10 +625,10 @@ impl AssistantPanel {
// If Markdown or No Language is Known, increase the randomness for more creative output
// If Code, decrease temperature to get more deterministic outputs
let temperature = if let Some(language) = language_name.clone() {
if language.as_ref() != "Markdown" {
0.5
} else {
if language.as_ref() == "Markdown" {
1.0
} else {
0.5
}
} else {
1.0
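The last hunk above flips the temperature check so that only Markdown, or an unknown language, gets the more creative setting. A self-contained sketch of the resulting selection logic (the function name and plain-`&str` signature are illustrative, not the panel's actual API):

```rust
// Sketch only: mirrors the post-change branch structure in the diff.
fn inline_assist_temperature(language_name: Option<&str>) -> f32 {
    match language_name {
        // Markdown, or no detected language, gets more creative output.
        None | Some("Markdown") => 1.0,
        // Everything else is treated as code and kept more deterministic.
        Some(_) => 0.5,
    }
}

fn main() {
    assert_eq!(inline_assist_temperature(Some("Markdown")), 1.0);
    assert_eq!(inline_assist_temperature(Some("Rust")), 0.5);
    assert_eq!(inline_assist_temperature(None), 1.0);
}
```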
@@ -695,8 +700,8 @@ impl AssistantPanel {
editor.clear_background_highlights::<PendingInlineAssist>(cx);
} else {
editor.highlight_background::<PendingInlineAssist>(
background_ranges,
|theme| theme.editor_active_line_background, // todo!("use the appropriate color")
&background_ranges,
|theme| theme.editor_active_line_background, // TODO use the appropriate color
cx,
);
}

@@ -1310,7 +1315,7 @@ impl Conversation {
) -> Self {
let markdown = language_registry.language_for_name("Markdown");
let buffer = cx.new_model(|cx| {
let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "");
let mut buffer = Buffer::local("", cx);
buffer.set_language_registry(language_registry);
cx.spawn(|buffer, mut cx| async move {
let markdown = markdown.await?;

@@ -1398,11 +1403,7 @@ impl Conversation {
let mut message_anchors = Vec::new();
let mut next_message_id = MessageId(0);
let buffer = cx.new_model(|cx| {
let mut buffer = Buffer::new(
0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
saved_conversation.text,
);
let mut buffer = Buffer::local(saved_conversation.text, cx);
for message in saved_conversation.messages {
message_anchors.push(MessageAnchor {
id: message.id,

@@ -2266,7 +2267,7 @@ impl ConversationEditor {
.unwrap(),
height: 2,
style: BlockStyle::Sticky,
render: Arc::new({
render: Box::new({
let conversation = self.conversation.clone();
move |_cx| {
let message_id = message.id;
@@ -10,7 +10,7 @@ use serde::{
de::{self, Visitor},
Deserialize, Deserializer, Serialize, Serializer,
};
use settings::Settings;
use settings::{Settings, SettingsSources};

#[derive(Clone, Debug, Default, PartialEq)]
pub enum ZedDotDevModel {

@@ -332,13 +332,12 @@ impl Settings for AssistantSettings {
type FileContent = AssistantSettingsContent;

fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
sources: SettingsSources<Self::FileContent>,
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
let mut settings = AssistantSettings::default();

for value in [default_value].iter().chain(user_values) {
for value in sources.defaults_and_customizations() {
let value = value.upgrade();
merge(&mut settings.enabled, value.enabled);
merge(&mut settings.button, value.button);
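The hunk above moves `Settings::load` from separate default/user slices to a single `SettingsSources` argument that is walked with `defaults_and_customizations()`. The mock below illustrates only that merge order; it is an assumption-laden stand-in, not the real `settings` crate types:

```rust
// Stand-in types to show the merge order implied by
// `sources.defaults_and_customizations()`: defaults first, then each
// customization layer on top.
#[derive(Default, Clone, Copy)]
struct AssistantSettingsContent {
    enabled: Option<bool>,
    button: Option<bool>,
}

struct SettingsSources<'a, T> {
    default: &'a T,
    user: Vec<&'a T>,
}

impl<'a, T> SettingsSources<'a, T> {
    fn defaults_and_customizations(&self) -> impl Iterator<Item = &'a T> + '_ {
        std::iter::once(self.default).chain(self.user.iter().copied())
    }
}

// Later values win only when they are actually set.
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

fn main() {
    let default = AssistantSettingsContent { enabled: Some(true), button: Some(true) };
    let user = AssistantSettingsContent { enabled: None, button: Some(false) };
    let sources = SettingsSources { default: &default, user: vec![&user] };

    let (mut enabled, mut button) = (false, false);
    for value in sources.defaults_and_customizations() {
        merge(&mut enabled, value.enabled);
        merge(&mut button, value.button);
    }
    assert!(enabled && !button);
}
```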
@@ -361,8 +361,8 @@ mod tests {
use gpui::{Context, TestAppContext};
use indoc::indoc;
use language::{
language_settings, tree_sitter_rust, Buffer, BufferId, Language, LanguageConfig,
LanguageMatcher, Point,
language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, LanguageMatcher,
Point,
};
use rand::prelude::*;
use serde::Serialize;

@@ -388,9 +388,8 @@ mod tests {
}
}
"};
let buffer = cx.new_model(|cx| {
Buffer::new(0, BufferId::new(1).unwrap(), text).with_language(Arc::new(rust_lang()), cx)
});
let buffer =
cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let range = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);

@@ -447,9 +446,8 @@ mod tests {
le
}
"};
let buffer = cx.new_model(|cx| {
Buffer::new(0, BufferId::new(1).unwrap(), text).with_language(Arc::new(rust_lang()), cx)
});
let buffer =
cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);

@@ -506,9 +504,8 @@ mod tests {
" \n",
"}\n" //
);
let buffer = cx.new_model(|cx| {
Buffer::new(0, BufferId::new(1).unwrap(), text).with_language(Arc::new(rust_lang()), cx)
});
let buffer =
cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
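In these test hunks, `Buffer::new(0, BufferId::new(1).unwrap(), text)` becomes `Buffer::local(text, cx)`, so callers no longer mint buffer ids by hand. A self-contained toy version of that constructor change (not the real `language::Buffer` API; the id allocator here is an assumption for illustration):

```rust
// Toy illustration of hiding id allocation inside a `local` constructor.
use std::sync::atomic::{AtomicU64, Ordering};

static NEXT_BUFFER_ID: AtomicU64 = AtomicU64::new(1);

#[derive(Debug, Clone, Copy, PartialEq)]
struct BufferId(u64);

struct Buffer {
    id: BufferId,
    text: String,
}

impl Buffer {
    // Old shape: every caller supplies a replica id and a BufferId.
    fn new(_replica_id: u16, id: BufferId, text: impl Into<String>) -> Self {
        Self { id, text: text.into() }
    }

    // New shape: the id is allocated internally, so tests just pass the text.
    fn local(text: impl Into<String>) -> Self {
        let id = BufferId(NEXT_BUFFER_ID.fetch_add(1, Ordering::SeqCst));
        Self::new(0, id, text)
    }
}

fn main() {
    let a = Buffer::local("fn main() {}");
    let b = Buffer::local("struct Foo;");
    assert_ne!(a.id, b.id);
    println!("{:?}: {} bytes, {:?}: {} bytes", a.id, a.text.len(), b.id, b.text.len());
}
```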
@@ -11,13 +11,13 @@ use gpui::{
};
use isahc::AsyncBody;

use markdown_preview::markdown_preview_view::MarkdownPreviewView;
use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView};
use schemars::JsonSchema;
use serde::Deserialize;
use serde_derive::Serialize;
use smol::io::AsyncReadExt;

use settings::{Settings, SettingsStore};
use settings::{Settings, SettingsSources, SettingsStore};
use smol::{fs::File, process::Command};

use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};

@@ -32,6 +32,7 @@ use util::{
http::{HttpClient, HttpClientWithUrl},
ResultExt,
};
use workspace::notifications::NotificationId;
use workspace::Workspace;

const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";

@@ -82,25 +83,22 @@ struct AutoUpdateSetting(bool);
/// Whether or not to automatically check for updates.
///
/// Default: true
#[derive(Clone, Default, JsonSchema, Deserialize, Serialize)]
#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)]
#[serde(transparent)]
struct AutoUpdateSettingOverride(Option<bool>);
struct AutoUpdateSettingContent(bool);

impl Settings for AutoUpdateSetting {
const KEY: Option<&'static str> = Some("auto_update");

type FileContent = AutoUpdateSettingOverride;
type FileContent = Option<AutoUpdateSettingContent>;

fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self> {
Ok(Self(
Self::json_merge(default_value, user_values)?
.0
.ok_or_else(Self::missing_default)?,
))
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
let auto_update = [sources.release_channel, sources.user]
.into_iter()
.find_map(|value| value.copied().flatten())
.unwrap_or(sources.default.ok_or_else(Self::missing_default)?);

Ok(Self(auto_update.0))
}
}
@@ -238,10 +236,11 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Wo
.new_view(|cx| Editor::for_multibuffer(buffer, Some(project), cx));
let workspace_handle = workspace.weak_handle();
let view: View<MarkdownPreviewView> = MarkdownPreviewView::new(
MarkdownPreviewMode::Default,
editor,
workspace_handle,
Some(tab_description),
language_registry,
Some(tab_description),
cx,
);
workspace.add_item_to_active_pane(Box::new(view.clone()), cx);

@@ -264,9 +263,11 @@ pub fn notify_of_any_new_update(cx: &mut ViewContext<Workspace>) -> Option<()> {
let should_show_notification = should_show_notification.await?;
if should_show_notification {
workspace.update(&mut cx, |workspace, cx| {
workspace.show_notification(0, cx, |cx| {
cx.new_view(|_| UpdateNotification::new(version))
});
workspace.show_notification(
NotificationId::unique::<UpdateNotification>(),
cx,
|cx| cx.new_view(|_| UpdateNotification::new(version)),
);
updater
.read(cx)
.set_should_show_update_notification(false, cx)
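Both notification call sites in this comparison replace a bare integer id with a typed `NotificationId` (`unique::<T>()` or `identified::<T>(id)`). The mock below sketches one plausible shape for such a key, assuming it is derived from the marker type plus an optional integer; the real type in the `workspace` crate may be defined differently:

```rust
use std::any::TypeId;

// Assumed shape, not the actual `workspace::notifications::NotificationId`.
#[derive(Debug, PartialEq, Eq, Hash)]
enum NotificationId {
    Unique(TypeId),
    Identified(TypeId, usize),
}

impl NotificationId {
    fn unique<T: 'static>() -> Self {
        Self::Unique(TypeId::of::<T>())
    }
    fn identified<T: 'static>(id: usize) -> Self {
        Self::Identified(TypeId::of::<T>(), id)
    }
}

// Marker types mirroring the ones used in the hunks above.
struct UpdateNotification;
struct InlineAssistantError;

fn main() {
    let update = NotificationId::unique::<UpdateNotification>();
    let assist = NotificationId::identified::<InlineAssistantError>(7); // 7 is arbitrary
    // Distinct marker types produce distinct keys, so one toast never
    // overwrites an unrelated one.
    assert_ne!(update, NotificationId::unique::<InlineAssistantError>());
    println!("{update:?} {assist:?}");
}
```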
@@ -60,7 +60,7 @@ impl Render for Breadcrumbs {
let mut text_style = cx.text_style();
text_style.color = Color::Muted.color(cx);

StyledText::new(segment.text)
StyledText::new(segment.text.replace('\n', ""))
.with_highlights(&text_style, segment.highlights.unwrap_or_default())
.into_any()
});
@@ -373,7 +373,10 @@ impl ActiveCall {
self.report_call_event("hang up", cx);

Audio::end_call(cx);

let channel_id = self.channel_id(cx);
if let Some((room, _)) = self.room.take() {
cx.emit(Event::RoomLeft { channel_id });
room.update(cx, |room, cx| room.leave(cx))
} else {
Task::ready(Ok(()))

@@ -429,7 +432,9 @@ impl ActiveCall {
room: Option<Model<Room>>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
if room.as_ref() != self.room.as_ref().map(|room| &room.0) {
if room.as_ref() == self.room.as_ref().map(|room| &room.0) {
Task::ready(Ok(()))
} else {
cx.notify();
if let Some(room) = room {
if room.read(cx).status().is_offline() {

@@ -459,8 +464,6 @@ impl ActiveCall {
self.room = None;
Task::ready(Ok(()))
}
} else {
Task::ready(Ok(()))
}
}
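The `set_room` hunk inverts the comparison so the unchanged-room case returns early instead of wrapping the whole body in the `if`. A minimal sketch of that guard-clause shape with stand-in types (the real code compares `Model<Room>` handles and returns a `Task`):

```rust
// Stand-in for the room handle; illustration only.
#[derive(PartialEq)]
struct Room(u64);

fn set_room(current: &Option<Room>, incoming: &Option<Room>) -> &'static str {
    if incoming.as_ref() == current.as_ref() {
        // Unchanged: nothing to do (Task::ready(Ok(())) in the real code).
        "no-op"
    } else {
        // Changed: notify observers, join or leave, and store the new room.
        "update"
    }
}

fn main() {
    let current = Some(Room(1));
    assert_eq!(set_room(&current, &Some(Room(1))), "no-op");
    assert_eq!(set_room(&current, &None), "update");
}
```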
@@ -2,7 +2,7 @@ use anyhow::Result;
use gpui::AppContext;
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
use settings::Settings;
use settings::{Settings, SettingsSources};

#[derive(Deserialize, Debug)]
pub struct CallSettings {

@@ -29,14 +29,7 @@ impl Settings for CallSettings {

type FileContent = CallSettingsContent;

fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_cx: &mut AppContext,
) -> Result<Self>
where
Self: Sized,
{
Self::load_via_json_merge(default_value, user_values)
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
sources.json_merge()
}
}
@@ -52,7 +52,7 @@ pub enum Event {
RemoteProjectInvitationDiscarded {
project_id: u64,
},
Left {
RoomLeft {
channel_id: Option<ChannelId>,
},
}

@@ -366,9 +366,6 @@ impl Room {

pub(crate) fn leave(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
cx.notify();
cx.emit(Event::Left {
channel_id: self.channel_id(),
});
self.leave_internal(cx)
}

@@ -1185,7 +1182,7 @@ impl Room {
cx.emit(Event::RemoteProjectJoined { project_id: id });
cx.spawn(move |this, mut cx| async move {
let project =
Project::remote(id, client, user_store, language_registry, fs, cx.clone()).await?;
Project::in_room(id, client, user_store, language_registry, fs, cx.clone()).await?;

this.update(&mut cx, |this, cx| {
this.joined_projects.retain(|project| {
@@ -11,7 +11,9 @@ pub use channel_chat::{
mentions_to_proto, ChannelChat, ChannelChatEvent, ChannelMessage, ChannelMessageId,
MessageParams,
};
pub use channel_store::{Channel, ChannelEvent, ChannelMembership, ChannelStore};
pub use channel_store::{
Channel, ChannelEvent, ChannelMembership, ChannelStore, DevServer, RemoteProject,
};

#[cfg(test)]
mod channel_store_tests;
@@ -222,6 +222,9 @@ impl ChannelChat {
|
||||
let message = ChannelMessage::from_proto(response, &user_store, &mut cx).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.insert_messages(SumTree::from_item(message, &()), cx);
|
||||
if this.first_loaded_message_id.is_none() {
|
||||
this.first_loaded_message_id = Some(id);
|
||||
}
|
||||
})?;
|
||||
Ok(id)
|
||||
}))
|
||||
@@ -649,13 +652,27 @@ impl ChannelChat {
|
||||
let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &());
|
||||
if let Some(item) = cursor.item() {
|
||||
if item.id == ChannelMessageId::Saved(id) {
|
||||
let ix = messages.summary().count;
|
||||
let deleted_message_ix = messages.summary().count;
|
||||
cursor.next(&());
|
||||
messages.append(cursor.suffix(&()), &());
|
||||
drop(cursor);
|
||||
self.messages = messages;
|
||||
|
||||
// If the message that was deleted was the last acknowledged message,
|
||||
// replace the acknowledged message with an earlier one.
|
||||
self.channel_store.update(cx, |store, _| {
|
||||
let summary = self.messages.summary();
|
||||
if summary.count == 0 {
|
||||
store.set_acknowledged_message_id(self.channel_id, None);
|
||||
} else if deleted_message_ix == summary.count {
|
||||
if let ChannelMessageId::Saved(id) = summary.max_id {
|
||||
store.set_acknowledged_message_id(self.channel_id, Some(id));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
cx.emit(ChannelChatEvent::MessagesUpdated {
|
||||
old_range: ix..ix + 1,
|
||||
old_range: deleted_message_ix..deleted_message_ix + 1,
|
||||
new_count: 0,
|
||||
});
|
||||
}
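
The deletion path above keeps `ChannelStore`'s acknowledged-message pointer valid: if the removed message was the last one, the pointer moves back to the new last message, and it clears when the chat is empty. A pure-std sketch of that bookkeeping on a plain `Vec<u64>` of message ids (names are illustrative, not the real types):

```rust
/// After removing the message at `ix`, decide whether the acknowledged-message
/// pointer needs to change: `Some(new_value)` means update it, `None` means leave it.
fn reacknowledge_after_delete(ids: &mut Vec<u64>, ix: usize) -> Option<Option<u64>> {
    ids.remove(ix);
    if ids.is_empty() {
        Some(None) // chat is empty: clear the acknowledged message
    } else if ix == ids.len() {
        Some(ids.last().copied()) // deleted the last message: acknowledge the new tail
    } else {
        None // an earlier message was deleted: the pointer is still valid
    }
}

fn main() {
    let mut ids = vec![1, 2, 3];
    assert_eq!(reacknowledge_after_delete(&mut ids, 2), Some(Some(2)));
    let mut ids = vec![1, 2, 3];
    assert_eq!(reacknowledge_after_delete(&mut ids, 0), None);
    let mut ids = vec![7];
    assert_eq!(reacknowledge_after_delete(&mut ids, 0), Some(None));
}
```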

@@ -3,7 +3,10 @@ mod channel_index;
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage};
use anyhow::{anyhow, Result};
use channel_index::ChannelIndex;
use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore};
use client::{
ChannelId, Client, ClientSettings, DevServerId, ProjectId, RemoteProjectId, Subscription, User,
UserId, UserStore,
};
use collections::{hash_map, HashMap, HashSet};
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
use gpui::{
@@ -12,7 +15,7 @@ use gpui::{
};
use language::Capability;
use rpc::{
proto::{self, ChannelRole, ChannelVisibility},
proto::{self, ChannelRole, ChannelVisibility, DevServerStatus},
TypedEnvelope,
};
use settings::Settings;
@@ -40,7 +43,6 @@ pub struct HostedProject {
name: SharedString,
_visibility: proto::ChannelVisibility,
}

impl From<proto::HostedProject> for HostedProject {
fn from(project: proto::HostedProject) -> Self {
Self {
@@ -52,12 +54,56 @@ impl From<proto::HostedProject> for HostedProject {
}
}

#[derive(Debug, Clone)]
pub struct RemoteProject {
pub id: RemoteProjectId,
pub project_id: Option<ProjectId>,
pub channel_id: ChannelId,
pub name: SharedString,
pub path: SharedString,
pub dev_server_id: DevServerId,
}

impl From<proto::RemoteProject> for RemoteProject {
fn from(project: proto::RemoteProject) -> Self {
Self {
id: RemoteProjectId(project.id),
project_id: project.project_id.map(|id| ProjectId(id)),
channel_id: ChannelId(project.channel_id),
name: project.name.into(),
path: project.path.into(),
dev_server_id: DevServerId(project.dev_server_id),
}
}
}

#[derive(Debug, Clone)]
pub struct DevServer {
pub id: DevServerId,
pub channel_id: ChannelId,
pub name: SharedString,
pub status: DevServerStatus,
}

impl From<proto::DevServer> for DevServer {
fn from(dev_server: proto::DevServer) -> Self {
Self {
id: DevServerId(dev_server.dev_server_id),
channel_id: ChannelId(dev_server.channel_id),
status: dev_server.status(),
name: dev_server.name.into(),
}
}
}

pub struct ChannelStore {
pub channel_index: ChannelIndex,
channel_invitations: Vec<Arc<Channel>>,
channel_participants: HashMap<ChannelId, Vec<Arc<User>>>,
channel_states: HashMap<ChannelId, ChannelState>,
hosted_projects: HashMap<ProjectId, HostedProject>,
remote_projects: HashMap<RemoteProjectId, RemoteProject>,
dev_servers: HashMap<DevServerId, DevServer>,

outgoing_invites: HashSet<(ChannelId, UserId)>,
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
@@ -87,6 +133,8 @@ pub struct ChannelState {
observed_chat_message: Option<u64>,
role: Option<ChannelRole>,
projects: HashSet<ProjectId>,
dev_servers: HashSet<DevServerId>,
remote_projects: HashSet<RemoteProjectId>,
}

impl Channel {
@@ -217,6 +265,8 @@ impl ChannelStore {
channel_index: ChannelIndex::default(),
channel_participants: Default::default(),
hosted_projects: Default::default(),
remote_projects: Default::default(),
dev_servers: Default::default(),
outgoing_invites: Default::default(),
opened_buffers: Default::default(),
opened_chats: Default::default(),
@@ -316,6 +366,40 @@ impl ChannelStore {
projects
}

pub fn dev_servers_for_id(&self, channel_id: ChannelId) -> Vec<DevServer> {
let mut dev_servers: Vec<DevServer> = self
.channel_states
.get(&channel_id)
.map(|state| state.dev_servers.clone())
.unwrap_or_default()
.into_iter()
.flat_map(|id| self.dev_servers.get(&id).cloned())
.collect();
dev_servers.sort_by_key(|s| (s.name.clone(), s.id));
dev_servers
}

pub fn find_dev_server_by_id(&self, id: DevServerId) -> Option<&DevServer> {
self.dev_servers.get(&id)
}

pub fn find_remote_project_by_id(&self, id: RemoteProjectId) -> Option<&RemoteProject> {
self.remote_projects.get(&id)
}

pub fn remote_projects_for_id(&self, channel_id: ChannelId) -> Vec<RemoteProject> {
let mut remote_projects: Vec<RemoteProject> = self
.channel_states
.get(&channel_id)
.map(|state| state.remote_projects.clone())
.unwrap_or_default()
.into_iter()
.flat_map(|id| self.remote_projects.get(&id).cloned())
.collect();
remote_projects.sort_by_key(|p| (p.name.clone(), p.id));
remote_projects
}
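
Both lookups above resolve a channel's id set against the store's maps and then sort by `(name, id)`, so the UI gets a stable order even when two entries share a name. A small std-only sketch of the same resolve-then-sort pattern (the types here are stand-ins, not the real `ChannelStore`):

```rust
use std::collections::{HashMap, HashSet};

#[derive(Clone, Debug, PartialEq)]
struct DevServer {
    id: u64,
    name: String,
}

// Resolve the channel's id set against the global map, dropping ids that are
// no longer present, then sort by (name, id) for a stable order.
fn dev_servers_for_channel(ids: &HashSet<u64>, all: &HashMap<u64, DevServer>) -> Vec<DevServer> {
    let mut servers: Vec<DevServer> = ids.iter().filter_map(|id| all.get(id).cloned()).collect();
    servers.sort_by_key(|s| (s.name.clone(), s.id));
    servers
}

fn main() {
    let all: HashMap<u64, DevServer> = [
        (2, DevServer { id: 2, name: "build".into() }),
        (1, DevServer { id: 1, name: "build".into() }),
        (3, DevServer { id: 3, name: "ci".into() }),
    ]
    .into_iter()
    .collect();
    let ids: HashSet<u64> = [1, 2, 3].into_iter().collect();
    let sorted = dev_servers_for_channel(&ids, &all);
    assert_eq!(sorted.iter().map(|s| s.id).collect::<Vec<_>>(), vec![1, 2, 3]);
}
```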

pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool {
if let Some(buffer) = self.opened_buffers.get(&channel_id) {
if let OpenedModelHandle::Open(buffer) = buffer {
@@ -380,6 +464,12 @@ impl ChannelStore {
.is_some_and(|state| state.has_new_messages())
}

pub fn set_acknowledged_message_id(&mut self, channel_id: ChannelId, message_id: Option<u64>) {
if let Some(state) = self.channel_states.get_mut(&channel_id) {
state.latest_chat_message = message_id;
}
}

pub fn last_acknowledge_message_id(&self, channel_id: ChannelId) -> Option<u64> {
self.channel_states.get(&channel_id).and_then(|state| {
if let Some(last_message_id) = state.latest_chat_message {
@@ -812,6 +902,45 @@ impl ChannelStore {
})
}

pub fn create_remote_project(
&mut self,
channel_id: ChannelId,
dev_server_id: DevServerId,
name: String,
path: String,
cx: &mut ModelContext<Self>,
) -> Task<Result<proto::CreateRemoteProjectResponse>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::CreateRemoteProject {
channel_id: channel_id.0,
dev_server_id: dev_server_id.0,
name,
path,
})
.await
})
}

pub fn create_dev_server(
&mut self,
channel_id: ChannelId,
name: String,
cx: &mut ModelContext<Self>,
) -> Task<Result<proto::CreateDevServerResponse>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
let result = client
.request(proto::CreateDevServer {
channel_id: channel_id.0,
name,
})
.await?;
Ok(result)
})
}

pub fn get_channel_member_details(
&self,
channel_id: ChannelId,
@@ -1092,7 +1221,11 @@ impl ChannelStore {
|| !payload.latest_channel_message_ids.is_empty()
|| !payload.latest_channel_buffer_versions.is_empty()
|| !payload.hosted_projects.is_empty()
|| !payload.deleted_hosted_projects.is_empty();
|| !payload.deleted_hosted_projects.is_empty()
|| !payload.dev_servers.is_empty()
|| !payload.deleted_dev_servers.is_empty()
|| !payload.remote_projects.is_empty()
|| !payload.deleted_remote_projects.is_empty();

if channels_changed {
if !payload.delete_channels.is_empty() {
@@ -1180,6 +1313,60 @@ impl ChannelStore {
.remove_hosted_project(old_project.project_id);
}
}

for remote_project in payload.remote_projects {
let remote_project: RemoteProject = remote_project.into();
if let Some(old_remote_project) = self
.remote_projects
.insert(remote_project.id, remote_project.clone())
{
self.channel_states
.entry(old_remote_project.channel_id)
.or_default()
.remove_remote_project(old_remote_project.id);
}
self.channel_states
.entry(remote_project.channel_id)
.or_default()
.add_remote_project(remote_project.id);
}

for remote_project_id in payload.deleted_remote_projects {
let remote_project_id = RemoteProjectId(remote_project_id);

if let Some(old_project) = self.remote_projects.remove(&remote_project_id) {
self.channel_states
.entry(old_project.channel_id)
.or_default()
.remove_remote_project(old_project.id);
}
}

for dev_server in payload.dev_servers {
let dev_server: DevServer = dev_server.into();
if let Some(old_server) = self.dev_servers.insert(dev_server.id, dev_server.clone())
{
self.channel_states
.entry(old_server.channel_id)
.or_default()
.remove_dev_server(old_server.id);
}
self.channel_states
.entry(dev_server.channel_id)
.or_default()
.add_dev_server(dev_server.id);
}

for dev_server_id in payload.deleted_dev_servers {
let dev_server_id = DevServerId(dev_server_id);

if let Some(old_server) = self.dev_servers.remove(&dev_server_id) {
self.channel_states
.entry(old_server.channel_id)
.or_default()
.remove_dev_server(old_server.id);
}
}
}

cx.notify();
@@ -1294,4 +1481,20 @@ impl ChannelState {
fn remove_hosted_project(&mut self, project_id: ProjectId) {
self.projects.remove(&project_id);
}

fn add_remote_project(&mut self, remote_project_id: RemoteProjectId) {
self.remote_projects.insert(remote_project_id);
}

fn remove_remote_project(&mut self, remote_project_id: RemoteProjectId) {
self.remote_projects.remove(&remote_project_id);
}

fn add_dev_server(&mut self, dev_server_id: DevServerId) {
self.dev_servers.insert(dev_server_id);
}

fn remove_dev_server(&mut self, dev_server_id: DevServerId) {
self.dev_servers.remove(&dev_server_id);
}
}

@@ -28,7 +28,7 @@ use release_channel::{AppVersion, ReleaseChannel};
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};
use settings::{Settings, SettingsSources, SettingsStore};
use std::fmt;
use std::{
any::TypeId,
@@ -97,15 +97,8 @@ impl Settings for ClientSettings {

type FileContent = ClientSettingsContent;

fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self>
where
Self: Sized,
{
let mut result = Self::load_via_json_merge(default_value, user_values)?;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
let mut result = sources.json_merge::<Self>()?;
if let Some(server_url) = &*ZED_SERVER_URL {
result.server_url = server_url.clone()
}
@@ -427,21 +420,19 @@ impl settings::Settings for TelemetrySettings {

type FileContent = TelemetrySettingsContent;

fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self> {
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
Ok(Self {
diagnostics: user_values.first().and_then(|v| v.diagnostics).unwrap_or(
default_value
diagnostics: sources.user.as_ref().and_then(|v| v.diagnostics).unwrap_or(
sources
.default
.diagnostics
.ok_or_else(Self::missing_default)?,
),
metrics: user_values
.first()
metrics: sources
.user
.as_ref()
.and_then(|v| v.metrics)
.unwrap_or(default_value.metrics.ok_or_else(Self::missing_default)?),
.unwrap_or(sources.default.metrics.ok_or_else(Self::missing_default)?),
})
}
}
@@ -768,8 +759,9 @@ impl Client {
read_credentials_from_keychain(cx).await.is_some()
}

pub fn set_dev_server_token(&self, token: DevServerToken) {
pub fn set_dev_server_token(&self, token: DevServerToken) -> &Self {
self.state.write().credentials = Some(Credentials::DevServer { token });
self
}

#[async_recursion(?Send)]
@@ -790,7 +782,6 @@ impl Client {
}
Status::UpgradeRequired => return Err(EstablishConnectionError::UpgradeRequired)?,
};

if was_disconnected {
self.set_status(Status::Authenticating, cx);
} else {

@@ -27,6 +27,12 @@ impl std::fmt::Display for ChannelId {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct ProjectId(pub u64);

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct DevServerId(pub u64);

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct RemoteProjectId(pub u64);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ParticipantIndex(pub u32);
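
The new `DevServerId` and `RemoteProjectId` follow the same newtype-over-`u64` pattern as `ProjectId`, so the compiler rejects code that mixes one kind of id with another. A minimal illustration of why the wrappers are worth the ceremony:

```rust
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct DevServerId(pub u64);

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct RemoteProjectId(pub u64);

fn lookup_dev_server(id: DevServerId) -> String {
    format!("dev server {}", id.0)
}

fn main() {
    let dev_server = DevServerId(7);
    let _remote_project = RemoteProjectId(7);
    println!("{}", lookup_dev_server(dev_server));
    // lookup_dev_server(_remote_project); // does not compile: the ids are distinct types
}
```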

@@ -1,5 +1,5 @@
DATABASE_URL = "postgres://postgres@localhost/zed"
# DATABASE_URL = "sqlite:////home/zed/.config/zed/db.sqlite3?mode=rwc"
# DATABASE_URL = "sqlite:////root/0/zed/db.sqlite3?mode=rwc"
DATABASE_MAX_CONNECTIONS = 5
HTTP_PORT = 8080
API_TOKEN = "secret"

@@ -63,8 +63,8 @@ tokio.workspace = true
toml.workspace = true
tower = "0.4"
tower-http = { workspace = true, features = ["trace"] }
tracing = "0.1.34"
tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json", "registry", "tracing-log"] }
tracing = "0.1.40"
tracing-subscriber = { git = "https://github.com/tokio-rs/tracing", rev = "tracing-subscriber-0.3.18", features = ["env-filter", "json", "registry", "tracing-log"] } # workaround for https://github.com/tokio-rs/tracing/issues/2927
util.workspace = true
uuid.workspace = true

@@ -102,3 +102,4 @@ theme.workspace = true
unindent.workspace = true
util.workspace = true
workspace = { workspace = true, features = ["test-support"] }
headless.workspace = true

@@ -1,12 +0,0 @@

[Interface]
PrivateKey = B5Fp/yVfP0QYlb+YJv9ea+EMI1mWODPD3akh91cVjvc=
Address = fdaa:0:2ce3:a7b:bea:0:a:2/120
DNS = fdaa:0:2ce3::3

[Peer]
PublicKey = RKAYPljEJiuaELNDdQIEJmQienT9+LRISfIHwH45HAw=
AllowedIPs = fdaa:0:2ce3::/48
Endpoint = ord1.gateway.6pn.dev:51820
PersistentKeepalive = 15

@@ -47,19 +47,6 @@ spec:
metadata:
labels:
app: ${ZED_SERVICE_NAME}
annotations:
ad.datadoghq.com/collab.check_names: |
["openmetrics"]
ad.datadoghq.com/collab.init_configs: |
[{}]
ad.datadoghq.com/collab.instances: |
[
{
"openmetrics_endpoint": "http://%%host%%:%%port%%/metrics",
"namespace": "collab_${ZED_KUBE_NAMESPACE}",
"metrics": [".*"]
}
]
spec:
containers:
- name: ${ZED_SERVICE_NAME}

@@ -45,12 +45,13 @@ CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id");

CREATE TABLE "projects" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"room_id" INTEGER REFERENCES rooms (id) ON DELETE CASCADE NOT NULL,
"room_id" INTEGER REFERENCES rooms (id) ON DELETE CASCADE,
"host_user_id" INTEGER REFERENCES users (id),
"host_connection_id" INTEGER,
"host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE,
"hosted_project_id" INTEGER REFERENCES hosted_projects (id)
"hosted_project_id" INTEGER REFERENCES hosted_projects (id),
"remote_project_id" INTEGER REFERENCES remote_projects(id)
);
CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id");
@@ -397,7 +398,9 @@ CREATE TABLE hosted_projects (
channel_id INTEGER NOT NULL REFERENCES channels(id),
name TEXT NOT NULL,
visibility TEXT NOT NULL,
deleted_at TIMESTAMP NULL
deleted_at TIMESTAMP NULL,
dev_server_id INTEGER REFERENCES dev_servers(id),
dev_server_path TEXT
);
CREATE INDEX idx_hosted_projects_on_channel_id ON hosted_projects (channel_id);
CREATE UNIQUE INDEX uix_hosted_projects_on_channel_id_and_name ON hosted_projects (channel_id, name) WHERE (deleted_at IS NULL);
@@ -409,3 +412,13 @@ CREATE TABLE dev_servers (
hashed_token TEXT NOT NULL
);
CREATE INDEX idx_dev_servers_on_channel_id ON dev_servers (channel_id);

CREATE TABLE remote_projects (
id INTEGER PRIMARY KEY AUTOINCREMENT,
channel_id INTEGER NOT NULL REFERENCES channels(id),
dev_server_id INTEGER NOT NULL REFERENCES dev_servers(id),
name TEXT NOT NULL,
path TEXT NOT NULL
);

ALTER TABLE hosted_projects ADD COLUMN remote_project_id INTEGER REFERENCES remote_projects(id);

@@ -0,0 +1,9 @@
CREATE TABLE remote_projects (
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
channel_id INT NOT NULL REFERENCES channels(id),
dev_server_id INT NOT NULL REFERENCES dev_servers(id),
name TEXT NOT NULL,
path TEXT NOT NULL
);

ALTER TABLE projects ADD COLUMN remote_project_id INTEGER REFERENCES remote_projects(id);

@@ -10,6 +10,7 @@ use axum::{
response::IntoResponse,
};
use prometheus::{exponential_buckets, register_histogram, Histogram};
pub use rpc::auth::random_token;
use scrypt::{
password_hash::{PasswordHash, PasswordVerifier},
Scrypt,
@@ -152,7 +153,7 @@ pub async fn create_access_token(
/// Hashing prevents anyone with access to the database being able to login.
/// As the token is randomly generated, we don't need to worry about scrypt-style
/// protection.
fn hash_access_token(token: &str) -> String {
pub fn hash_access_token(token: &str) -> String {
let digest = sha2::Sha256::digest(token);
format!(
"$sha256${}",
@@ -230,18 +231,15 @@ pub async fn verify_access_token(
})
}
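
`hash_access_token` above becomes `pub` so the dev-server path can reuse it; per its doc comment, a single unsalted SHA-256 is enough because the token itself is random. A sketch of that kind of hashing with the `sha2` crate (hex encoding is used here for readability; the exact encoding behind the truncated `"$sha256$"` format string isn't shown in this diff):

```rust
use sha2::{Digest, Sha256};

/// Hash a randomly generated access token before storing it, so a database
/// leak does not reveal usable tokens. No salt or work factor is needed
/// because the input is already high-entropy.
fn hash_access_token(token: &str) -> String {
    let digest = Sha256::digest(token.as_bytes());
    let hex: String = digest.iter().map(|byte| format!("{byte:02x}")).collect();
    format!("$sha256${hex}")
}

fn main() {
    let hashed = hash_access_token("some-random-token");
    assert!(hashed.starts_with("$sha256$"));
    println!("{hashed}");
}
```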

// a dev_server_token has the format <id>.<base64>. This is to make them
// relatively easy to copy/paste around.
pub fn generate_dev_server_token(id: usize, access_token: String) -> String {
format!("{}.{}", id, access_token)
}

pub async fn verify_dev_server_token(
dev_server_token: &str,
db: &Arc<Database>,
) -> anyhow::Result<dev_server::Model> {
let mut parts = dev_server_token.splitn(2, '.');
let id = DevServerId(parts.next().unwrap_or_default().parse()?);
let token = parts
.next()
.ok_or_else(|| anyhow!("invalid dev server token format"))?;

let (id, token) = split_dev_server_token(dev_server_token)?;
let token_hash = hash_access_token(&token);
let server = db.get_dev_server(id).await?;

@@ -257,6 +255,17 @@ pub async fn verify_dev_server_token(
}
}

// a dev_server_token has the format <id>.<base64>. This is to make them
// relatively easy to copy/paste around.
pub fn split_dev_server_token(dev_server_token: &str) -> anyhow::Result<(DevServerId, &str)> {
let mut parts = dev_server_token.splitn(2, '.');
let id = DevServerId(parts.next().unwrap_or_default().parse()?);
let token = parts
.next()
.ok_or_else(|| anyhow!("invalid dev server token format"))?;
Ok((id, token))
}
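
`generate_dev_server_token` and the new `split_dev_server_token` are inverses over the `<id>.<base64>` format called out in the comment. A self-contained, std-only version of that round trip (the `DevServerId` wrapper is redeclared locally for the sketch):

```rust
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
struct DevServerId(u64);

fn generate_dev_server_token(id: u64, access_token: &str) -> String {
    format!("{}.{}", id, access_token)
}

// Split on the first '.' only, so a secret that itself contains '.' still parses.
fn split_dev_server_token(token: &str) -> Result<(DevServerId, &str), String> {
    let mut parts = token.splitn(2, '.');
    let id: u64 = parts
        .next()
        .unwrap_or_default()
        .parse()
        .map_err(|_| "invalid dev server id".to_string())?;
    let secret = parts.next().ok_or("invalid dev server token format")?;
    Ok((DevServerId(id), secret))
}

fn main() {
    let token = generate_dev_server_token(42, "c2VjcmV0LXRva2Vu");
    let (id, secret) = split_dev_server_token(&token).unwrap();
    assert_eq!(id, DevServerId(42));
    assert_eq!(secret, "c2VjcmV0LXRva2Vu");
}
```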

#[cfg(test)]
mod test {
use rand::thread_rng;

@@ -56,6 +56,7 @@ pub struct Database {
options: ConnectOptions,
pool: DatabaseConnection,
rooms: DashMap<RoomId, Arc<Mutex<()>>>,
projects: DashMap<ProjectId, Arc<Mutex<()>>>,
rng: Mutex<StdRng>,
executor: Executor,
notification_kinds_by_id: HashMap<NotificationKindId, &'static str>,
@@ -74,6 +75,7 @@ impl Database {
options: options.clone(),
pool: sea_orm::Database::connect(options).await?,
rooms: DashMap::with_capacity(16384),
projects: DashMap::with_capacity(16384),
rng: Mutex::new(StdRng::seed_from_u64(0)),
notification_kinds_by_id: HashMap::default(),
notification_kinds_by_name: HashMap::default(),
@@ -86,6 +88,7 @@ impl Database {
#[cfg(test)]
pub fn reset(&self) {
self.rooms.clear();
self.projects.clear();
}

/// Runs the database migrations.
@@ -190,7 +193,10 @@ impl Database {
}

/// The same as room_transaction, but if you need to only optionally return a Room.
async fn optional_room_transaction<F, Fut, T>(&self, f: F) -> Result<Option<RoomGuard<T>>>
async fn optional_room_transaction<F, Fut, T>(
&self,
f: F,
) -> Result<Option<TransactionGuard<T>>>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<Option<(RoomId, T)>>>,
@@ -205,7 +211,7 @@ impl Database {
let _guard = lock.lock_owned().await;
match tx.commit().await.map_err(Into::into) {
Ok(()) => {
return Ok(Some(RoomGuard {
return Ok(Some(TransactionGuard {
data,
_guard,
_not_send: PhantomData,
@@ -240,10 +246,63 @@ impl Database {
self.run(body).await
}

async fn project_transaction<F, Fut, T>(
&self,
project_id: ProjectId,
f: F,
) -> Result<TransactionGuard<T>>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
{
let room_id = Database::room_id_for_project(&self, project_id).await?;
let body = async {
let mut i = 0;
loop {
let lock = if let Some(room_id) = room_id {
self.rooms.entry(room_id).or_default().clone()
} else {
self.projects.entry(project_id).or_default().clone()
};
let _guard = lock.lock_owned().await;
let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(data) => match tx.commit().await.map_err(Into::into) {
Ok(()) => {
return Ok(TransactionGuard {
data,
_guard,
_not_send: PhantomData,
});
}
Err(error) => {
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
},
Err(error) => {
tx.rollback().await?;
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
}
i += 1;
}
};

self.run(body).await
}
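
The interesting part of `project_transaction` is the lock choice: if the project is attached to a room it serializes on that room's mutex (so room updates and project updates cannot interleave), otherwise it falls back to a per-project mutex. A std-only sketch of that selection, with plain `HashMap`s standing in for the `DashMap`s above:

```rust
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

type Lock = Arc<Mutex<()>>;

struct Locks {
    rooms: HashMap<u64, Lock>,
    projects: HashMap<u64, Lock>,
}

impl Locks {
    // Prefer the room-level lock when the project belongs to a room; otherwise
    // use a lock scoped to the project itself (e.g. a dev-server project).
    fn lock_for_project(&mut self, project_id: u64, room_id: Option<u64>) -> Lock {
        match room_id {
            Some(room_id) => self.rooms.entry(room_id).or_default().clone(),
            None => self.projects.entry(project_id).or_default().clone(),
        }
    }
}

fn main() {
    let mut locks = Locks { rooms: HashMap::new(), projects: HashMap::new() };
    let in_room = locks.lock_for_project(1, Some(10));
    let standalone = locks.lock_for_project(2, None);
    let _guard_a = in_room.lock().unwrap();
    let _guard_b = standalone.lock().unwrap();
    // Both guards can be held at once because they protect independent scopes.
}
```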

/// room_transaction runs the block in a transaction. It returns a RoomGuard, that keeps
/// the database locked until it is dropped. This ensures that updates sent to clients are
/// properly serialized with respect to database changes.
async fn room_transaction<F, Fut, T>(&self, room_id: RoomId, f: F) -> Result<RoomGuard<T>>
async fn room_transaction<F, Fut, T>(
&self,
room_id: RoomId,
f: F,
) -> Result<TransactionGuard<T>>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
@@ -257,7 +316,7 @@ impl Database {
match result {
Ok(data) => match tx.commit().await.map_err(Into::into) {
Ok(()) => {
return Ok(RoomGuard {
return Ok(TransactionGuard {
data,
_guard,
_not_send: PhantomData,
@@ -399,15 +458,16 @@ impl Deref for TransactionHandle {
}
}

/// [`RoomGuard`] keeps a database transaction alive until it is dropped.
/// so that updates to rooms are serialized.
pub struct RoomGuard<T> {
/// [`TransactionGuard`] keeps a database transaction alive until it is dropped.
/// It wraps data that depends on the state of the database and prevents an additional
/// transaction from starting that would invalidate that data.
pub struct TransactionGuard<T> {
data: T,
_guard: OwnedMutexGuard<()>,
_not_send: PhantomData<Rc<()>>,
}
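
`TransactionGuard` generalizes the old `RoomGuard`: the wrapped data stays usable through `Deref`/`DerefMut`, and the owned mutex guard it carries keeps any conflicting transaction from starting until the value is dropped. A compact std-only sketch of the same guard shape (a borrowed `MutexGuard` with a lifetime instead of the `OwnedMutexGuard` used above):

```rust
use std::ops::{Deref, DerefMut};
use std::sync::{Mutex, MutexGuard};

/// Data plus the lock that must stay held while the data is considered valid.
struct TransactionGuard<'a, T> {
    data: T,
    _guard: MutexGuard<'a, ()>,
}

impl<'a, T> Deref for TransactionGuard<'a, T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.data
    }
}

impl<'a, T> DerefMut for TransactionGuard<'a, T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.data
    }
}

fn main() {
    let lock = Mutex::new(());
    let mut guard = TransactionGuard { data: vec![1, 2, 3], _guard: lock.lock().unwrap() };
    guard.push(4); // DerefMut lets callers treat the guard like the data itself
    assert_eq!(guard.len(), 4); // the lock is released only when `guard` is dropped
}
```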

impl<T> Deref for RoomGuard<T> {
impl<T> Deref for TransactionGuard<T> {
type Target = T;

fn deref(&self) -> &T {
@@ -415,13 +475,13 @@ impl<T> Deref for RoomGuard<T> {
}
}

impl<T> DerefMut for RoomGuard<T> {
impl<T> DerefMut for TransactionGuard<T> {
fn deref_mut(&mut self) -> &mut T {
&mut self.data
}
}

impl<T> RoomGuard<T> {
impl<T> TransactionGuard<T> {
/// Returns the inner value of the guard.
pub fn into_inner(self) -> T {
self.data
@@ -460,6 +520,8 @@ pub struct UpdatedChannelMessage {
pub notifications: NotificationBatch,
pub reply_to_message_id: Option<MessageId>,
pub timestamp: PrimitiveDateTime,
pub deleted_mention_notification_ids: Vec<NotificationId>,
pub updated_mention_notifications: Vec<rpc::proto::Notification>,
}

#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
@@ -516,6 +578,7 @@ pub struct MembershipUpdated {

/// The result of setting a member's role.
#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub enum SetMemberRoleResult {
InviteUpdated(Channel),
MembershipUpdated(MembershipUpdated),
@@ -592,6 +655,8 @@ pub struct ChannelsForUser {
pub channel_memberships: Vec<channel_member::Model>,
pub channel_participants: HashMap<ChannelId, Vec<UserId>>,
pub hosted_projects: Vec<proto::HostedProject>,
pub dev_servers: Vec<dev_server::Model>,
pub remote_projects: Vec<proto::RemoteProject>,

pub observed_buffer_versions: Vec<proto::ChannelBufferVersion>,
pub observed_channel_messages: Vec<proto::ChannelMessageId>,
@@ -633,6 +698,30 @@ pub struct RejoinedProject {
pub language_servers: Vec<proto::LanguageServer>,
}

impl RejoinedProject {
pub fn to_proto(&self) -> proto::RejoinedProject {
proto::RejoinedProject {
id: self.id.to_proto(),
worktrees: self
.worktrees
.iter()
.map(|worktree| proto::WorktreeMetadata {
id: worktree.id,
root_name: worktree.root_name.clone(),
visible: worktree.visible,
abs_path: worktree.abs_path.clone(),
})
.collect(),
collaborators: self
.collaborators
.iter()
.map(|collaborator| collaborator.to_proto())
.collect(),
language_servers: self.language_servers.clone(),
}
}
}

#[derive(Debug)]
pub struct RejoinedWorktree {
pub id: u64,

@@ -84,6 +84,7 @@ id_type!(NotificationId);
id_type!(NotificationKindId);
id_type!(ProjectCollaboratorId);
id_type!(ProjectId);
id_type!(RemoteProjectId);
id_type!(ReplicaId);
id_type!(RoomId);
id_type!(RoomParticipantId);
@@ -270,3 +271,18 @@ impl Into<i32> for ChannelVisibility {
proto.into()
}
}

#[derive(Copy, Clone, Debug, Serialize, PartialEq)]
pub enum PrincipalId {
UserId(UserId),
DevServerId(DevServerId),
}

/// Indicate whether a [Buffer] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
/// The buffer is a mutable replica.
ReadWrite,
/// The buffer is a read-only replica.
ReadOnly,
}

@@ -12,6 +12,7 @@ pub mod messages;
pub mod notifications;
pub mod projects;
pub mod rate_buckets;
pub mod remote_projects;
pub mod rooms;
pub mod servers;
pub mod users;

@@ -640,10 +640,15 @@ impl Database {
.get_hosted_projects(&channel_ids, &roles_by_channel_id, tx)
.await?;

let dev_servers = self.get_dev_servers(&channel_ids, tx).await?;
let remote_projects = self.get_remote_projects(&channel_ids, tx).await?;

Ok(ChannelsForUser {
channel_memberships,
channels,
hosted_projects,
dev_servers,
remote_projects,
channel_participants,
latest_buffer_versions,
latest_channel_messages,

@@ -1,6 +1,6 @@
use sea_orm::EntityTrait;
use sea_orm::{ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, QueryFilter};

use super::{dev_server, Database, DevServerId};
use super::{channel, dev_server, ChannelId, Database, DevServerId, UserId};

impl Database {
pub async fn get_dev_server(
@@ -15,4 +15,42 @@ impl Database {
})
.await
}

pub async fn get_dev_servers(
&self,
channel_ids: &Vec<ChannelId>,
tx: &DatabaseTransaction,
) -> crate::Result<Vec<dev_server::Model>> {
let servers = dev_server::Entity::find()
.filter(dev_server::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
.all(tx)
.await?;
Ok(servers)
}

pub async fn create_dev_server(
&self,
channel_id: ChannelId,
name: &str,
hashed_access_token: &str,
user_id: UserId,
) -> crate::Result<(channel::Model, dev_server::Model)> {
self.transaction(|tx| async move {
let channel = self.get_channel_internal(channel_id, &tx).await?;
self.check_user_is_channel_admin(&channel, user_id, &tx)
.await?;

let dev_server = dev_server::Entity::insert(dev_server::ActiveModel {
id: ActiveValue::NotSet,
hashed_token: ActiveValue::Set(hashed_access_token.to_string()),
channel_id: ActiveValue::Set(channel_id),
name: ActiveValue::Set(name.to_string()),
})
.exec_with_returning(&*tx)
.await?;

Ok((channel, dev_server))
})
.await
}
}

@@ -1,7 +1,8 @@
use super::*;
use rpc::Notification;
use sea_orm::TryInsertResult;
use sea_orm::{SelectColumns, TryInsertResult};
use time::OffsetDateTime;
use util::ResultExt;

impl Database {
/// Inserts a record representing a user joining the chat for a given channel.
@@ -480,13 +481,20 @@ impl Database {
Ok(results)
}

fn get_notification_kind_id_by_name(&self, notification_kind: &str) -> Option<i32> {
self.notification_kinds_by_id
.iter()
.find(|(_, kind)| **kind == notification_kind)
.map(|kind| kind.0 .0)
}

/// Removes the channel message with the given ID.
pub async fn remove_channel_message(
&self,
channel_id: ChannelId,
message_id: MessageId,
user_id: UserId,
) -> Result<Vec<ConnectionId>> {
) -> Result<(Vec<ConnectionId>, Vec<NotificationId>)> {
self.transaction(|tx| async move {
let mut rows = channel_chat_participant::Entity::find()
.filter(channel_chat_participant::Column::ChannelId.eq(channel_id))
@@ -531,7 +539,29 @@ impl Database {
}
}

Ok(participant_connection_ids)
let notification_kind_id =
self.get_notification_kind_id_by_name("ChannelMessageMention");

let existing_notifications = notification::Entity::find()
.filter(notification::Column::EntityId.eq(message_id))
.filter(notification::Column::Kind.eq(notification_kind_id))
.select_column(notification::Column::Id)
.all(&*tx)
.await?;

let existing_notification_ids = existing_notifications
.into_iter()
.map(|notification| notification.id)
.collect();

// remove all the mention notifications for this message
notification::Entity::delete_many()
.filter(notification::Column::EntityId.eq(message_id))
.filter(notification::Column::Kind.eq(notification_kind_id))
.exec(&*tx)
.await?;

Ok((participant_connection_ids, existing_notification_ids))
})
.await
}

@@ -629,14 +659,44 @@ impl Database {
.await?;
}

let mut mentioned_user_ids = mentions.iter().map(|m| m.user_id).collect::<HashSet<_>>();
let mut update_mention_user_ids = HashSet::default();
let mut new_mention_user_ids =
mentions.iter().map(|m| m.user_id).collect::<HashSet<_>>();
// Filter out users that were mentioned before
for mention in old_mentions {
mentioned_user_ids.remove(&mention.user_id.to_proto());
for mention in &old_mentions {
if new_mention_user_ids.contains(&mention.user_id.to_proto()) {
update_mention_user_ids.insert(mention.user_id.to_proto());
}

new_mention_user_ids.remove(&mention.user_id.to_proto());
}

let notification_kind_id =
self.get_notification_kind_id_by_name("ChannelMessageMention");

let existing_notifications = notification::Entity::find()
.filter(notification::Column::EntityId.eq(message_id))
.filter(notification::Column::Kind.eq(notification_kind_id))
.all(&*tx)
.await?;

// determine which notifications should be updated or deleted
let mut deleted_notification_ids = HashSet::default();
let mut updated_mention_notifications = Vec::new();
for notification in existing_notifications {
if update_mention_user_ids.contains(&notification.recipient_id.to_proto()) {
if let Some(notification) =
self::notifications::model_to_proto(self, notification).log_err()
{
updated_mention_notifications.push(notification);
}
} else {
deleted_notification_ids.insert(notification.id);
}
}

let mut notifications = Vec::new();
for mentioned_user in mentioned_user_ids {
for mentioned_user in new_mention_user_ids {
notifications.extend(
self.create_notification(
UserId::from_proto(mentioned_user),
@@ -658,6 +718,10 @@ impl Database {
notifications,
reply_to_message_id: channel_message.reply_to_message_id,
timestamp: channel_message.sent_at,
deleted_mention_notification_ids: deleted_notification_ids
.into_iter()
.collect::<Vec<_>>(),
updated_mention_notifications,
})
})
.await

@@ -1,5 +1,6 @@
use super::*;
use rpc::Notification;
use util::ResultExt;

impl Database {
/// Initializes the different kinds of notifications by upserting records for them.
@@ -53,11 +54,8 @@ impl Database {
.await?;
while let Some(row) = rows.next().await {
let row = row?;
let kind = row.kind;
if let Some(proto) = model_to_proto(self, row) {
if let Some(proto) = model_to_proto(self, row).log_err() {
result.push(proto);
} else {
log::warn!("unknown notification kind {:?}", kind);
}
}
result.reverse();
@@ -200,7 +198,9 @@ impl Database {
})
.exec(tx)
.await?;
Ok(model_to_proto(self, row).map(|notification| (recipient_id, notification)))
Ok(model_to_proto(self, row)
.map(|notification| (recipient_id, notification))
.ok())
} else {
Ok(None)
}
@@ -241,9 +241,12 @@ impl Database {
}
}

fn model_to_proto(this: &Database, row: notification::Model) -> Option<proto::Notification> {
let kind = this.notification_kinds_by_id.get(&row.kind)?;
Some(proto::Notification {
pub fn model_to_proto(this: &Database, row: notification::Model) -> Result<proto::Notification> {
let kind = this
.notification_kinds_by_id
.get(&row.kind)
.ok_or_else(|| anyhow!("Unknown notification kind"))?;
Ok(proto::Notification {
id: row.id.to_proto(),
kind: kind.to_string(),
timestamp: row.created_at.assume_utc().unix_timestamp() as u64,

@@ -1,3 +1,5 @@
use util::ResultExt;

use super::*;

impl Database {
@@ -28,7 +30,7 @@ impl Database {
room_id: RoomId,
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
) -> Result<RoomGuard<(ProjectId, proto::Room)>> {
) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
.filter(
@@ -65,6 +67,7 @@ impl Database {
))),
id: ActiveValue::NotSet,
hosted_project_id: ActiveValue::Set(None),
remote_project_id: ActiveValue::Set(None),
}
.insert(&*tx)
.await?;
@@ -108,20 +111,22 @@ impl Database {
&self,
project_id: ProjectId,
connection: ConnectionId,
) -> Result<RoomGuard<(proto::Room, Vec<ConnectionId>)>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
) -> Result<TransactionGuard<(Option<proto::Room>, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;

let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("project not found"))?;
if project.host_connection()? == connection {
let room = if let Some(room_id) = project.room_id {
Some(self.get_room(room_id, &tx).await?)
} else {
None
};
project::Entity::delete(project.into_active_model())
.exec(&*tx)
.await?;
let room = self.get_room(room_id, &tx).await?;
Ok((room, guest_connection_ids))
} else {
Err(anyhow!("cannot unshare a project hosted by another user"))?
@@ -136,9 +141,8 @@ impl Database {
project_id: ProjectId,
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
) -> Result<RoomGuard<(proto::Room, Vec<ConnectionId>)>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
) -> Result<TransactionGuard<(Option<proto::Room>, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
let project = project::Entity::find_by_id(project_id)
.filter(
Condition::all()
@@ -154,12 +158,14 @@ impl Database {
self.update_project_worktrees(project.id, worktrees, &tx)
.await?;

let room_id = project
.room_id
.ok_or_else(|| anyhow!("project not in a room"))?;

let guest_connection_ids = self.project_guest_connection_ids(project.id, &tx).await?;
let room = self.get_room(room_id, &tx).await?;

let room = if let Some(room_id) = project.room_id {
Some(self.get_room(room_id, &tx).await?)
} else {
None
};

Ok((room, guest_connection_ids))
})
.await
@@ -204,11 +210,10 @@ impl Database {
&self,
update: &proto::UpdateWorktree,
connection: ConnectionId,
) -> Result<RoomGuard<Vec<ConnectionId>>> {
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
let worktree_id = update.worktree_id as i64;
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
self.project_transaction(project_id, |tx| async move {
// Ensure the update comes from the host.
let _project = project::Entity::find_by_id(project_id)
.filter(
@@ -360,11 +365,10 @@ impl Database {
&self,
update: &proto::UpdateDiagnosticSummary,
connection: ConnectionId,
) -> Result<RoomGuard<Vec<ConnectionId>>> {
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
let worktree_id = update.worktree_id as i64;
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
self.project_transaction(project_id, |tx| async move {
let summary = update
.summary
.as_ref()
@@ -415,10 +419,9 @@ impl Database {
&self,
update: &proto::StartLanguageServer,
connection: ConnectionId,
) -> Result<RoomGuard<Vec<ConnectionId>>> {
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
self.project_transaction(project_id, |tx| async move {
let server = update
.server
.as_ref()
@@ -461,10 +464,9 @@ impl Database {
&self,
update: &proto::UpdateWorktreeSettings,
connection: ConnectionId,
) -> Result<RoomGuard<Vec<ConnectionId>>> {
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
self.project_transaction(project_id, |tx| async move {
// Ensure the update comes from the host.
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
@@ -542,46 +544,36 @@ impl Database {
.await
}

pub async fn get_project(&self, id: ProjectId) -> Result<project::Model> {
self.transaction(|tx| async move {
Ok(project::Entity::find_by_id(id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?)
})
.await
}

/// Adds the given connection to the specified project
/// in the current room.
pub async fn join_project_in_room(
pub async fn join_project(
&self,
project_id: ProjectId,
connection: ConnectionId,
) -> Result<RoomGuard<(Project, ReplicaId)>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
.filter(
Condition::all()
.add(
room_participant::Column::AnsweringConnectionId
.eq(connection.id as i32),
)
.add(
room_participant::Column::AnsweringConnectionServerId
.eq(connection.owner_id as i32),
),
user_id: UserId,
) -> Result<TransactionGuard<(Project, ReplicaId)>> {
self.project_transaction(project_id, |tx| async move {
let (project, role) = self
.access_project(
project_id,
connection,
PrincipalId::UserId(user_id),
Capability::ReadOnly,
&tx,
)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("must join a room first"))?;

let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
if project.room_id != Some(participant.room_id) {
return Err(anyhow!("no such project"))?;
}
self.join_project_internal(
project,
participant.user_id,
connection,
participant.role.unwrap_or(ChannelRole::Member),
&tx,
)
.await
.await?;
self.join_project_internal(project, user_id, connection, role, &tx)
.await
})
.await
}

@@ -814,9 +806,8 @@ impl Database {
&self,
project_id: ProjectId,
connection: ConnectionId,
) -> Result<RoomGuard<(proto::Room, LeftProject)>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
) -> Result<TransactionGuard<(Option<proto::Room>, LeftProject)>> {
self.project_transaction(project_id, |tx| async move {
let result = project_collaborator::Entity::delete_many()
.filter(
Condition::all()
@@ -871,7 +862,12 @@ impl Database {
.exec(&*tx)
.await?;

let room = self.get_room(room_id, &tx).await?;
let room = if let Some(room_id) = project.room_id {
Some(self.get_room(room_id, &tx).await?)
} else {
None
};

let left_project = LeftProject {
id: project_id,
host_user_id: project.host_user_id,
@@ -888,17 +884,15 @@ impl Database {
project_id: ProjectId,
connection_id: ConnectionId,
) -> Result<()> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
project_collaborator::Entity::find()
self.project_transaction(project_id, |tx| async move {
project::Entity::find()
.filter(
Condition::all()
.add(project_collaborator::Column::ProjectId.eq(project_id))
.add(project_collaborator::Column::IsHost.eq(true))
.add(project_collaborator::Column::ConnectionId.eq(connection_id.id))
.add(project::Column::Id.eq(project_id))
.add(project::Column::HostConnectionId.eq(Some(connection_id.id as i32)))
.add(
project_collaborator::Column::ConnectionServerId
.eq(connection_id.owner_id),
project::Column::HostConnectionServerId
.eq(Some(connection_id.owner_id as i32)),
),
)
.one(&*tx)
@@ -911,39 +905,90 @@ impl Database {
.map(|guard| guard.into_inner())
}

/// Returns the current project if the given user is authorized to access it with the specified capability.
pub async fn access_project(
&self,
project_id: ProjectId,
connection_id: ConnectionId,
principal_id: PrincipalId,
capability: Capability,
tx: &DatabaseTransaction,
) -> Result<(project::Model, ChannelRole)> {
let (project, remote_project) = project::Entity::find_by_id(project_id)
.find_also_related(remote_project::Entity)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;

let user_id = match principal_id {
PrincipalId::DevServerId(_) => {
if project
.host_connection()
.is_ok_and(|connection| connection == connection_id)
{
return Ok((project, ChannelRole::Admin));
}
return Err(anyhow!("not the project host"))?;
}
PrincipalId::UserId(user_id) => user_id,
};

let role = if let Some(remote_project) = remote_project {
let channel = channel::Entity::find_by_id(remote_project.channel_id)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such channel"))?;

self.check_user_is_channel_participant(&channel, user_id, &tx)
.await?
} else if let Some(room_id) = project.room_id {
// what's the users role?
let current_participant = room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
.filter(room_participant::Column::AnsweringConnectionId.eq(connection_id.id))
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such room"))?;

current_participant.role.unwrap_or(ChannelRole::Guest)
} else {
return Err(anyhow!("not authorized to read projects"))?;
};

match capability {
Capability::ReadWrite => {
if !role.can_edit_projects() {
return Err(anyhow!("not authorized to edit projects"))?;
}
}
Capability::ReadOnly => {
if !role.can_read_projects() {
return Err(anyhow!("not authorized to read projects"))?;
}
}
}

Ok((project, role))
}
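
`access_project` ends by comparing the caller's resolved role against the requested `Capability`. A stripped-down sketch of that final gate, with the role methods written out locally since `ChannelRole`'s real definitions live elsewhere in the codebase:

```rust
#[derive(Clone, Copy, Debug)]
enum Capability {
    ReadWrite,
    ReadOnly,
}

#[derive(Clone, Copy, Debug)]
enum ChannelRole {
    Admin,
    Member,
    Guest,
}

impl ChannelRole {
    // Illustrative policy: guests may read but not edit.
    fn can_edit_projects(self) -> bool {
        matches!(self, ChannelRole::Admin | ChannelRole::Member)
    }
    fn can_read_projects(self) -> bool {
        true
    }
}

fn check_capability(role: ChannelRole, capability: Capability) -> Result<(), String> {
    match capability {
        Capability::ReadWrite if !role.can_edit_projects() => {
            Err("not authorized to edit projects".into())
        }
        Capability::ReadOnly if !role.can_read_projects() => {
            Err("not authorized to read projects".into())
        }
        _ => Ok(()),
    }
}

fn main() {
    assert!(check_capability(ChannelRole::Member, Capability::ReadWrite).is_ok());
    assert!(check_capability(ChannelRole::Guest, Capability::ReadWrite).is_err());
}
```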

/// Returns the host connection for a read-only request to join a shared project.
pub async fn host_for_read_only_project_request(
&self,
project_id: ProjectId,
connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let current_participant = room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
.filter(room_participant::Column::AnsweringConnectionId.eq(connection_id.id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such room"))?;

if !current_participant
.role
.map_or(false, |role| role.can_read_projects())
{
Err(anyhow!("not authorized to read projects"))?;
}

let host = project_collaborator::Entity::find()
.filter(
project_collaborator::Column::ProjectId
.eq(project_id)
.and(project_collaborator::Column::IsHost.eq(true)),
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
.access_project(
project_id,
connection_id,
PrincipalId::UserId(user_id),
Capability::ReadOnly,
&tx,
)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("failed to read project host"))?;

Ok(host.connection())
.await?;
project.host_connection()
})
.await
.map(|guard| guard.into_inner())
@@ -954,83 +999,56 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let current_participant = room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
.filter(room_participant::Column::AnsweringConnectionId.eq(connection_id.id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such room"))?;

if !current_participant
.role
.map_or(false, |role| role.can_edit_projects())
{
Err(anyhow!("not authorized to edit projects"))?;
}

let host = project_collaborator::Entity::find()
.filter(
project_collaborator::Column::ProjectId
.eq(project_id)
.and(project_collaborator::Column::IsHost.eq(true)),
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
.access_project(
project_id,
connection_id,
PrincipalId::UserId(user_id),
Capability::ReadWrite,
&tx,
)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("failed to read project host"))?;

Ok(host.connection())
.await?;
project.host_connection()
})
.await
.map(|guard| guard.into_inner())
}

pub async fn project_collaborators_for_buffer_update(
pub async fn connections_for_buffer_update(
&self,
project_id: ProjectId,
principal_id: PrincipalId,
connection_id: ConnectionId,
requires_write: bool,
) -> Result<RoomGuard<Vec<ProjectCollaborator>>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let current_participant = room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
.filter(room_participant::Column::AnsweringConnectionId.eq(connection_id.id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such room"))?;
capability: Capability,
) -> Result<TransactionGuard<(ConnectionId, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
// Authorize
let (project, _) = self
.access_project(project_id, connection_id, principal_id, capability, &tx)
.await?;

if requires_write
&& !current_participant
.role
.map_or(false, |role| role.can_edit_projects())
{
Err(anyhow!("not authorized to edit projects"))?;
}
let host_connection_id = project.host_connection()?;

let collaborators = project_collaborator::Entity::find()
.filter(project_collaborator::Column::ProjectId.eq(project_id))
.all(&*tx)
.await?
.into_iter()
.map(|collaborator| ProjectCollaborator {
connection_id: collaborator.connection(),
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
})
.collect::<Vec<_>>();
.await?;

if collaborators
.iter()
.any(|collaborator| collaborator.connection_id == connection_id)
{
Ok(collaborators)
} else {
Err(anyhow!("no such project"))?
}
let guest_connection_ids = collaborators
.into_iter()
.filter_map(|collaborator| {
if collaborator.is_host {
None
} else {
Some(collaborator.connection())
}
})
.collect();

Ok((host_connection_id, guest_connection_ids))
})
.await
}
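
`connections_for_buffer_update` now returns the host connection separately from the guest connections, so callers can route a buffer update to the host and fan the change out to guests. A std-only sketch of that partitioning step (the `Collaborator` struct is a stand-in for the database model, and here the host is taken from the collaborator list rather than from `project.host_connection()` as above):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct ConnectionId(u32);

struct Collaborator {
    connection_id: ConnectionId,
    is_host: bool,
}

// Split a project's collaborators into the single host connection and the
// remaining guest connections.
fn split_host_and_guests(
    collaborators: Vec<Collaborator>,
) -> Option<(ConnectionId, Vec<ConnectionId>)> {
    let host = collaborators.iter().find(|c| c.is_host)?.connection_id;
    let guests = collaborators
        .into_iter()
        .filter(|c| !c.is_host)
        .map(|c| c.connection_id)
        .collect();
    Some((host, guests))
}

fn main() {
    let collaborators = vec![
        Collaborator { connection_id: ConnectionId(1), is_host: true },
        Collaborator { connection_id: ConnectionId(2), is_host: false },
    ];
    let (host, guests) = split_host_and_guests(collaborators).unwrap();
    assert_eq!(host, ConnectionId(1));
    assert_eq!(guests, vec![ConnectionId(2)]);
}
```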
|
||||
@@ -1043,24 +1061,39 @@ impl Database {
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection_id: ConnectionId,
|
||||
) -> Result<RoomGuard<HashSet<ConnectionId>>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
exclude_dev_server: bool,
|
||||
) -> Result<TransactionGuard<HashSet<ConnectionId>>> {
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
|
||||
let mut collaborators = project_collaborator::Entity::find()
|
||||
.filter(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut connection_ids = HashSet::default();
|
||||
if let Some(host_connection) = project.host_connection().log_err() {
|
||||
if !exclude_dev_server {
|
||||
connection_ids.insert(host_connection);
|
||||
}
|
||||
}
|
||||
|
||||
while let Some(collaborator) = collaborators.next().await {
|
||||
let collaborator = collaborator?;
|
||||
connection_ids.insert(collaborator.connection());
|
||||
}
|
||||
|
||||
if connection_ids.contains(&connection_id) {
|
||||
if connection_ids.contains(&connection_id)
|
||||
|| Some(connection_id) == project.host_connection().ok()
|
||||
{
|
||||
Ok(connection_ids)
|
||||
} else {
|
||||
Err(anyhow!("no such project"))?
|
||||
Err(anyhow!(
|
||||
"can only send project updates to a project you're in"
|
||||
))?
|
||||
}
|
||||
})
|
||||
.await
|
||||
@@ -1089,15 +1122,12 @@ impl Database {
|
||||
}
|
||||
|
||||
/// Returns the [`RoomId`] for the given project.
|
||||
pub async fn room_id_for_project(&self, project_id: ProjectId) -> Result<RoomId> {
|
||||
pub async fn room_id_for_project(&self, project_id: ProjectId) -> Result<Option<RoomId>> {
|
||||
self.transaction(|tx| async move {
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
Ok(project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project {} not found", project_id))?;
|
||||
Ok(project
|
||||
.room_id
|
||||
.ok_or_else(|| anyhow!("project not in room"))?)
|
||||
.and_then(|project| project.room_id))
|
||||
})
|
||||
.await
|
||||
}
|
||||
@@ -1142,7 +1172,7 @@ impl Database {
|
||||
project_id: ProjectId,
|
||||
leader_connection: ConnectionId,
|
||||
follower_connection: ConnectionId,
|
||||
) -> Result<RoomGuard<proto::Room>> {
|
||||
) -> Result<TransactionGuard<proto::Room>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
follower::ActiveModel {
|
||||
room_id: ActiveValue::set(room_id),
|
||||
@@ -1173,7 +1203,7 @@ impl Database {
|
||||
project_id: ProjectId,
|
||||
leader_connection: ConnectionId,
|
||||
follower_connection: ConnectionId,
|
||||
) -> Result<RoomGuard<proto::Room>> {
|
||||
) -> Result<TransactionGuard<proto::Room>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
follower::Entity::delete_many()
|
||||
.filter(
|
||||
|
||||
261
crates/collab/src/db/queries/remote_projects.rs
Normal file
@@ -0,0 +1,261 @@
use anyhow::anyhow;
use rpc::{proto, ConnectionId};
use sea_orm::{
ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait,
ModelTrait, QueryFilter,
};

use crate::db::ProjectId;

use super::{
channel, project, project_collaborator, remote_project, worktree, ChannelId, Database,
DevServerId, RejoinedProject, RemoteProjectId, ResharedProject, ServerId, UserId,
};

impl Database {
pub async fn get_remote_project(
&self,
remote_project_id: RemoteProjectId,
) -> crate::Result<remote_project::Model> {
self.transaction(|tx| async move {
Ok(remote_project::Entity::find_by_id(remote_project_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no remote project with id {}", remote_project_id))?)
})
.await
}

pub async fn get_remote_projects(
&self,
channel_ids: &Vec<ChannelId>,
tx: &DatabaseTransaction,
) -> crate::Result<Vec<proto::RemoteProject>> {
let servers = remote_project::Entity::find()
.filter(remote_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
.find_also_related(project::Entity)
.all(tx)
.await?;
Ok(servers
.into_iter()
.map(|(remote_project, project)| proto::RemoteProject {
id: remote_project.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
channel_id: remote_project.channel_id.to_proto(),
name: remote_project.name,
dev_server_id: remote_project.dev_server_id.to_proto(),
path: remote_project.path,
})
.collect())
}

pub async fn get_remote_projects_for_dev_server(
&self,
dev_server_id: DevServerId,
) -> crate::Result<Vec<proto::RemoteProject>> {
self.transaction(|tx| async move {
let servers = remote_project::Entity::find()
.filter(remote_project::Column::DevServerId.eq(dev_server_id))
.find_also_related(project::Entity)
.all(&*tx)
.await?;
Ok(servers
.into_iter()
.map(|(remote_project, project)| proto::RemoteProject {
id: remote_project.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
channel_id: remote_project.channel_id.to_proto(),
name: remote_project.name,
dev_server_id: remote_project.dev_server_id.to_proto(),
path: remote_project.path,
})
.collect())
})
.await
}

pub async fn get_stale_dev_server_projects(
&self,
connection: ConnectionId,
) -> crate::Result<Vec<ProjectId>> {
self.transaction(|tx| async move {
let projects = project::Entity::find()
.filter(
Condition::all()
.add(project::Column::HostConnectionId.eq(connection.id))
.add(project::Column::HostConnectionServerId.eq(connection.owner_id)),
)
.all(&*tx)
.await?;

Ok(projects.into_iter().map(|p| p.id).collect())
})
.await
}

pub async fn create_remote_project(
&self,
channel_id: ChannelId,
dev_server_id: DevServerId,
name: &str,
path: &str,
user_id: UserId,
) -> crate::Result<(channel::Model, remote_project::Model)> {
self.transaction(|tx| async move {
let channel = self.get_channel_internal(channel_id, &tx).await?;
self.check_user_is_channel_admin(&channel, user_id, &tx)
.await?;

let project = remote_project::Entity::insert(remote_project::ActiveModel {
name: ActiveValue::Set(name.to_string()),
id: ActiveValue::NotSet,
channel_id: ActiveValue::Set(channel_id),
dev_server_id: ActiveValue::Set(dev_server_id),
path: ActiveValue::Set(path.to_string()),
})
.exec_with_returning(&*tx)
.await?;

Ok((channel, project))
})
.await
}

pub async fn share_remote_project(
&self,
remote_project_id: RemoteProjectId,
dev_server_id: DevServerId,
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
) -> crate::Result<proto::RemoteProject> {
self.transaction(|tx| async move {
let remote_project = remote_project::Entity::find_by_id(remote_project_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no remote project with id {}", remote_project_id))?;

if remote_project.dev_server_id != dev_server_id {
return Err(anyhow!("remote project shared from wrong server"))?;
}

let project = project::ActiveModel {
room_id: ActiveValue::Set(None),
host_user_id: ActiveValue::Set(None),
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
host_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
id: ActiveValue::NotSet,
hosted_project_id: ActiveValue::Set(None),
remote_project_id: ActiveValue::Set(Some(remote_project_id)),
}
.insert(&*tx)
.await?;

if !worktrees.is_empty() {
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
worktree::ActiveModel {
id: ActiveValue::set(worktree.id as i64),
project_id: ActiveValue::set(project.id),
abs_path: ActiveValue::set(worktree.abs_path.clone()),
root_name: ActiveValue::set(worktree.root_name.clone()),
visible: ActiveValue::set(worktree.visible),
scan_id: ActiveValue::set(0),
completed_scan_id: ActiveValue::set(0),
}
}))
.exec(&*tx)
.await?;
}

Ok(remote_project.to_proto(Some(project)))
})
.await
}

pub async fn reshare_remote_projects(
&self,
reshared_projects: &Vec<proto::UpdateProject>,
dev_server_id: DevServerId,
connection: ConnectionId,
) -> crate::Result<Vec<ResharedProject>> {
// todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?)
self.transaction(|tx| async move {
let mut ret = Vec::new();
for reshared_project in reshared_projects {
let project_id = ProjectId::from_proto(reshared_project.project_id);
let (project, remote_project) = project::Entity::find_by_id(project_id)
.find_also_related(remote_project::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("project does not exist"))?;

if remote_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) {
return Err(anyhow!("remote project reshared from wrong server"))?;
}

let Ok(old_connection_id) = project.host_connection() else {
return Err(anyhow!("remote project was not shared"))?;
};

project::Entity::update(project::ActiveModel {
id: ActiveValue::set(project_id),
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
host_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
..Default::default()
})
.exec(&*tx)
.await?;

let collaborators = project
.find_related(project_collaborator::Entity)
.all(&*tx)
.await?;

self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
.await?;

ret.push(super::ResharedProject {
id: project_id,
old_connection_id,
collaborators: collaborators
.iter()
.map(|collaborator| super::ProjectCollaborator {
connection_id: collaborator.connection(),
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
})
.collect(),
worktrees: reshared_project.worktrees.clone(),
});
}
Ok(ret)
})
.await
}

pub async fn rejoin_remote_projects(
&self,
rejoined_projects: &Vec<proto::RejoinProject>,
user_id: UserId,
connection_id: ConnectionId,
) -> crate::Result<Vec<RejoinedProject>> {
// todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?)
self.transaction(|tx| async move {
let mut ret = Vec::new();
for rejoined_project in rejoined_projects {
if let Some(project) = self
.rejoin_project_internal(&tx, rejoined_project, user_id, connection_id)
.await?
{
ret.push(project);
}
}
Ok(ret)
})
.await
}
}
@@ -6,7 +6,7 @@ impl Database {
&self,
room_id: RoomId,
new_server_id: ServerId,
) -> Result<RoomGuard<RefreshedRoom>> {
) -> Result<TransactionGuard<RefreshedRoom>> {
self.room_transaction(room_id, |tx| async move {
let stale_participant_filter = Condition::all()
.add(room_participant::Column::RoomId.eq(room_id))
@@ -149,7 +149,7 @@ impl Database {
calling_connection: ConnectionId,
called_user_id: UserId,
initial_project_id: Option<ProjectId>,
) -> Result<RoomGuard<(proto::Room, proto::IncomingCall)>> {
) -> Result<TransactionGuard<(proto::Room, proto::IncomingCall)>> {
self.room_transaction(room_id, |tx| async move {
let caller = room_participant::Entity::find()
.filter(
@@ -201,7 +201,7 @@ impl Database {
&self,
room_id: RoomId,
called_user_id: UserId,
) -> Result<RoomGuard<proto::Room>> {
) -> Result<TransactionGuard<proto::Room>> {
self.room_transaction(room_id, |tx| async move {
room_participant::Entity::delete_many()
.filter(
@@ -221,7 +221,7 @@ impl Database {
&self,
expected_room_id: Option<RoomId>,
user_id: UserId,
) -> Result<Option<RoomGuard<proto::Room>>> {
) -> Result<Option<TransactionGuard<proto::Room>>> {
self.optional_room_transaction(|tx| async move {
let mut filter = Condition::all()
.add(room_participant::Column::UserId.eq(user_id))
@@ -258,7 +258,7 @@ impl Database {
room_id: RoomId,
calling_connection: ConnectionId,
called_user_id: UserId,
) -> Result<RoomGuard<proto::Room>> {
) -> Result<TransactionGuard<proto::Room>> {
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
.filter(
@@ -294,7 +294,7 @@ impl Database {
room_id: RoomId,
user_id: UserId,
connection: ConnectionId,
) -> Result<RoomGuard<JoinRoom>> {
) -> Result<TransactionGuard<JoinRoom>> {
self.room_transaction(room_id, |tx| async move {
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryChannelId {
@@ -349,6 +349,17 @@ impl Database {
.await
}

pub async fn stale_room_connection(&self, user_id: UserId) -> Result<Option<ConnectionId>> {
self.transaction(|tx| async move {
let participant = room_participant::Entity::find()
.filter(room_participant::Column::UserId.eq(user_id))
.one(&*tx)
.await?;
Ok(participant.and_then(|p| p.answering_connection()))
})
.await
}

async fn get_next_participant_index_internal(
&self,
room_id: RoomId,
@@ -403,39 +414,50 @@ impl Database {
|
||||
.get_next_participant_index_internal(room_id, tx)
|
||||
.await?;
|
||||
|
||||
room_participant::Entity::insert_many([room_participant::ActiveModel {
|
||||
room_id: ActiveValue::set(room_id),
|
||||
user_id: ActiveValue::set(user_id),
|
||||
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
answering_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
answering_connection_lost: ActiveValue::set(false),
|
||||
calling_user_id: ActiveValue::set(user_id),
|
||||
calling_connection_id: ActiveValue::set(connection.id as i32),
|
||||
calling_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
participant_index: ActiveValue::Set(Some(participant_index)),
|
||||
role: ActiveValue::set(Some(role)),
|
||||
id: ActiveValue::NotSet,
|
||||
location_kind: ActiveValue::NotSet,
|
||||
location_project_id: ActiveValue::NotSet,
|
||||
initial_project_id: ActiveValue::NotSet,
|
||||
}])
|
||||
.on_conflict(
|
||||
OnConflict::columns([room_participant::Column::UserId])
|
||||
.update_columns([
|
||||
room_participant::Column::AnsweringConnectionId,
|
||||
room_participant::Column::AnsweringConnectionServerId,
|
||||
room_participant::Column::AnsweringConnectionLost,
|
||||
room_participant::Column::ParticipantIndex,
|
||||
room_participant::Column::Role,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(tx)
|
||||
.await?;
|
||||
// If someone has been invited into the room, accept the invite instead of inserting
|
||||
let result = room_participant::Entity::update_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(room_participant::Column::RoomId.eq(room_id))
|
||||
.add(room_participant::Column::UserId.eq(user_id))
|
||||
.add(room_participant::Column::AnsweringConnectionId.is_null()),
|
||||
)
|
||||
.set(room_participant::ActiveModel {
|
||||
participant_index: ActiveValue::Set(Some(participant_index)),
|
||||
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
answering_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
answering_connection_lost: ActiveValue::set(false),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(tx)
|
||||
.await?;
|
||||
|
||||
if result.rows_affected == 0 {
|
||||
room_participant::Entity::insert(room_participant::ActiveModel {
|
||||
room_id: ActiveValue::set(room_id),
|
||||
user_id: ActiveValue::set(user_id),
|
||||
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
answering_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
answering_connection_lost: ActiveValue::set(false),
|
||||
calling_user_id: ActiveValue::set(user_id),
|
||||
calling_connection_id: ActiveValue::set(connection.id as i32),
|
||||
calling_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
participant_index: ActiveValue::Set(Some(participant_index)),
|
||||
role: ActiveValue::set(Some(role)),
|
||||
id: ActiveValue::NotSet,
|
||||
location_kind: ActiveValue::NotSet,
|
||||
location_project_id: ActiveValue::NotSet,
|
||||
initial_project_id: ActiveValue::NotSet,
|
||||
})
|
||||
.exec(tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
|
||||
let channel = channel.ok_or_else(|| anyhow!("no channel for room"))?;
|
||||
@@ -450,7 +472,7 @@ impl Database {
|
||||
rejoin_room: proto::RejoinRoom,
|
||||
user_id: UserId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<RejoinedRoom>> {
|
||||
) -> Result<TransactionGuard<RejoinedRoom>> {
|
||||
let room_id = RoomId::from_proto(rejoin_room.id);
|
||||
self.room_transaction(room_id, |tx| async {
|
||||
let tx = tx;
|
||||
@@ -550,180 +572,12 @@ impl Database {
|
||||
|
||||
let mut rejoined_projects = Vec::new();
|
||||
for rejoined_project in &rejoin_room.rejoined_projects {
|
||||
let project_id = ProjectId::from_proto(rejoined_project.id);
|
||||
let Some(project) = project::Entity::find_by_id(project_id).one(&*tx).await? else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let mut worktrees = Vec::new();
|
||||
let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
|
||||
for db_worktree in db_worktrees {
|
||||
let mut worktree = RejoinedWorktree {
|
||||
id: db_worktree.id as u64,
|
||||
abs_path: db_worktree.abs_path,
|
||||
root_name: db_worktree.root_name,
|
||||
visible: db_worktree.visible,
|
||||
updated_entries: Default::default(),
|
||||
removed_entries: Default::default(),
|
||||
updated_repositories: Default::default(),
|
||||
removed_repositories: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
settings_files: Default::default(),
|
||||
scan_id: db_worktree.scan_id as u64,
|
||||
completed_scan_id: db_worktree.completed_scan_id as u64,
|
||||
};
|
||||
|
||||
let rejoined_worktree = rejoined_project
|
||||
.worktrees
|
||||
.iter()
|
||||
.find(|worktree| worktree.id == db_worktree.id as u64);
|
||||
|
||||
// File entries
|
||||
{
|
||||
let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
|
||||
worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id)
|
||||
} else {
|
||||
worktree_entry::Column::IsDeleted.eq(false)
|
||||
};
|
||||
|
||||
let mut db_entries = worktree_entry::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_entry::Column::ProjectId.eq(project.id))
|
||||
.add(worktree_entry::Column::WorktreeId.eq(worktree.id))
|
||||
.add(entry_filter),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
while let Some(db_entry) = db_entries.next().await {
|
||||
let db_entry = db_entry?;
|
||||
if db_entry.is_deleted {
|
||||
worktree.removed_entries.push(db_entry.id as u64);
|
||||
} else {
|
||||
worktree.updated_entries.push(proto::Entry {
|
||||
id: db_entry.id as u64,
|
||||
is_dir: db_entry.is_dir,
|
||||
path: db_entry.path,
|
||||
inode: db_entry.inode as u64,
|
||||
mtime: Some(proto::Timestamp {
|
||||
seconds: db_entry.mtime_seconds as u64,
|
||||
nanos: db_entry.mtime_nanos as u32,
|
||||
}),
|
||||
is_symlink: db_entry.is_symlink,
|
||||
is_ignored: db_entry.is_ignored,
|
||||
is_external: db_entry.is_external,
|
||||
git_status: db_entry.git_status.map(|status| status as i32),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Repository Entries
|
||||
{
|
||||
let repository_entry_filter =
|
||||
if let Some(rejoined_worktree) = rejoined_worktree {
|
||||
worktree_repository::Column::ScanId.gt(rejoined_worktree.scan_id)
|
||||
} else {
|
||||
worktree_repository::Column::IsDeleted.eq(false)
|
||||
};
|
||||
|
||||
let mut db_repositories = worktree_repository::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_repository::Column::ProjectId.eq(project.id))
|
||||
.add(worktree_repository::Column::WorktreeId.eq(worktree.id))
|
||||
.add(repository_entry_filter),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
while let Some(db_repository) = db_repositories.next().await {
|
||||
let db_repository = db_repository?;
|
||||
if db_repository.is_deleted {
|
||||
worktree
|
||||
.removed_repositories
|
||||
.push(db_repository.work_directory_id as u64);
|
||||
} else {
|
||||
worktree.updated_repositories.push(proto::RepositoryEntry {
|
||||
work_directory_id: db_repository.work_directory_id as u64,
|
||||
branch: db_repository.branch,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
worktrees.push(worktree);
|
||||
}
|
||||
|
||||
let language_servers = project
|
||||
.find_related(language_server::Entity)
|
||||
.all(&*tx)
|
||||
if let Some(rejoined_project) = self
|
||||
.rejoin_project_internal(&tx, rejoined_project, user_id, connection)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|language_server| proto::LanguageServer {
|
||||
id: language_server.id as u64,
|
||||
name: language_server.name,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
{
|
||||
let mut db_settings_files = worktree_settings_file::Entity::find()
|
||||
.filter(worktree_settings_file::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_settings_file) = db_settings_files.next().await {
|
||||
let db_settings_file = db_settings_file?;
|
||||
if let Some(worktree) = worktrees
|
||||
.iter_mut()
|
||||
.find(|w| w.id == db_settings_file.worktree_id as u64)
|
||||
{
|
||||
worktree.settings_files.push(WorktreeSettingsFile {
|
||||
path: db_settings_file.path,
|
||||
content: db_settings_file.content,
|
||||
});
|
||||
}
|
||||
}
|
||||
rejoined_projects.push(rejoined_project);
|
||||
}
|
||||
|
||||
let mut collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let self_collaborator = if let Some(self_collaborator_ix) = collaborators
|
||||
.iter()
|
||||
.position(|collaborator| collaborator.user_id == user_id)
|
||||
{
|
||||
collaborators.swap_remove(self_collaborator_ix)
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
let old_connection_id = self_collaborator.connection();
|
||||
project_collaborator::Entity::update(project_collaborator::ActiveModel {
|
||||
connection_id: ActiveValue::set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
|
||||
..self_collaborator.into_active_model()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let collaborators = collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
rejoined_projects.push(RejoinedProject {
|
||||
id: project_id,
|
||||
old_connection_id,
|
||||
collaborators,
|
||||
worktrees,
|
||||
language_servers,
|
||||
});
|
||||
}
|
||||
|
||||
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
|
||||
@@ -738,10 +592,192 @@ impl Database {
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn rejoin_project_internal(
|
||||
&self,
|
||||
tx: &DatabaseTransaction,
|
||||
rejoined_project: &proto::RejoinProject,
|
||||
user_id: UserId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<Option<RejoinedProject>> {
|
||||
let project_id = ProjectId::from_proto(rejoined_project.id);
|
||||
let Some(project) = project::Entity::find_by_id(project_id).one(tx).await? else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
let mut worktrees = Vec::new();
|
||||
let db_worktrees = project.find_related(worktree::Entity).all(tx).await?;
|
||||
for db_worktree in db_worktrees {
|
||||
let mut worktree = RejoinedWorktree {
|
||||
id: db_worktree.id as u64,
|
||||
abs_path: db_worktree.abs_path,
|
||||
root_name: db_worktree.root_name,
|
||||
visible: db_worktree.visible,
|
||||
updated_entries: Default::default(),
|
||||
removed_entries: Default::default(),
|
||||
updated_repositories: Default::default(),
|
||||
removed_repositories: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
settings_files: Default::default(),
|
||||
scan_id: db_worktree.scan_id as u64,
|
||||
completed_scan_id: db_worktree.completed_scan_id as u64,
|
||||
};
|
||||
|
||||
let rejoined_worktree = rejoined_project
|
||||
.worktrees
|
||||
.iter()
|
||||
.find(|worktree| worktree.id == db_worktree.id as u64);
|
||||
|
||||
// File entries
|
||||
{
|
||||
let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
|
||||
worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id)
|
||||
} else {
|
||||
worktree_entry::Column::IsDeleted.eq(false)
|
||||
};
|
||||
|
||||
let mut db_entries = worktree_entry::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_entry::Column::ProjectId.eq(project.id))
|
||||
.add(worktree_entry::Column::WorktreeId.eq(worktree.id))
|
||||
.add(entry_filter),
|
||||
)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
|
||||
while let Some(db_entry) = db_entries.next().await {
|
||||
let db_entry = db_entry?;
|
||||
if db_entry.is_deleted {
|
||||
worktree.removed_entries.push(db_entry.id as u64);
|
||||
} else {
|
||||
worktree.updated_entries.push(proto::Entry {
|
||||
id: db_entry.id as u64,
|
||||
is_dir: db_entry.is_dir,
|
||||
path: db_entry.path,
|
||||
inode: db_entry.inode as u64,
|
||||
mtime: Some(proto::Timestamp {
|
||||
seconds: db_entry.mtime_seconds as u64,
|
||||
nanos: db_entry.mtime_nanos as u32,
|
||||
}),
|
||||
is_symlink: db_entry.is_symlink,
|
||||
is_ignored: db_entry.is_ignored,
|
||||
is_external: db_entry.is_external,
|
||||
git_status: db_entry.git_status.map(|status| status as i32),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Repository Entries
|
||||
{
|
||||
let repository_entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
|
||||
worktree_repository::Column::ScanId.gt(rejoined_worktree.scan_id)
|
||||
} else {
|
||||
worktree_repository::Column::IsDeleted.eq(false)
|
||||
};
|
||||
|
||||
let mut db_repositories = worktree_repository::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_repository::Column::ProjectId.eq(project.id))
|
||||
.add(worktree_repository::Column::WorktreeId.eq(worktree.id))
|
||||
.add(repository_entry_filter),
|
||||
)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
|
||||
while let Some(db_repository) = db_repositories.next().await {
|
||||
let db_repository = db_repository?;
|
||||
if db_repository.is_deleted {
|
||||
worktree
|
||||
.removed_repositories
|
||||
.push(db_repository.work_directory_id as u64);
|
||||
} else {
|
||||
worktree.updated_repositories.push(proto::RepositoryEntry {
|
||||
work_directory_id: db_repository.work_directory_id as u64,
|
||||
branch: db_repository.branch,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
worktrees.push(worktree);
|
||||
}
|
||||
|
||||
let language_servers = project
|
||||
.find_related(language_server::Entity)
|
||||
.all(tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|language_server| proto::LanguageServer {
|
||||
id: language_server.id as u64,
|
||||
name: language_server.name,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
{
|
||||
let mut db_settings_files = worktree_settings_file::Entity::find()
|
||||
.filter(worktree_settings_file::Column::ProjectId.eq(project_id))
|
||||
.stream(tx)
|
||||
.await?;
|
||||
while let Some(db_settings_file) = db_settings_files.next().await {
|
||||
let db_settings_file = db_settings_file?;
|
||||
if let Some(worktree) = worktrees
|
||||
.iter_mut()
|
||||
.find(|w| w.id == db_settings_file.worktree_id as u64)
|
||||
{
|
||||
worktree.settings_files.push(WorktreeSettingsFile {
|
||||
path: db_settings_file.path,
|
||||
content: db_settings_file.content,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(tx)
|
||||
.await?;
|
||||
let self_collaborator = if let Some(self_collaborator_ix) = collaborators
|
||||
.iter()
|
||||
.position(|collaborator| collaborator.user_id == user_id)
|
||||
{
|
||||
collaborators.swap_remove(self_collaborator_ix)
|
||||
} else {
|
||||
return Ok(None);
|
||||
};
|
||||
let old_connection_id = self_collaborator.connection();
|
||||
project_collaborator::Entity::update(project_collaborator::ActiveModel {
|
||||
connection_id: ActiveValue::set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
|
||||
..self_collaborator.into_active_model()
|
||||
})
|
||||
.exec(tx)
|
||||
.await?;
|
||||
|
||||
let collaborators = collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
return Ok(Some(RejoinedProject {
|
||||
id: project_id,
|
||||
old_connection_id,
|
||||
collaborators,
|
||||
worktrees,
|
||||
language_servers,
|
||||
}));
|
||||
}
|
||||
|
||||
pub async fn leave_room(
|
||||
&self,
|
||||
connection: ConnectionId,
|
||||
) -> Result<Option<RoomGuard<LeftRoom>>> {
|
||||
) -> Result<Option<TransactionGuard<LeftRoom>>> {
|
||||
self.optional_room_transaction(|tx| async move {
|
||||
let leaving_participant = room_participant::Entity::find()
|
||||
.filter(
|
||||
@@ -913,7 +949,7 @@ impl Database {
|
||||
room_id: RoomId,
|
||||
connection: ConnectionId,
|
||||
location: proto::ParticipantLocation,
|
||||
) -> Result<RoomGuard<proto::Room>> {
|
||||
) -> Result<TransactionGuard<proto::Room>> {
|
||||
self.room_transaction(room_id, |tx| async {
|
||||
let tx = tx;
|
||||
let location_kind;
|
||||
@@ -975,7 +1011,7 @@ impl Database {
|
||||
room_id: RoomId,
|
||||
user_id: UserId,
|
||||
role: ChannelRole,
|
||||
) -> Result<RoomGuard<proto::Room>> {
|
||||
) -> Result<TransactionGuard<proto::Room>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
room_participant::Entity::find()
|
||||
.filter(
|
||||
@@ -1128,7 +1164,7 @@ impl Database {
|
||||
&self,
|
||||
room_id: RoomId,
|
||||
connection_id: ConnectionId,
|
||||
) -> Result<RoomGuard<HashSet<ConnectionId>>> {
|
||||
) -> Result<TransactionGuard<HashSet<ConnectionId>>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let mut participants = room_participant::Entity::find()
|
||||
.filter(room_participant::Column::RoomId.eq(room_id))
|
||||
|
||||
@@ -24,6 +24,7 @@ pub mod observed_channel_messages;
pub mod project;
pub mod project_collaborator;
pub mod rate_buckets;
pub mod remote_project;
pub mod room;
pub mod room_participant;
pub mod server;

@@ -1,4 +1,5 @@
use crate::db::{ChannelId, DevServerId};
use rpc::proto;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
@@ -15,3 +16,14 @@ impl ActiveModelBehavior for ActiveModel {}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl Model {
pub fn to_proto(&self, status: proto::DevServerStatus) -> proto::DevServer {
proto::DevServer {
dev_server_id: self.id.to_proto(),
channel_id: self.channel_id.to_proto(),
name: self.name.clone(),
status: status as i32,
}
}
}

@@ -1,4 +1,4 @@
use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
use crate::db::{HostedProjectId, ProjectId, RemoteProjectId, Result, RoomId, ServerId, UserId};
use anyhow::anyhow;
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
@@ -13,6 +13,7 @@ pub struct Model {
pub host_connection_id: Option<i32>,
pub host_connection_server_id: Option<ServerId>,
pub hosted_project_id: Option<HostedProjectId>,
pub remote_project_id: Option<RemoteProjectId>,
}

impl Model {
@@ -56,6 +57,12 @@ pub enum Relation {
to = "super::hosted_project::Column::Id"
)]
HostedProject,
#[sea_orm(
belongs_to = "super::remote_project::Entity",
from = "Column::RemoteProjectId",
to = "super::remote_project::Column::Id"
)]
RemoteProject,
}

impl Related<super::user::Entity> for Entity {
@@ -94,4 +101,10 @@ impl Related<super::hosted_project::Entity> for Entity {
}
}

impl Related<super::remote_project::Entity> for Entity {
fn to() -> RelationDef {
Relation::RemoteProject.def()
}
}

impl ActiveModelBehavior for ActiveModel {}

42
crates/collab/src/db/tables/remote_project.rs
Normal file
@@ -0,0 +1,42 @@
use super::project;
use crate::db::{ChannelId, DevServerId, RemoteProjectId};
use rpc::proto;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "remote_projects")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: RemoteProjectId,
pub channel_id: ChannelId,
pub dev_server_id: DevServerId,
pub name: String,
pub path: String,
}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_one = "super::project::Entity")]
Project,
}

impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::Project.def()
}
}

impl Model {
pub fn to_proto(&self, project: Option<project::Model>) -> proto::RemoteProject {
proto::RemoteProject {
id: self.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
channel_id: self.channel_id.to_proto(),
dev_server_id: self.dev_server_id.to_proto(),
name: self.name.clone(),
path: self.path.clone(),
}
}
}
@@ -1,7 +1,7 @@
use crate::db::{ChannelId, ChannelRole, UserId};
use crate::db::{ChannelId, ChannelRole, DevServerId, PrincipalId, UserId};
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashMap, HashSet};
use rpc::ConnectionId;
use rpc::{proto, ConnectionId};
use semantic_version::SemanticVersion;
use serde::Serialize;
use std::fmt;
@@ -10,12 +10,13 @@ use tracing::instrument;
#[derive(Default, Serialize)]
pub struct ConnectionPool {
connections: BTreeMap<ConnectionId, Connection>,
connected_users: BTreeMap<UserId, ConnectedUser>,
connected_users: BTreeMap<UserId, ConnectedPrincipal>,
connected_dev_servers: BTreeMap<DevServerId, ConnectionId>,
channels: ChannelPool,
}

#[derive(Default, Serialize)]
struct ConnectedUser {
struct ConnectedPrincipal {
connection_ids: HashSet<ConnectionId>,
}

@@ -36,7 +37,7 @@ impl ZedVersion {

#[derive(Serialize)]
pub struct Connection {
pub user_id: UserId,
pub principal_id: PrincipalId,
pub admin: bool,
pub zed_version: ZedVersion,
}
@@ -59,7 +60,7 @@ impl ConnectionPool {
self.connections.insert(
connection_id,
Connection {
user_id,
principal_id: PrincipalId::UserId(user_id),
admin,
zed_version,
},
@@ -68,6 +69,25 @@ impl ConnectionPool {
connected_user.connection_ids.insert(connection_id);
}

pub fn add_dev_server(
&mut self,
connection_id: ConnectionId,
dev_server_id: DevServerId,
zed_version: ZedVersion,
) {
self.connections.insert(
connection_id,
Connection {
principal_id: PrincipalId::DevServerId(dev_server_id),
admin: false,
zed_version,
},
);

self.connected_dev_servers
.insert(dev_server_id, connection_id);
}

#[instrument(skip(self))]
pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
let connection = self
@@ -75,12 +95,18 @@ impl ConnectionPool {
.get_mut(&connection_id)
.ok_or_else(|| anyhow!("no such connection"))?;

let user_id = connection.user_id;
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
self.channels.remove_user(&user_id);
match connection.principal_id {
PrincipalId::UserId(user_id) => {
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
self.channels.remove_user(&user_id);
}
}
PrincipalId::DevServerId(dev_server_id) => {
self.connected_dev_servers.remove(&dev_server_id);
}
}
self.connections.remove(&connection_id).unwrap();
Ok(())
@@ -110,6 +136,18 @@ impl ConnectionPool {
.copied()
}

pub fn dev_server_status(&self, dev_server_id: DevServerId) -> proto::DevServerStatus {
if self.dev_server_connection_id(dev_server_id).is_some() {
proto::DevServerStatus::Online
} else {
proto::DevServerStatus::Offline
}
}

pub fn dev_server_connection_id(&self, dev_server_id: DevServerId) -> Option<ConnectionId> {
self.connected_dev_servers.get(&dev_server_id).copied()
}

pub fn channel_user_ids(
&self,
channel_id: ChannelId,
@@ -154,22 +192,39 @@ impl ConnectionPool {
#[cfg(test)]
pub fn check_invariants(&self) {
for (connection_id, connection) in &self.connections {
assert!(self
.connected_users
.get(&connection.user_id)
.unwrap()
.connection_ids
.contains(connection_id));
match &connection.principal_id {
PrincipalId::UserId(user_id) => {
assert!(self
.connected_users
.get(user_id)
.unwrap()
.connection_ids
.contains(connection_id));
}
PrincipalId::DevServerId(dev_server_id) => {
assert_eq!(
self.connected_dev_servers.get(&dev_server_id).unwrap(),
connection_id
);
}
}
}

for (user_id, state) in &self.connected_users {
for connection_id in &state.connection_ids {
assert_eq!(
self.connections.get(connection_id).unwrap().user_id,
*user_id
self.connections.get(connection_id).unwrap().principal_id,
PrincipalId::UserId(*user_id)
);
}
}

for (dev_server_id, connection_id) in &self.connected_dev_servers {
assert_eq!(
self.connections.get(connection_id).unwrap().principal_id,
PrincipalId::DevServerId(*dev_server_id)
);
}
}
}

@@ -8,6 +8,7 @@ mod channel_buffer_tests;
mod channel_guest_tests;
mod channel_message_tests;
mod channel_tests;
mod dev_server_tests;
mod editor_tests;
mod following_tests;
mod integration_tests;
@@ -222,8 +222,18 @@ async fn test_remove_channel_message(
|
||||
.update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
channel_chat_a
|
||||
.update(cx_a, |c, cx| c.send_message("two".into(), cx).unwrap())
|
||||
let msg_id_2 = channel_chat_a
|
||||
.update(cx_a, |c, cx| {
|
||||
c.send_message(
|
||||
MessageParams {
|
||||
text: "two @user_b".to_string(),
|
||||
mentions: vec![(4..12, client_b.id())],
|
||||
reply_to_message_id: None,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.unwrap()
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
channel_chat_a
|
||||
@@ -233,10 +243,24 @@ async fn test_remove_channel_message(
|
||||
|
||||
// Clients A and B see all of the messages.
|
||||
executor.run_until_parked();
|
||||
let expected_messages = &["one", "two", "three"];
|
||||
let expected_messages = &["one", "two @user_b", "three"];
|
||||
assert_messages(&channel_chat_a, expected_messages, cx_a);
|
||||
assert_messages(&channel_chat_b, expected_messages, cx_b);
|
||||
|
||||
// Ensure that client B received a notification for the mention.
|
||||
client_b.notification_store().read_with(cx_b, |store, _| {
|
||||
assert_eq!(store.notification_count(), 2);
|
||||
let entry = store.notification_at(0).unwrap();
|
||||
assert_eq!(
|
||||
entry.notification,
|
||||
Notification::ChannelMessageMention {
|
||||
message_id: msg_id_2,
|
||||
sender_id: client_a.id(),
|
||||
channel_id: channel_id.0,
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
// Client A deletes one of their messages.
|
||||
channel_chat_a
|
||||
.update(cx_a, |c, cx| {
|
||||
@@ -261,6 +285,13 @@ async fn test_remove_channel_message(
|
||||
.await
|
||||
.unwrap();
|
||||
assert_messages(&channel_chat_c, expected_messages, cx_c);
|
||||
|
||||
// Ensure we remove the notifications when the message is removed
|
||||
client_b.notification_store().read_with(cx_b, |store, _| {
|
||||
// First notification is the channel invitation, second would be the mention
|
||||
// notification, which should now be removed.
|
||||
assert_eq!(store.notification_count(), 1);
|
||||
});
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
@@ -598,4 +629,97 @@ async fn test_chat_editing(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
// Test update message and keep the mention and check that the body is updated correctly
|
||||
|
||||
channel_chat_a
|
||||
.update(cx_a, |c, cx| {
|
||||
c.update_message(
|
||||
msg_id,
|
||||
MessageParams {
|
||||
text: "Updated body v2 including a mention for @user_b".into(),
|
||||
reply_to_message_id: None,
|
||||
mentions: vec![(37..45, client_b.id())],
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.unwrap()
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx_a.run_until_parked();
|
||||
cx_b.run_until_parked();
|
||||
|
||||
channel_chat_a.update(cx_a, |channel_chat, _| {
|
||||
assert_eq!(
|
||||
channel_chat.find_loaded_message(msg_id).unwrap().body,
|
||||
"Updated body v2 including a mention for @user_b",
|
||||
)
|
||||
});
|
||||
channel_chat_b.update(cx_b, |channel_chat, _| {
|
||||
assert_eq!(
|
||||
channel_chat.find_loaded_message(msg_id).unwrap().body,
|
||||
"Updated body v2 including a mention for @user_b",
|
||||
)
|
||||
});
|
||||
|
||||
client_b.notification_store().read_with(cx_b, |store, _| {
|
||||
let message = store.channel_message_for_id(msg_id);
|
||||
assert!(message.is_some());
|
||||
assert_eq!(
|
||||
message.unwrap().body,
|
||||
"Updated body v2 including a mention for @user_b"
|
||||
);
|
||||
assert_eq!(store.notification_count(), 2);
|
||||
let entry = store.notification_at(0).unwrap();
|
||||
assert_eq!(
|
||||
entry.notification,
|
||||
Notification::ChannelMessageMention {
|
||||
message_id: msg_id,
|
||||
sender_id: client_a.id(),
|
||||
channel_id: channel_id.0,
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
// If we remove a mention from a message the corresponding mention notification
|
||||
// should also be removed.
|
||||
|
||||
channel_chat_a
|
||||
.update(cx_a, |c, cx| {
|
||||
c.update_message(
|
||||
msg_id,
|
||||
MessageParams {
|
||||
text: "Updated body without a mention".into(),
|
||||
reply_to_message_id: None,
|
||||
mentions: vec![],
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.unwrap()
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx_a.run_until_parked();
|
||||
cx_b.run_until_parked();
|
||||
|
||||
channel_chat_a.update(cx_a, |channel_chat, _| {
|
||||
assert_eq!(
|
||||
channel_chat.find_loaded_message(msg_id).unwrap().body,
|
||||
"Updated body without a mention",
|
||||
)
|
||||
});
|
||||
channel_chat_b.update(cx_b, |channel_chat, _| {
|
||||
assert_eq!(
|
||||
channel_chat.find_loaded_message(msg_id).unwrap().body,
|
||||
"Updated body without a mention",
|
||||
)
|
||||
});
|
||||
client_b.notification_store().read_with(cx_b, |store, _| {
|
||||
// First notification is the channel invitation, second would be the mention
|
||||
// notification, which should now be removed.
|
||||
assert_eq!(store.notification_count(), 1);
|
||||
});
|
||||
}
|
||||
|
||||
110
crates/collab/src/tests/dev_server_tests.rs
Normal file
@@ -0,0 +1,110 @@
use std::path::Path;

use editor::Editor;
use fs::Fs;
use gpui::VisualTestContext;
use rpc::proto::DevServerStatus;
use serde_json::json;

use crate::tests::TestServer;

#[gpui::test]
async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client) = TestServer::start1(cx).await;

let channel_id = server
.make_channel("test", None, (&client, cx), &mut [])
.await;

let resp = client
.channel_store()
.update(cx, |store, cx| {
store.create_dev_server(channel_id, "server-1".to_string(), cx)
})
.await
.unwrap();

client.channel_store().update(cx, |store, _| {
assert_eq!(store.dev_servers_for_id(channel_id).len(), 1);
assert_eq!(store.dev_servers_for_id(channel_id)[0].name, "server-1");
assert_eq!(
store.dev_servers_for_id(channel_id)[0].status,
DevServerStatus::Offline
);
});

let dev_server = server.create_dev_server(resp.access_token, cx2).await;
cx.executor().run_until_parked();
client.channel_store().update(cx, |store, _| {
assert_eq!(
store.dev_servers_for_id(channel_id)[0].status,
DevServerStatus::Online
);
});

dev_server
.fs()
.insert_tree(
"/remote",
json!({
"1.txt": "remote\nremote\nremote",
"2.js": "function two() { return 2; }",
"3.rs": "mod test",
}),
)
.await;

client
.channel_store()
.update(cx, |store, cx| {
store.create_remote_project(
channel_id,
client::DevServerId(resp.dev_server_id),
"project-1".to_string(),
"/remote".to_string(),
cx,
)
})
.await
.unwrap();

cx.executor().run_until_parked();

let remote_workspace = client
.channel_store()
.update(cx, |store, cx| {
let projects = store.remote_projects_for_id(channel_id);
assert_eq!(projects.len(), 1);
assert_eq!(projects[0].name, "project-1");
workspace::join_remote_project(
projects[0].project_id.unwrap(),
client.app_state.clone(),
cx,
)
})
.await
.unwrap();

cx.executor().run_until_parked();

let cx2 = VisualTestContext::from_window(remote_workspace.into(), cx).as_mut();
cx2.simulate_keystrokes("cmd-p 1 enter");

let editor = remote_workspace
.update(cx2, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
})
.unwrap();
editor.update(cx2, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
});
cx2.simulate_input("wow!");
cx2.simulate_keystrokes("cmd-s");

let content = dev_server
.fs()
.load(&Path::new("/remote/1.txt"))
.await
.unwrap();
assert_eq!(content, "wow!remote\nremote\nremote\n");
}
@@ -2007,7 +2007,7 @@ async fn test_following_to_channel_notes_without_a_shared_project(
});
}

async fn join_channel(
pub(crate) async fn join_channel(
channel_id: ChannelId,
client: &TestClient,
cx: &mut TestAppContext,
@@ -1,12 +1,16 @@
use crate::{
rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
tests::{channel_id, room_participants, rust_lang, RoomParticipants, TestClient, TestServer},
tests::{
channel_id, following_tests::join_channel, room_participants, rust_lang, RoomParticipants,
TestClient, TestServer,
},
};
use anyhow::{anyhow, Result};
use call::{room, ActiveCall, ParticipantLocation, Room};
use client::{User, RECEIVE_TIMEOUT};
use collections::{HashMap, HashSet};
use fs::{repository::GitFileStatus, FakeFs, Fs as _, RemoveOptions};
use futures::StreamExt as _;
use futures::{channel::mpsc, StreamExt as _};
use gpui::{
px, size, AppContext, BackgroundExecutor, BorrowAppContext, Model, Modifiers, MouseButton,
MouseDownEvent, TestAppContext,
@@ -18,6 +22,7 @@ use language::{
};
use live_kit_client::MacOSDisplay;
use lsp::LanguageServerId;
use parking_lot::Mutex;
use project::{
search::SearchQuery, DiagnosticSummary, FormatTrigger, HoverBlockKind, Project, ProjectPath,
SearchResult,
@@ -37,6 +42,7 @@ use std::{
time::Duration,
};
use unindent::Unindent as _;
use workspace::Pane;

#[ctor::ctor]
|
||||
fn init_logger() {
|
||||
@@ -1863,6 +1869,24 @@ async fn test_active_call_events(
|
||||
executor.run_until_parked();
|
||||
assert_eq!(mem::take(&mut *events_a.borrow_mut()), vec![]);
|
||||
assert_eq!(mem::take(&mut *events_b.borrow_mut()), vec![]);
|
||||
|
||||
// Unsharing a project should dispatch the RemoteProjectUnshared event.
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| call.hang_up(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
executor.run_until_parked();
|
||||
|
||||
assert_eq!(
|
||||
mem::take(&mut *events_a.borrow_mut()),
|
||||
vec![room::Event::RoomLeft { channel_id: None }]
|
||||
);
|
||||
assert_eq!(
|
||||
mem::take(&mut *events_b.borrow_mut()),
|
||||
vec![room::Event::RemoteProjectUnshared {
|
||||
project_id: project_a_id,
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
fn active_call_events(cx: &mut TestAppContext) -> Rc<RefCell<Vec<room::Event>>> {
|
||||
@@ -3736,7 +3760,7 @@ async fn test_leaving_project(
|
||||
|
||||
// Client B can't join the project, unless they re-join the room.
|
||||
cx_b.spawn(|cx| {
|
||||
Project::remote(
|
||||
Project::in_room(
|
||||
project_id,
|
||||
client_b.app_state.client.clone(),
|
||||
client_b.user_store().clone(),
|
||||
@@ -4638,9 +4662,17 @@ async fn test_references(
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
|
||||
client_a.language_registry().add(rust_lang());
|
||||
let mut fake_language_servers = client_a
|
||||
.language_registry()
|
||||
.register_fake_lsp_adapter("Rust", Default::default());
|
||||
let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter(
|
||||
"Rust",
|
||||
FakeLspAdapter {
|
||||
name: "my-fake-lsp-adapter",
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
references_provider: Some(lsp::OneOf::Left(true)),
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
);
|
||||
|
||||
client_a
|
||||
.fs()
|
||||
@@ -4670,12 +4702,40 @@ async fn test_references(
|
||||
|
||||
// Request references to a symbol as the guest.
|
||||
let fake_language_server = fake_language_servers.next().await.unwrap();
|
||||
fake_language_server.handle_request::<lsp::request::References, _, _>(|params, _| async move {
|
||||
let (lsp_response_tx, rx) = mpsc::unbounded::<Result<Option<Vec<lsp::Location>>>>();
|
||||
fake_language_server.handle_request::<lsp::request::References, _, _>({
|
||||
let rx = Arc::new(Mutex::new(Some(rx)));
|
||||
move |params, _| {
|
||||
assert_eq!(
|
||||
params.text_document_position.text_document.uri.as_str(),
|
||||
"file:///root/dir-1/one.rs"
|
||||
);
|
||||
let rx = rx.clone();
|
||||
async move {
|
||||
let mut response_rx = rx.lock().take().unwrap();
|
||||
let result = response_rx.next().await.unwrap();
|
||||
*rx.lock() = Some(response_rx);
|
||||
result
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let references = project_b.update(cx_b, |p, cx| p.references(&buffer_b, 7, cx));
|
||||
|
||||
// User is informed that a request is pending.
|
||||
executor.run_until_parked();
|
||||
project_b.read_with(cx_b, |project, _| {
|
||||
let status = project.language_server_statuses().next().cloned().unwrap();
|
||||
assert_eq!(status.name, "my-fake-lsp-adapter");
|
||||
assert_eq!(
|
||||
params.text_document_position.text_document.uri.as_str(),
|
||||
"file:///root/dir-1/one.rs"
|
||||
status.pending_work.values().next().unwrap().message,
|
||||
Some("Finding references...".into())
|
||||
);
|
||||
Ok(Some(vec![
|
||||
});
|
||||
|
||||
// Cause the language server to respond.
|
||||
lsp_response_tx
|
||||
.unbounded_send(Ok(Some(vec![
|
||||
lsp::Location {
|
||||
uri: lsp::Url::from_file_path("/root/dir-1/two.rs").unwrap(),
|
||||
range: lsp::Range::new(lsp::Position::new(0, 24), lsp::Position::new(0, 27)),
|
||||
@@ -4688,16 +4748,18 @@ async fn test_references(
|
||||
uri: lsp::Url::from_file_path("/root/dir-2/three.rs").unwrap(),
|
||||
range: lsp::Range::new(lsp::Position::new(0, 37), lsp::Position::new(0, 40)),
|
||||
},
|
||||
]))
|
||||
});
|
||||
|
||||
let references = project_b
|
||||
.update(cx_b, |p, cx| p.references(&buffer_b, 7, cx))
|
||||
.await
|
||||
])))
|
||||
.unwrap();
|
||||
cx_b.read(|cx| {
|
||||
|
||||
let references = references.await.unwrap();
|
||||
executor.run_until_parked();
|
||||
project_b.read_with(cx_b, |project, cx| {
|
||||
// User is informed that a request is no longer pending.
|
||||
let status = project.language_server_statuses().next().unwrap();
|
||||
assert!(status.pending_work.is_empty());
|
||||
|
||||
assert_eq!(references.len(), 3);
|
||||
assert_eq!(project_b.read(cx).worktrees().count(), 2);
|
||||
assert_eq!(project.worktrees().count(), 2);
|
||||
|
||||
let two_buffer = references[0].buffer.read(cx);
|
||||
let three_buffer = references[2].buffer.read(cx);
|
||||
@@ -4715,6 +4777,32 @@ async fn test_references(
|
||||
assert_eq!(references[1].range.to_offset(two_buffer), 35..38);
|
||||
assert_eq!(references[2].range.to_offset(three_buffer), 37..40);
|
||||
});
|
||||
|
||||
let references = project_b.update(cx_b, |p, cx| p.references(&buffer_b, 7, cx));
|
||||
|
||||
// User is informed that a request is pending.
|
||||
executor.run_until_parked();
|
||||
project_b.read_with(cx_b, |project, _| {
|
||||
let status = project.language_server_statuses().next().cloned().unwrap();
|
||||
assert_eq!(status.name, "my-fake-lsp-adapter");
|
||||
assert_eq!(
|
||||
status.pending_work.values().next().unwrap().message,
|
||||
Some("Finding references...".into())
|
||||
);
|
||||
});
|
||||
|
||||
// Cause the LSP request to fail.
|
||||
lsp_response_tx
|
||||
.unbounded_send(Err(anyhow!("can't find references")))
|
||||
.unwrap();
|
||||
references.await.unwrap_err();
|
||||
|
||||
// User is informed that the request is no longer pending.
|
||||
executor.run_until_parked();
|
||||
project_b.read_with(cx_b, |project, _| {
|
||||
let status = project.language_server_statuses().next().unwrap();
|
||||
assert!(status.pending_work.is_empty());
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
@@ -4931,9 +5019,35 @@ async fn test_lsp_hover(
|
||||
.await;
|
||||
|
||||
client_a.language_registry().add(rust_lang());
|
||||
let language_server_names = ["rust-analyzer", "CrabLang-ls"];
|
||||
let mut fake_language_servers = client_a
|
||||
.language_registry()
|
||||
.register_fake_lsp_adapter("Rust", Default::default());
|
||||
.register_specific_fake_lsp_adapter(
|
||||
"Rust",
|
||||
true,
|
||||
FakeLspAdapter {
|
||||
name: "rust-analyzer",
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
|
||||
..lsp::ServerCapabilities::default()
|
||||
},
|
||||
..FakeLspAdapter::default()
|
||||
},
|
||||
);
|
||||
let _other_server = client_a
|
||||
.language_registry()
|
||||
.register_specific_fake_lsp_adapter(
|
||||
"Rust",
|
||||
false,
|
||||
FakeLspAdapter {
|
||||
name: "CrabLang-ls",
|
||||
capabilities: lsp::ServerCapabilities {
|
||||
hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
|
||||
..lsp::ServerCapabilities::default()
|
||||
},
|
||||
..FakeLspAdapter::default()
|
||||
},
|
||||
);
|
||||
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
|
||||
let project_id = active_call_a
|
||||
@@ -4946,66 +5060,133 @@ async fn test_lsp_hover(
|
||||
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
|
||||
let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
|
||||
|
||||
// Request hover information as the guest.
|
||||
let fake_language_server = fake_language_servers.next().await.unwrap();
|
||||
fake_language_server.handle_request::<lsp::request::HoverRequest, _, _>(
|
||||
|params, _| async move {
|
||||
assert_eq!(
|
||||
params
|
||||
.text_document_position_params
|
||||
.text_document
|
||||
.uri
|
||||
.as_str(),
|
||||
"file:///root-1/main.rs"
|
||||
);
|
||||
assert_eq!(
|
||||
params.text_document_position_params.position,
|
||||
lsp::Position::new(0, 22)
|
||||
);
|
||||
Ok(Some(lsp::Hover {
|
||||
contents: lsp::HoverContents::Array(vec![
|
||||
lsp::MarkedString::String("Test hover content.".to_string()),
|
||||
lsp::MarkedString::LanguageString(lsp::LanguageString {
|
||||
language: "Rust".to_string(),
|
||||
value: "let foo = 42;".to_string(),
|
||||
}),
|
||||
]),
|
||||
range: Some(lsp::Range::new(
|
||||
lsp::Position::new(0, 22),
|
||||
lsp::Position::new(0, 29),
|
||||
)),
|
||||
}))
|
||||
},
|
||||
);
|
||||
let mut servers_with_hover_requests = HashMap::default();
|
||||
for i in 0..language_server_names.len() {
|
||||
let new_server = fake_language_servers.next().await.unwrap_or_else(|| {
|
||||
panic!(
|
||||
"Failed to get language server #{i} with name {}",
|
||||
&language_server_names[i]
|
||||
)
|
||||
});
|
||||
let new_server_name = new_server.server.name();
|
||||
assert!(
|
||||
!servers_with_hover_requests.contains_key(new_server_name),
|
||||
"Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
|
||||
);
|
||||
let new_server_name = new_server_name.to_string();
|
||||
match new_server_name.as_str() {
|
||||
"CrabLang-ls" => {
|
||||
servers_with_hover_requests.insert(
|
||||
new_server_name.clone(),
|
||||
new_server.handle_request::<lsp::request::HoverRequest, _, _>(
|
||||
move |params, _| {
|
||||
assert_eq!(
|
||||
params
|
||||
.text_document_position_params
|
||||
.text_document
|
||||
.uri
|
||||
.as_str(),
|
||||
"file:///root-1/main.rs"
|
||||
);
|
||||
let name = new_server_name.clone();
|
||||
async move {
|
||||
Ok(Some(lsp::Hover {
|
||||
contents: lsp::HoverContents::Scalar(
|
||||
lsp::MarkedString::String(format!("{name} hover")),
|
||||
),
|
||||
range: None,
|
||||
}))
|
||||
}
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
"rust-analyzer" => {
|
||||
servers_with_hover_requests.insert(
|
||||
new_server_name.clone(),
|
||||
new_server.handle_request::<lsp::request::HoverRequest, _, _>(
|
||||
|params, _| async move {
|
||||
assert_eq!(
|
||||
params
|
||||
.text_document_position_params
|
||||
.text_document
|
||||
.uri
|
||||
.as_str(),
|
||||
"file:///root-1/main.rs"
|
||||
);
|
||||
assert_eq!(
|
||||
params.text_document_position_params.position,
|
||||
lsp::Position::new(0, 22)
|
||||
);
|
||||
Ok(Some(lsp::Hover {
|
||||
contents: lsp::HoverContents::Array(vec![
|
||||
lsp::MarkedString::String("Test hover content.".to_string()),
|
||||
lsp::MarkedString::LanguageString(lsp::LanguageString {
|
||||
language: "Rust".to_string(),
|
||||
value: "let foo = 42;".to_string(),
|
||||
}),
|
||||
]),
|
||||
range: Some(lsp::Range::new(
|
||||
lsp::Position::new(0, 22),
|
||||
lsp::Position::new(0, 29),
|
||||
)),
|
||||
}))
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
unexpected => panic!("Unexpected server name: {unexpected}"),
|
||||
}
|
||||
}
|
||||
|
||||
let hovers = project_b
// Request hover information as the guest.
let mut hovers = project_b
.update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx))
.await;
assert_eq!(
hovers.len(),
1,
"Expected exactly one hover but got: {hovers:?}"
2,
"Expected two hovers from both language servers, but got: {hovers:?}"
);
let hover_info = hovers.into_iter().next().unwrap();
|
||||
|
||||
let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
|
||||
|mut hover_request| async move {
|
||||
hover_request
|
||||
.next()
|
||||
.await
|
||||
.expect("All hover requests should have been triggered")
|
||||
},
|
||||
))
|
||||
.await;
|
||||
|
||||
hovers.sort_by_key(|hover| hover.contents.len());
|
||||
let first_hover = hovers.first().cloned().unwrap();
|
||||
assert_eq!(
|
||||
first_hover.contents,
|
||||
vec![project::HoverBlock {
|
||||
text: "CrabLang-ls hover".to_string(),
|
||||
kind: HoverBlockKind::Markdown,
|
||||
},]
|
||||
);
|
||||
let second_hover = hovers.last().cloned().unwrap();
|
||||
assert_eq!(
|
||||
second_hover.contents,
|
||||
vec![
|
||||
project::HoverBlock {
|
||||
text: "Test hover content.".to_string(),
|
||||
kind: HoverBlockKind::Markdown,
|
||||
},
|
||||
project::HoverBlock {
|
||||
text: "let foo = 42;".to_string(),
|
||||
kind: HoverBlockKind::Code {
|
||||
language: "Rust".to_string()
|
||||
},
|
||||
}
|
||||
]
|
||||
);
|
||||
buffer_b.read_with(cx_b, |buffer, _| {
|
||||
let snapshot = buffer.snapshot();
|
||||
assert_eq!(hover_info.range.unwrap().to_offset(&snapshot), 22..29);
|
||||
assert_eq!(
|
||||
hover_info.contents,
|
||||
vec![
|
||||
project::HoverBlock {
|
||||
text: "Test hover content.".to_string(),
|
||||
kind: HoverBlockKind::Markdown,
|
||||
},
|
||||
project::HoverBlock {
|
||||
text: "let foo = 42;".to_string(),
|
||||
kind: HoverBlockKind::Code {
|
||||
language: "Rust".to_string()
|
||||
},
|
||||
}
|
||||
]
|
||||
);
|
||||
assert_eq!(second_hover.range.unwrap().to_offset(&snapshot), 22..29);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -5914,7 +6095,7 @@ async fn test_right_click_menu_behind_collab_panel(cx: &mut TestAppContext) {

#[gpui::test]
async fn test_cmd_k_left(cx: &mut TestAppContext) {
let client = TestServer::start1(cx).await;
let (_, client) = TestServer::start1(cx).await;
let (workspace, cx) = client.build_test_workspace(cx).await;

cx.simulate_keystrokes("cmd-n");
@@ -5934,3 +6115,282 @@ async fn test_cmd_k_left(cx: &mut TestAppContext) {
assert!(workspace.items(cx).collect::<Vec<_>>().len() == 2);
});
}

#[gpui::test]
async fn test_join_after_restart(cx1: &mut TestAppContext, cx2: &mut TestAppContext) {
let (mut server, client) = TestServer::start1(cx1).await;
let channel1 = server.make_public_channel("channel1", &client, cx1).await;
let channel2 = server.make_public_channel("channel2", &client, cx1).await;

join_channel(channel1, &client, cx1).await.unwrap();
drop(client);

let client2 = server.create_client(cx2, "user_a").await;
join_channel(channel2, &client2, cx2).await.unwrap();
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_preview_tabs(cx: &mut TestAppContext) {
|
||||
let (_server, client) = TestServer::start1(cx).await;
|
||||
let (workspace, cx) = client.build_test_workspace(cx).await;
|
||||
let project = workspace.update(cx, |workspace, _| workspace.project().clone());
|
||||
|
||||
let worktree_id = project.update(cx, |project, cx| {
|
||||
project.worktrees().next().unwrap().read(cx).id()
|
||||
});
|
||||
|
||||
let path_1 = ProjectPath {
|
||||
worktree_id,
|
||||
path: Path::new("1.txt").into(),
|
||||
};
|
||||
let path_2 = ProjectPath {
|
||||
worktree_id,
|
||||
path: Path::new("2.js").into(),
|
||||
};
|
||||
let path_3 = ProjectPath {
|
||||
worktree_id,
|
||||
path: Path::new("3.rs").into(),
|
||||
};
|
||||
|
||||
let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
|
||||
|
||||
let get_path = |pane: &Pane, idx: usize, cx: &AppContext| {
|
||||
pane.item_for_index(idx).unwrap().project_path(cx).unwrap()
|
||||
};
|
||||
|
||||
// Opening item 3 as a "permanent" tab
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.open_path(path_3.clone(), None, false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 1);
|
||||
assert_eq!(get_path(pane, 0, cx), path_3.clone());
|
||||
assert_eq!(pane.preview_item_id(), None);
|
||||
|
||||
assert!(!pane.can_navigate_backward());
|
||||
assert!(!pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
// Open item 1 as preview
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.open_path_preview(path_1.clone(), None, true, true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 2);
|
||||
assert_eq!(get_path(pane, 0, cx), path_3.clone());
|
||||
assert_eq!(get_path(pane, 1, cx), path_1.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(1).unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(!pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
// Open item 2 as preview
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.open_path_preview(path_2.clone(), None, true, true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 2);
|
||||
assert_eq!(get_path(pane, 0, cx), path_3.clone());
|
||||
assert_eq!(get_path(pane, 1, cx), path_2.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(1).unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(!pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
// Going back should show item 1 as preview
|
||||
workspace
|
||||
.update(cx, |workspace, cx| workspace.go_back(pane.downgrade(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 2);
|
||||
assert_eq!(get_path(pane, 0, cx), path_3.clone());
|
||||
assert_eq!(get_path(pane, 1, cx), path_1.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(1).unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
// Closing item 1
|
||||
pane.update(cx, |pane, cx| {
|
||||
pane.close_item_by_id(
|
||||
pane.active_item().unwrap().item_id(),
|
||||
workspace::SaveIntent::Skip,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 1);
|
||||
assert_eq!(get_path(pane, 0, cx), path_3.clone());
|
||||
assert_eq!(pane.preview_item_id(), None);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(!pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
// Going back should show item 1 as preview
|
||||
workspace
|
||||
.update(cx, |workspace, cx| workspace.go_back(pane.downgrade(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 2);
|
||||
assert_eq!(get_path(pane, 0, cx), path_3.clone());
|
||||
assert_eq!(get_path(pane, 1, cx), path_1.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(1).unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
// Close permanent tab
|
||||
pane.update(cx, |pane, cx| {
|
||||
let id = pane.items().nth(0).unwrap().item_id();
|
||||
pane.close_item_by_id(id, workspace::SaveIntent::Skip, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 1);
|
||||
assert_eq!(get_path(pane, 0, cx), path_1.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(0).unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
// Split pane to the right
|
||||
pane.update(cx, |pane, cx| {
|
||||
pane.split(workspace::SplitDirection::Right, cx);
|
||||
});
|
||||
|
||||
let right_pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 1);
|
||||
assert_eq!(get_path(pane, 0, cx), path_1.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(0).unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
right_pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 1);
|
||||
assert_eq!(get_path(pane, 0, cx), path_1.clone());
|
||||
assert_eq!(pane.preview_item_id(), None);
|
||||
|
||||
assert!(!pane.can_navigate_backward());
|
||||
assert!(!pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
// Open item 2 as preview in right pane
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.open_path_preview(path_2.clone(), None, true, true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 1);
|
||||
assert_eq!(get_path(pane, 0, cx), path_1.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(0).unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
right_pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 2);
|
||||
assert_eq!(get_path(pane, 0, cx), path_1.clone());
|
||||
assert_eq!(get_path(pane, 1, cx), path_2.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(1).unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(!pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
// Focus left pane
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace.activate_pane_in_direction(workspace::SplitDirection::Left, cx)
|
||||
});
|
||||
|
||||
// Open item 2 as preview in left pane
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.open_path_preview(path_2.clone(), None, true, true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 1);
|
||||
assert_eq!(get_path(pane, 0, cx), path_2.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(0).unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(!pane.can_navigate_forward());
|
||||
});
|
||||
|
||||
right_pane.update(cx, |pane, cx| {
|
||||
assert_eq!(pane.items_len(), 2);
|
||||
assert_eq!(get_path(pane, 0, cx), path_1.clone());
|
||||
assert_eq!(get_path(pane, 1, cx), path_2.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(1).unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
assert!(!pane.can_navigate_forward());
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
use crate::{
auth::split_dev_server_token,
db::{tests::TestDb, NewUserParams, UserId},
executor::Executor,
rpc::{Principal, Server, ZedVersion, CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
@@ -135,9 +136,10 @@ impl TestServer {
(server, client_a, client_b, channel_id)
}

pub async fn start1(cx: &mut TestAppContext) -> TestClient {
pub async fn start1(cx: &mut TestAppContext) -> (TestServer, TestClient) {
let mut server = Self::start(cx.executor().clone()).await;
server.create_client(cx, "user_a").await
let client = server.create_client(cx, "user_a").await;
(server, client)
}

pub async fn reset(&self) {
@@ -301,6 +303,130 @@ impl TestServer {
|
||||
client
|
||||
}
|
||||
|
||||
pub async fn create_dev_server(
|
||||
&self,
|
||||
access_token: String,
|
||||
cx: &mut TestAppContext,
|
||||
) -> TestClient {
|
||||
cx.update(|cx| {
|
||||
if cx.has_global::<SettingsStore>() {
|
||||
panic!("Same cx used to create two test clients")
|
||||
}
|
||||
let settings = SettingsStore::test(cx);
|
||||
cx.set_global(settings);
|
||||
release_channel::init("0.0.0", cx);
|
||||
client::init_settings(cx);
|
||||
});
|
||||
let (dev_server_id, _) = split_dev_server_token(&access_token).unwrap();
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::default());
|
||||
let http = FakeHttpClient::with_404_response();
|
||||
let mut client = cx.update(|cx| Client::new(clock, http.clone(), cx));
|
||||
let server = self.server.clone();
|
||||
let db = self.app_state.db.clone();
|
||||
let connection_killers = self.connection_killers.clone();
|
||||
let forbid_connections = self.forbid_connections.clone();
|
||||
Arc::get_mut(&mut client)
|
||||
.unwrap()
|
||||
.set_id(1)
|
||||
.set_dev_server_token(client::DevServerToken(access_token.clone()))
|
||||
.override_establish_connection(move |credentials, cx| {
|
||||
assert_eq!(
|
||||
credentials,
|
||||
&Credentials::DevServer {
|
||||
token: client::DevServerToken(access_token.to_string())
|
||||
}
|
||||
);
|
||||
|
||||
let server = server.clone();
|
||||
let db = db.clone();
|
||||
let connection_killers = connection_killers.clone();
|
||||
let forbid_connections = forbid_connections.clone();
|
||||
cx.spawn(move |cx| async move {
|
||||
if forbid_connections.load(SeqCst) {
|
||||
Err(EstablishConnectionError::other(anyhow!(
|
||||
"server is forbidding connections"
|
||||
)))
|
||||
} else {
|
||||
let (client_conn, server_conn, killed) =
|
||||
Connection::in_memory(cx.background_executor().clone());
|
||||
let (connection_id_tx, connection_id_rx) = oneshot::channel();
|
||||
let dev_server = db
|
||||
.get_dev_server(dev_server_id)
|
||||
.await
|
||||
.expect("retrieving dev_server failed");
|
||||
cx.background_executor()
|
||||
.spawn(server.handle_connection(
|
||||
server_conn,
|
||||
"dev-server".to_string(),
|
||||
Principal::DevServer(dev_server),
|
||||
ZedVersion(SemanticVersion::new(1, 0, 0)),
|
||||
Some(connection_id_tx),
|
||||
Executor::Deterministic(cx.background_executor().clone()),
|
||||
))
|
||||
.detach();
|
||||
let connection_id = connection_id_rx.await.map_err(|e| {
|
||||
EstablishConnectionError::Other(anyhow!(
|
||||
"{} (is server shutting down?)",
|
||||
e
|
||||
))
|
||||
})?;
|
||||
connection_killers
|
||||
.lock()
|
||||
.insert(connection_id.into(), killed);
|
||||
Ok(client_conn)
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
|
||||
let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx));
|
||||
let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
|
||||
let app_state = Arc::new(workspace::AppState {
|
||||
client: client.clone(),
|
||||
user_store: user_store.clone(),
|
||||
workspace_store,
|
||||
languages: language_registry,
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _| Default::default(),
|
||||
node_runtime: FakeNodeRuntime::new(),
|
||||
});
|
||||
|
||||
cx.update(|cx| {
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
Project::init(&client, cx);
|
||||
client::init(&client, cx);
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
call::init(client.clone(), user_store.clone(), cx);
|
||||
channel::init(&client, user_store.clone(), cx);
|
||||
notifications::init(client.clone(), user_store, cx);
|
||||
collab_ui::init(&app_state, cx);
|
||||
file_finder::init(cx);
|
||||
menu::init();
|
||||
headless::init(
|
||||
client.clone(),
|
||||
headless::AppState {
|
||||
languages: app_state.languages.clone(),
|
||||
user_store: app_state.user_store.clone(),
|
||||
fs: fs.clone(),
|
||||
node_runtime: app_state.node_runtime.clone(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
TestClient {
|
||||
app_state,
|
||||
username: "dev-server".to_string(),
|
||||
channel_store: cx.read(ChannelStore::global).clone(),
|
||||
notification_store: cx.read(NotificationStore::global).clone(),
|
||||
state: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn disconnect_client(&self, peer_id: PeerId) {
|
||||
self.connection_killers
|
||||
.lock()
|
||||
|
||||
@@ -39,6 +39,7 @@ db.workspace = true
editor.workspace = true
emojis.workspace = true
extensions_ui.workspace = true
feature_flags.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
|
||||
|
||||
@@ -22,8 +22,9 @@ use std::{
|
||||
};
|
||||
use ui::{prelude::*, Label};
|
||||
use util::ResultExt;
|
||||
use workspace::notifications::NotificationId;
|
||||
use workspace::{
|
||||
item::{FollowableItem, Item, ItemEvent, ItemHandle},
|
||||
item::{FollowableItem, Item, ItemEvent, ItemHandle, TabContentParams},
|
||||
register_followable_item,
|
||||
searchable::SearchableItemHandle,
|
||||
ItemNavHistory, Pane, SaveIntent, Toast, ViewId, Workspace, WorkspaceId,
|
||||
@@ -269,7 +270,15 @@ impl ChannelView {
|
||||
cx.write_to_clipboard(ClipboardItem::new(link));
|
||||
self.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.show_toast(Toast::new(0, "Link copied to clipboard"), cx);
|
||||
struct CopyLinkForPositionToast;
|
||||
|
||||
workspace.show_toast(
|
||||
Toast::new(
|
||||
NotificationId::unique::<CopyLinkForPositionToast>(),
|
||||
"Link copied to clipboard",
|
||||
),
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
@@ -365,7 +374,7 @@ impl Item for ChannelView {
|
||||
}
|
||||
}
|
||||
|
||||
fn tab_content(&self, _: Option<usize>, selected: bool, cx: &WindowContext) -> AnyElement {
|
||||
fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement {
|
||||
let label = if let Some(channel) = self.channel(cx) {
|
||||
match (
|
||||
self.channel_buffer.read(cx).buffer().read(cx).read_only(),
|
||||
@@ -379,7 +388,7 @@ impl Item for ChannelView {
|
||||
"channel notes (disconnected)".to_string()
|
||||
};
|
||||
Label::new(label)
|
||||
.color(if selected {
|
||||
.color(if params.selected {
|
||||
Color::Default
|
||||
} else {
|
||||
Color::Muted
|
||||
|
||||
@@ -156,7 +156,7 @@ impl ChatPanel {
|
||||
}
|
||||
}
|
||||
}
|
||||
room::Event::Left { channel_id } => {
|
||||
room::Event::RoomLeft { channel_id } => {
|
||||
if channel_id == &this.channel_id(cx) {
|
||||
cx.emit(PanelEvent::Close)
|
||||
}
|
||||
@@ -615,6 +615,8 @@ impl ChatPanel {
|
||||
.child(
|
||||
IconButton::new(("reply", message_id), IconName::ReplyArrowRight)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.cancel_edit_message(cx);
|
||||
|
||||
this.message_editor.update(cx, |editor, cx| {
|
||||
editor.set_reply_to_message_id(message_id);
|
||||
editor.focus_handle(cx).focus(cx);
|
||||
@@ -636,6 +638,8 @@ impl ChatPanel {
|
||||
IconButton::new(("edit", message_id), IconName::Pencil)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.message_editor.update(cx, |editor, cx| {
|
||||
editor.clear_reply_to_message_id();
|
||||
|
||||
let message = this
|
||||
.active_chat()
|
||||
.and_then(|active_chat| {
|
||||
@@ -909,6 +913,10 @@ impl ChatPanel {
|
||||
|
||||
impl Render for ChatPanel {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let channel_id = self
|
||||
.active_chat
|
||||
.as_ref()
|
||||
.map(|(c, _)| c.read(cx).channel_id);
|
||||
let message_editor = self.message_editor.read(cx);
|
||||
|
||||
let reply_to_message_id = message_editor.reply_to_message_id();
|
||||
@@ -1018,6 +1026,7 @@ impl Render for ChatPanel {
|
||||
.child(
|
||||
div().flex_shrink().overflow_hidden().child(
|
||||
h_flex()
|
||||
.id(("reply-preview", reply_to_message_id))
|
||||
.child(Label::new("Replying to ").size(LabelSize::Small))
|
||||
.child(
|
||||
div().font_weight(FontWeight::BOLD).child(
|
||||
@@ -1027,7 +1036,20 @@ impl Render for ChatPanel {
|
||||
))
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
),
|
||||
)
|
||||
.when_some(channel_id, |this, channel_id| {
|
||||
this.cursor_pointer().on_click(cx.listener(
|
||||
move |chat_panel, _, cx| {
|
||||
chat_panel
|
||||
.select_channel(
|
||||
channel_id,
|
||||
reply_to_message_id.into(),
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx)
|
||||
},
|
||||
))
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
|
||||
@@ -9,12 +9,12 @@ use gpui::{
|
||||
Render, SharedString, Task, TextStyle, View, ViewContext, WeakView, WhiteSpace,
|
||||
};
|
||||
use language::{
|
||||
language_settings::SoftWrap, Anchor, Buffer, BufferSnapshot, CodeLabel, Completion,
|
||||
LanguageRegistry, LanguageServerId, ToOffset,
|
||||
language_settings::SoftWrap, Anchor, Buffer, BufferSnapshot, CodeLabel, LanguageRegistry,
|
||||
LanguageServerId, ToOffset,
|
||||
};
|
||||
use lazy_static::lazy_static;
|
||||
use parking_lot::RwLock;
|
||||
use project::search::SearchQuery;
|
||||
use project::{search::SearchQuery, Completion};
|
||||
use settings::Settings;
|
||||
use std::{ops::Range, sync::Arc, time::Duration};
|
||||
use theme::ThemeSettings;
|
||||
@@ -48,7 +48,7 @@ impl CompletionProvider for MessageEditorCompletionProvider {
|
||||
buffer: &Model<Buffer>,
|
||||
buffer_position: language::Anchor,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) -> Task<anyhow::Result<Vec<language::Completion>>> {
|
||||
) -> Task<anyhow::Result<Vec<Completion>>> {
|
||||
let Some(handle) = self.0.upgrade() else {
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
};
|
||||
@@ -60,7 +60,7 @@ impl CompletionProvider for MessageEditorCompletionProvider {
|
||||
fn resolve_completions(
|
||||
&self,
|
||||
_completion_indices: Vec<usize>,
|
||||
_completions: Arc<RwLock<Box<[language::Completion]>>>,
|
||||
_completions: Arc<RwLock<Box<[Completion]>>>,
|
||||
_cx: &mut ViewContext<Editor>,
|
||||
) -> Task<anyhow::Result<bool>> {
|
||||
Task::ready(Ok(false))
|
||||
|
||||
@@ -1,17 +1,20 @@
|
||||
mod channel_modal;
|
||||
mod contact_finder;
|
||||
mod dev_server_modal;
|
||||
|
||||
use self::channel_modal::ChannelModal;
|
||||
use self::dev_server_modal::DevServerModal;
|
||||
use crate::{
|
||||
channel_view::ChannelView, chat_panel::ChatPanel, face_pile::FacePile,
|
||||
CollaborationPanelSettings,
|
||||
};
|
||||
use call::ActiveCall;
|
||||
use channel::{Channel, ChannelEvent, ChannelStore};
|
||||
use channel::{Channel, ChannelEvent, ChannelStore, RemoteProject};
|
||||
use client::{ChannelId, Client, Contact, ProjectId, User, UserStore};
|
||||
use contact_finder::ContactFinder;
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
use editor::{Editor, EditorElement, EditorStyle};
|
||||
use feature_flags::{self, FeatureFlagAppExt};
|
||||
use fuzzy::{match_strings, StringMatchCandidate};
|
||||
use gpui::{
|
||||
actions, anchored, canvas, deferred, div, fill, list, point, prelude::*, px, AnyElement,
|
||||
@@ -24,7 +27,7 @@ use gpui::{
|
||||
use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrev};
|
||||
use project::{Fs, Project};
|
||||
use rpc::{
|
||||
proto::{self, ChannelVisibility, PeerId},
|
||||
proto::{self, ChannelVisibility, DevServerStatus, PeerId},
|
||||
ErrorCode, ErrorExt,
|
||||
};
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
@@ -188,6 +191,7 @@ enum ListEntry {
|
||||
id: ProjectId,
|
||||
name: SharedString,
|
||||
},
|
||||
RemoteProject(channel::RemoteProject),
|
||||
Contact {
|
||||
contact: Arc<Contact>,
|
||||
calling: bool,
|
||||
@@ -278,10 +282,23 @@ impl CollabPanel {
|
||||
.push(cx.observe(&this.user_store, |this, _, cx| {
|
||||
this.update_entries(true, cx)
|
||||
}));
|
||||
this.subscriptions
|
||||
.push(cx.observe(&this.channel_store, |this, _, cx| {
|
||||
let mut has_opened = false;
|
||||
this.subscriptions.push(cx.observe(
|
||||
&this.channel_store,
|
||||
move |this, channel_store, cx| {
|
||||
if !has_opened {
|
||||
if !channel_store
|
||||
.read(cx)
|
||||
.dev_servers_for_id(ChannelId(1))
|
||||
.is_empty()
|
||||
{
|
||||
this.manage_remote_projects(ChannelId(1), cx);
|
||||
has_opened = true;
|
||||
}
|
||||
}
|
||||
this.update_entries(true, cx)
|
||||
}));
|
||||
},
|
||||
));
|
||||
this.subscriptions
|
||||
.push(cx.observe(&active_call, |this, _, cx| this.update_entries(true, cx)));
|
||||
this.subscriptions.push(cx.subscribe(
|
||||
@@ -569,6 +586,7 @@ impl CollabPanel {
|
||||
}
|
||||
|
||||
let hosted_projects = channel_store.projects_for_id(channel.id);
|
||||
let remote_projects = channel_store.remote_projects_for_id(channel.id);
|
||||
let has_children = channel_store
|
||||
.channel_at_index(mat.candidate_id + 1)
|
||||
.map_or(false, |next_channel| {
|
||||
@@ -604,7 +622,13 @@ impl CollabPanel {
|
||||
}
|
||||
|
||||
for (name, id) in hosted_projects {
|
||||
self.entries.push(ListEntry::HostedProject { id, name })
|
||||
self.entries.push(ListEntry::HostedProject { id, name });
|
||||
}
|
||||
|
||||
if cx.has_flag::<feature_flags::Remoting>() {
|
||||
for remote_project in remote_projects {
|
||||
self.entries.push(ListEntry::RemoteProject(remote_project));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1065,6 +1089,59 @@ impl CollabPanel {
|
||||
.tooltip(move |cx| Tooltip::text("Open Project", cx))
|
||||
}
|
||||
|
||||
fn render_remote_project(
|
||||
&self,
|
||||
remote_project: &RemoteProject,
|
||||
is_selected: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> impl IntoElement {
|
||||
let id = remote_project.id;
|
||||
let name = remote_project.name.clone();
|
||||
let maybe_project_id = remote_project.project_id;
|
||||
|
||||
let dev_server = self
|
||||
.channel_store
|
||||
.read(cx)
|
||||
.find_dev_server_by_id(remote_project.dev_server_id);
|
||||
|
||||
let tooltip_text = SharedString::from(match dev_server {
|
||||
Some(dev_server) => format!("Open Remote Project ({})", dev_server.name),
|
||||
None => "Open Remote Project".to_string(),
|
||||
});
|
||||
|
||||
let dev_server_is_online = dev_server.map(|s| s.status) == Some(DevServerStatus::Online);
|
||||
|
||||
let dev_server_text_color = if dev_server_is_online {
|
||||
Color::Default
|
||||
} else {
|
||||
Color::Disabled
|
||||
};
|
||||
|
||||
ListItem::new(ElementId::NamedInteger(
|
||||
"remote-project".into(),
|
||||
id.0 as usize,
|
||||
))
|
||||
.indent_level(2)
|
||||
.indent_step_size(px(20.))
|
||||
.selected(is_selected)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
//TODO display error message if dev server is offline
|
||||
if dev_server_is_online {
|
||||
if let Some(project_id) = maybe_project_id {
|
||||
this.join_remote_project(project_id, cx);
|
||||
}
|
||||
}
|
||||
}))
|
||||
.start_slot(
|
||||
h_flex()
|
||||
.relative()
|
||||
.gap_1()
|
||||
.child(IconButton::new(0, IconName::FileTree).icon_color(dev_server_text_color)),
|
||||
)
|
||||
.child(Label::new(name.clone()).color(dev_server_text_color))
|
||||
.tooltip(move |cx| Tooltip::text(tooltip_text.clone(), cx))
|
||||
}
|
||||
|
||||
fn has_subchannels(&self, ix: usize) -> bool {
|
||||
self.entries.get(ix).map_or(false, |entry| {
|
||||
if let ListEntry::Channel { has_children, .. } = entry {
|
||||
@@ -1266,11 +1343,24 @@ impl CollabPanel {
|
||||
}
|
||||
|
||||
if self.channel_store.read(cx).is_root_channel(channel_id) {
|
||||
context_menu = context_menu.separator().entry(
|
||||
"Manage Members",
|
||||
None,
|
||||
cx.handler_for(&this, move |this, cx| this.manage_members(channel_id, cx)),
|
||||
)
|
||||
context_menu = context_menu
|
||||
.separator()
|
||||
.entry(
|
||||
"Manage Members",
|
||||
None,
|
||||
cx.handler_for(&this, move |this, cx| {
|
||||
this.manage_members(channel_id, cx)
|
||||
}),
|
||||
)
|
||||
.when(cx.has_flag::<feature_flags::Remoting>(), |context_menu| {
|
||||
context_menu.entry(
|
||||
"Manage Remote Projects",
|
||||
None,
|
||||
cx.handler_for(&this, move |this, cx| {
|
||||
this.manage_remote_projects(channel_id, cx)
|
||||
}),
|
||||
)
|
||||
})
|
||||
} else {
|
||||
context_menu = context_menu.entry(
|
||||
"Move this channel",
|
||||
@@ -1534,6 +1624,11 @@ impl CollabPanel {
|
||||
} => {
|
||||
// todo()
|
||||
}
|
||||
ListEntry::RemoteProject(project) => {
|
||||
if let Some(project_id) = project.project_id {
|
||||
self.join_remote_project(project_id, cx)
|
||||
}
|
||||
}
|
||||
|
||||
ListEntry::OutgoingRequest(_) => {}
|
||||
ListEntry::ChannelEditor { .. } => {}
|
||||
@@ -1706,6 +1801,18 @@ impl CollabPanel {
|
||||
self.show_channel_modal(channel_id, channel_modal::Mode::ManageMembers, cx);
|
||||
}
|
||||
|
||||
fn manage_remote_projects(&mut self, channel_id: ChannelId, cx: &mut ViewContext<Self>) {
|
||||
let channel_store = self.channel_store.clone();
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return;
|
||||
};
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace.toggle_modal(cx, |cx| {
|
||||
DevServerModal::new(channel_store.clone(), channel_id, cx)
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
fn remove_selected_channel(&mut self, _: &Remove, cx: &mut ViewContext<Self>) {
|
||||
if let Some(channel) = self.selected_channel() {
|
||||
self.remove_channel(channel.id, cx)
|
||||
@@ -2006,6 +2113,18 @@ impl CollabPanel {
|
||||
.detach_and_prompt_err("Failed to join channel", cx, |_, _| None)
|
||||
}
|
||||
|
||||
fn join_remote_project(&mut self, project_id: ProjectId, cx: &mut ViewContext<Self>) {
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let app_state = workspace.read(cx).app_state().clone();
|
||||
workspace::join_remote_project(project_id, app_state, cx).detach_and_prompt_err(
|
||||
"Failed to join project",
|
||||
cx,
|
||||
|_, _| None,
|
||||
)
|
||||
}
|
||||
|
||||
fn join_channel_chat(&mut self, channel_id: ChannelId, cx: &mut ViewContext<Self>) {
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return;
|
||||
@@ -2141,6 +2260,9 @@ impl CollabPanel {
|
||||
ListEntry::HostedProject { id, name } => self
|
||||
.render_channel_project(*id, name, is_selected, cx)
|
||||
.into_any_element(),
|
||||
ListEntry::RemoteProject(remote_project) => self
|
||||
.render_remote_project(remote_project, is_selected, cx)
|
||||
.into_any_element(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2519,7 +2641,9 @@ impl CollabPanel {
|
||||
const FACEPILE_LIMIT: usize = 3;
|
||||
let participants = self.channel_store.read(cx).channel_participants(channel_id);
|
||||
|
||||
let face_pile = if !participants.is_empty() {
|
||||
let face_pile = if participants.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let extra_count = participants.len().saturating_sub(FACEPILE_LIMIT);
|
||||
let result = FacePile::new(
|
||||
participants
|
||||
@@ -2540,8 +2664,6 @@ impl CollabPanel {
|
||||
);
|
||||
|
||||
Some(result)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let width = self.width.unwrap_or(px(240.));
|
||||
@@ -2883,6 +3005,11 @@ impl PartialEq for ListEntry {
|
||||
return id == other_id;
|
||||
}
|
||||
}
|
||||
ListEntry::RemoteProject(project) => {
|
||||
if let ListEntry::RemoteProject(other) = other {
|
||||
return project.id == other.id;
|
||||
}
|
||||
}
|
||||
ListEntry::ChannelNotes { channel_id } => {
|
||||
if let ListEntry::ChannelNotes {
|
||||
channel_id: other_id,
|
||||
|
||||
crates/collab_ui/src/collab_panel/dev_server_modal.rs (new file, 622 lines)
@@ -0,0 +1,622 @@
|
||||
use channel::{ChannelStore, DevServer, RemoteProject};
|
||||
use client::{ChannelId, DevServerId, RemoteProjectId};
|
||||
use editor::Editor;
|
||||
use gpui::{
|
||||
AppContext, ClipboardItem, DismissEvent, EventEmitter, FocusHandle, FocusableView, Model,
|
||||
ScrollHandle, Task, View, ViewContext,
|
||||
};
|
||||
use rpc::proto::{self, CreateDevServerResponse, DevServerStatus};
|
||||
use ui::{prelude::*, Indicator, List, ListHeader, ModalContent, ModalHeader, Tooltip};
|
||||
use util::ResultExt;
|
||||
use workspace::ModalView;
|
||||
|
||||
pub struct DevServerModal {
|
||||
mode: Mode,
|
||||
focus_handle: FocusHandle,
|
||||
scroll_handle: ScrollHandle,
|
||||
channel_store: Model<ChannelStore>,
|
||||
channel_id: ChannelId,
|
||||
remote_project_name_editor: View<Editor>,
|
||||
remote_project_path_editor: View<Editor>,
|
||||
dev_server_name_editor: View<Editor>,
|
||||
_subscriptions: [gpui::Subscription; 2],
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct CreateDevServer {
|
||||
creating: Option<Task<()>>,
|
||||
dev_server: Option<CreateDevServerResponse>,
|
||||
}
|
||||
|
||||
struct CreateRemoteProject {
|
||||
dev_server_id: DevServerId,
|
||||
creating: Option<Task<()>>,
|
||||
remote_project: Option<proto::RemoteProject>,
|
||||
}
|
||||
|
||||
enum Mode {
|
||||
Default,
|
||||
CreateRemoteProject(CreateRemoteProject),
|
||||
CreateDevServer(CreateDevServer),
|
||||
}
|
||||
|
||||
impl DevServerModal {
|
||||
pub fn new(
|
||||
channel_store: Model<ChannelStore>,
|
||||
channel_id: ChannelId,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let name_editor = cx.new_view(|cx| Editor::single_line(cx));
|
||||
let path_editor = cx.new_view(|cx| Editor::single_line(cx));
|
||||
let dev_server_name_editor = cx.new_view(|cx| {
|
||||
let mut editor = Editor::single_line(cx);
|
||||
editor.set_placeholder_text("Dev server name", cx);
|
||||
editor
|
||||
});
|
||||
|
||||
let focus_handle = cx.focus_handle();
|
||||
|
||||
let subscriptions = [
|
||||
cx.observe(&channel_store, |_, _, cx| {
|
||||
cx.notify();
|
||||
}),
|
||||
cx.on_focus_out(&focus_handle, |_, _cx| { /* cx.emit(DismissEvent) */ }),
|
||||
];
|
||||
|
||||
Self {
|
||||
mode: Mode::Default,
|
||||
focus_handle,
|
||||
scroll_handle: ScrollHandle::new(),
|
||||
channel_store,
|
||||
channel_id,
|
||||
remote_project_name_editor: name_editor,
|
||||
remote_project_path_editor: path_editor,
|
||||
dev_server_name_editor,
|
||||
_subscriptions: subscriptions,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_remote_project(
|
||||
&mut self,
|
||||
dev_server_id: DevServerId,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let channel_id = self.channel_id;
|
||||
let name = self
|
||||
.remote_project_name_editor
|
||||
.read(cx)
|
||||
.text(cx)
|
||||
.trim()
|
||||
.to_string();
|
||||
let path = self
|
||||
.remote_project_path_editor
|
||||
.read(cx)
|
||||
.text(cx)
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
if name == "" {
|
||||
return;
|
||||
}
|
||||
if path == "" {
|
||||
return;
|
||||
}
|
||||
|
||||
let create = self.channel_store.update(cx, |store, cx| {
|
||||
store.create_remote_project(channel_id, dev_server_id, name, path, cx)
|
||||
});
|
||||
|
||||
let task = cx.spawn(|this, mut cx| async move {
|
||||
let result = create.await;
|
||||
if let Err(e) = &result {
|
||||
cx.prompt(
|
||||
gpui::PromptLevel::Critical,
|
||||
"Failed to create project",
|
||||
Some(&format!("{:?}. Please try again.", e)),
|
||||
&["Ok"],
|
||||
)
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.mode = Mode::CreateRemoteProject(CreateRemoteProject {
|
||||
dev_server_id,
|
||||
creating: None,
|
||||
remote_project: result.ok().and_then(|r| r.remote_project),
|
||||
});
|
||||
})
|
||||
.log_err();
|
||||
});
|
||||
|
||||
self.mode = Mode::CreateRemoteProject(CreateRemoteProject {
|
||||
dev_server_id,
|
||||
creating: Some(task),
|
||||
remote_project: None,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn create_dev_server(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let name = self
|
||||
.dev_server_name_editor
|
||||
.read(cx)
|
||||
.text(cx)
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
if name == "" {
|
||||
return;
|
||||
}
|
||||
|
||||
let dev_server = self.channel_store.update(cx, |store, cx| {
|
||||
store.create_dev_server(self.channel_id, name.clone(), cx)
|
||||
});
|
||||
|
||||
let task = cx.spawn(|this, mut cx| async move {
|
||||
match dev_server.await {
|
||||
Ok(dev_server) => {
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.mode = Mode::CreateDevServer(CreateDevServer {
|
||||
creating: None,
|
||||
dev_server: Some(dev_server),
|
||||
});
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
Err(e) => {
|
||||
cx.prompt(
|
||||
gpui::PromptLevel::Critical,
|
||||
"Failed to create server",
|
||||
Some(&format!("{:?}. Please try again.", e)),
|
||||
&["Ok"],
|
||||
)
|
||||
.await
|
||||
.log_err();
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.mode = Mode::CreateDevServer(Default::default());
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
self.mode = Mode::CreateDevServer(CreateDevServer {
|
||||
creating: Some(task),
|
||||
dev_server: None,
|
||||
});
|
||||
cx.notify()
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &menu::Cancel, cx: &mut ViewContext<Self>) {
|
||||
match self.mode {
|
||||
Mode::Default => cx.emit(DismissEvent),
|
||||
Mode::CreateRemoteProject(_) | Mode::CreateDevServer(_) => {
|
||||
self.mode = Mode::Default;
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn render_dev_server(
|
||||
&mut self,
|
||||
dev_server: &DevServer,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> impl IntoElement {
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let dev_server_id = dev_server.id;
|
||||
let status = dev_server.status;
|
||||
|
||||
v_flex()
|
||||
.w_full()
|
||||
.child(
|
||||
h_flex()
|
||||
.group("dev-server")
|
||||
.justify_between()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
div()
|
||||
.id(("status", dev_server.id.0))
|
||||
.relative()
|
||||
.child(Icon::new(IconName::Server).size(IconSize::Small))
|
||||
.child(
|
||||
div().absolute().bottom_0().left(rems_from_px(8.0)).child(
|
||||
Indicator::dot().color(match status {
|
||||
DevServerStatus::Online => Color::Created,
|
||||
DevServerStatus::Offline => Color::Deleted,
|
||||
}),
|
||||
),
|
||||
)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::text(
|
||||
match status {
|
||||
DevServerStatus::Online => "Online",
|
||||
DevServerStatus::Offline => "Offline",
|
||||
},
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
)
|
||||
.child(dev_server.name.clone())
|
||||
.child(
|
||||
h_flex()
|
||||
.visible_on_hover("dev-server")
|
||||
.gap_1()
|
||||
.child(
|
||||
IconButton::new("edit-dev-server", IconName::Pencil)
|
||||
.disabled(true) //TODO implement this on the collab side
|
||||
.tooltip(|cx| {
|
||||
Tooltip::text("Coming Soon - Edit dev server", cx)
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("remove-dev-server", IconName::Trash)
|
||||
.disabled(true) //TODO implement this on the collab side
|
||||
.tooltip(|cx| {
|
||||
Tooltip::text("Coming Soon - Remove dev server", cx)
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex().gap_1().child(
|
||||
IconButton::new("add-remote-project", IconName::Plus)
|
||||
.tooltip(|cx| Tooltip::text("Add a remote project", cx))
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.mode = Mode::CreateRemoteProject(CreateRemoteProject {
|
||||
dev_server_id,
|
||||
creating: None,
|
||||
remote_project: None,
|
||||
});
|
||||
cx.notify();
|
||||
})),
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
.bg(cx.theme().colors().title_bar_background)
|
||||
.border()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.rounded_md()
|
||||
.my_1()
|
||||
.py_0p5()
|
||||
.px_3()
|
||||
.child(
|
||||
List::new().empty_message("No projects.").children(
|
||||
channel_store
|
||||
.remote_projects_for_id(dev_server.channel_id)
|
||||
.iter()
|
||||
.filter_map(|remote_project| {
|
||||
if remote_project.dev_server_id == dev_server.id {
|
||||
Some(self.render_remote_project(remote_project, cx))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
// .child(div().ml_8().child(
|
||||
// Button::new(("add-project", dev_server_id.0), "Add Project").on_click(cx.listener(
|
||||
// move |this, _, cx| {
|
||||
// this.mode = Mode::CreateRemoteProject(CreateRemoteProject {
|
||||
// dev_server_id,
|
||||
// creating: None,
|
||||
// remote_project: None,
|
||||
// });
|
||||
// cx.notify();
|
||||
// },
|
||||
// )),
|
||||
// ))
|
||||
}
|
||||
|
||||
fn render_remote_project(
|
||||
&mut self,
|
||||
project: &RemoteProject,
|
||||
_: &mut ViewContext<Self>,
|
||||
) -> impl IntoElement {
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Icon::new(IconName::FileTree))
|
||||
.child(Label::new(project.name.clone()))
|
||||
.child(Label::new(format!("({})", project.path.clone())).color(Color::Muted))
|
||||
}
|
||||
|
||||
fn render_create_dev_server(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let Mode::CreateDevServer(CreateDevServer {
|
||||
creating,
|
||||
dev_server,
|
||||
}) = &self.mode
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
self.dev_server_name_editor.update(cx, |editor, _| {
|
||||
editor.set_read_only(creating.is_some() || dev_server.is_some())
|
||||
});
|
||||
v_flex()
|
||||
.px_1()
|
||||
.pt_0p5()
|
||||
.gap_px()
|
||||
.child(
|
||||
v_flex().py_0p5().px_1().child(
|
||||
h_flex()
|
||||
.px_1()
|
||||
.py_0p5()
|
||||
.child(
|
||||
IconButton::new("back", IconName::ArrowLeft)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click(cx.listener(|this, _: &gpui::ClickEvent, cx| {
|
||||
this.mode = Mode::Default;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
.child(Headline::new("Register dev server")),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.ml_5()
|
||||
.gap_2()
|
||||
.child("Name")
|
||||
.child(self.dev_server_name_editor.clone())
|
||||
.on_action(
|
||||
cx.listener(|this, _: &menu::Confirm, cx| this.create_dev_server(cx)),
|
||||
)
|
||||
.when(creating.is_none() && dev_server.is_none(), |div| {
|
||||
div.child(
|
||||
Button::new("create-dev-server", "Create").on_click(cx.listener(
|
||||
move |this, _, cx| {
|
||||
this.create_dev_server(cx);
|
||||
},
|
||||
)),
|
||||
)
|
||||
})
|
||||
.when(creating.is_some() && dev_server.is_none(), |div| {
|
||||
div.child(Button::new("create-dev-server", "Creating...").disabled(true))
|
||||
}),
|
||||
)
|
||||
.when_some(dev_server.clone(), |div, dev_server| {
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let status = channel_store
|
||||
.find_dev_server_by_id(DevServerId(dev_server.dev_server_id))
|
||||
.map(|server| server.status)
|
||||
.unwrap_or(DevServerStatus::Offline);
|
||||
let instructions = SharedString::from(format!(
|
||||
"zed --dev-server-token {}",
|
||||
dev_server.access_token
|
||||
));
|
||||
div.child(
|
||||
v_flex()
|
||||
.ml_8()
|
||||
.gap_2()
|
||||
.child(Label::new(format!(
|
||||
"Please log into `{}` and run:",
|
||||
dev_server.name
|
||||
)))
|
||||
.child(instructions.clone())
|
||||
.child(
|
||||
IconButton::new("copy-access-token", IconName::Copy)
|
||||
.on_click(cx.listener(move |_, _, cx| {
|
||||
cx.write_to_clipboard(ClipboardItem::new(
|
||||
instructions.to_string(),
|
||||
))
|
||||
}))
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(|cx| Tooltip::text("Copy access token", cx)),
|
||||
)
|
||||
.when(status == DevServerStatus::Offline, |this| {
|
||||
this.child(Label::new("Waiting for connection..."))
|
||||
})
|
||||
.when(status == DevServerStatus::Online, |this| {
|
||||
this.child(Label::new("Connection established! 🎊")).child(
|
||||
Button::new("done", "Done").on_click(cx.listener(|this, _, cx| {
|
||||
this.mode = Mode::Default;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
}),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn render_default(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let dev_servers = channel_store.dev_servers_for_id(self.channel_id);
|
||||
// let dev_servers = Vec::new();
|
||||
|
||||
v_flex()
|
||||
.id("scroll-container")
|
||||
.h_full()
|
||||
.overflow_y_scroll()
|
||||
.track_scroll(&self.scroll_handle)
|
||||
.px_1()
|
||||
.pt_0p5()
|
||||
.gap_px()
|
||||
.child(
|
||||
ModalHeader::new("Manage Remote Project")
|
||||
.child(Headline::new("Remote Projects").size(HeadlineSize::Small)),
|
||||
)
|
||||
.child(
|
||||
ModalContent::new().child(
|
||||
List::new()
|
||||
.empty_message("No dev servers registered.")
|
||||
.header(Some(
|
||||
ListHeader::new("Dev Servers").end_slot(
|
||||
Button::new("register-dev-server-button", "New Server")
|
||||
.icon(IconName::Plus)
|
||||
.icon_position(IconPosition::Start)
|
||||
.tooltip(|cx| Tooltip::text("Register a new dev server", cx))
|
||||
.on_click(cx.listener(|this, _, cx| {
|
||||
this.mode = Mode::CreateDevServer(Default::default());
|
||||
this.dev_server_name_editor
|
||||
.read(cx)
|
||||
.focus_handle(cx)
|
||||
.focus(cx);
|
||||
cx.notify();
|
||||
})),
|
||||
),
|
||||
))
|
||||
.children(dev_servers.iter().map(|dev_server| {
|
||||
self.render_dev_server(dev_server, cx).into_any_element()
|
||||
})),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_create_project(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let Mode::CreateRemoteProject(CreateRemoteProject {
|
||||
dev_server_id,
|
||||
creating,
|
||||
remote_project,
|
||||
}) = &self.mode
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let (dev_server_name, dev_server_status) = channel_store
|
||||
.find_dev_server_by_id(*dev_server_id)
|
||||
.map(|server| (server.name.clone(), server.status))
|
||||
.unwrap_or((SharedString::from(""), DevServerStatus::Offline));
|
||||
v_flex()
|
||||
.px_1()
|
||||
.pt_0p5()
|
||||
.gap_px()
|
||||
.child(
|
||||
ModalHeader::new("Manage Remote Project")
|
||||
.child(Headline::new("Manage Remote Projects")),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.py_0p5()
|
||||
.px_1()
|
||||
.child(div().px_1().py_0p5().child(
|
||||
IconButton::new("back", IconName::ArrowLeft).on_click(cx.listener(
|
||||
|this, _, cx| {
|
||||
this.mode = Mode::Default;
|
||||
cx.notify()
|
||||
},
|
||||
)),
|
||||
))
|
||||
.child("Add Project..."),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.ml_5()
|
||||
.gap_2()
|
||||
.child(
|
||||
div()
|
||||
.id(("status", dev_server_id.0))
|
||||
.relative()
|
||||
.child(Icon::new(IconName::Server))
|
||||
.child(div().absolute().bottom_0().left(rems_from_px(12.0)).child(
|
||||
Indicator::dot().color(match dev_server_status {
|
||||
DevServerStatus::Online => Color::Created,
|
||||
DevServerStatus::Offline => Color::Deleted,
|
||||
}),
|
||||
))
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::text(
|
||||
match dev_server_status {
|
||||
DevServerStatus::Online => "Online",
|
||||
DevServerStatus::Offline => "Offline",
|
||||
},
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
)
|
||||
.child(dev_server_name.clone()),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.ml_5()
|
||||
.gap_2()
|
||||
.child("Name")
|
||||
.child(self.remote_project_name_editor.clone())
|
||||
.on_action(cx.listener(|this, _: &menu::Confirm, cx| {
|
||||
cx.focus_view(&this.remote_project_path_editor)
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.ml_5()
|
||||
.gap_2()
|
||||
.child("Path")
|
||||
.child(self.remote_project_path_editor.clone())
|
||||
.on_action(
|
||||
cx.listener(|this, _: &menu::Confirm, cx| this.create_dev_server(cx)),
|
||||
)
|
||||
.when(creating.is_none() && remote_project.is_none(), |div| {
|
||||
div.child(Button::new("create-remote-server", "Create").on_click({
|
||||
let dev_server_id = *dev_server_id;
|
||||
cx.listener(move |this, _, cx| {
|
||||
this.create_remote_project(dev_server_id, cx)
|
||||
})
|
||||
}))
|
||||
})
|
||||
.when(creating.is_some(), |div| {
|
||||
div.child(Button::new("create-dev-server", "Creating...").disabled(true))
|
||||
}),
|
||||
)
|
||||
.when_some(remote_project.clone(), |div, remote_project| {
|
||||
let channel_store = self.channel_store.read(cx);
|
||||
let status = channel_store
|
||||
.find_remote_project_by_id(RemoteProjectId(remote_project.id))
|
||||
.map(|project| {
|
||||
if project.project_id.is_some() {
|
||||
DevServerStatus::Online
|
||||
} else {
|
||||
DevServerStatus::Offline
|
||||
}
|
||||
})
|
||||
.unwrap_or(DevServerStatus::Offline);
|
||||
div.child(
|
||||
v_flex()
|
||||
.ml_5()
|
||||
.ml_8()
|
||||
.gap_2()
|
||||
.when(status == DevServerStatus::Offline, |this| {
|
||||
this.child(Label::new("Waiting for project..."))
|
||||
})
|
||||
.when(status == DevServerStatus::Online, |this| {
|
||||
this.child(Label::new("Project online! 🎊")).child(
|
||||
Button::new("done", "Done").on_click(cx.listener(|this, _, cx| {
|
||||
this.mode = Mode::Default;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
}),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
impl ModalView for DevServerModal {}
|
||||
|
||||
impl FocusableView for DevServerModal {
|
||||
fn focus_handle(&self, _cx: &AppContext) -> FocusHandle {
|
||||
self.focus_handle.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<DismissEvent> for DevServerModal {}
|
||||
|
||||
impl Render for DevServerModal {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
div()
|
||||
.track_focus(&self.focus_handle)
|
||||
.elevation_3(cx)
|
||||
.key_context("DevServerModal")
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.pb_4()
|
||||
.w(rems(34.))
|
||||
.min_h(rems(20.))
|
||||
.max_h(rems(40.))
|
||||
.child(match &self.mode {
|
||||
Mode::Default => self.render_default(cx).into_any_element(),
|
||||
Mode::CreateRemoteProject(_) => self.render_create_project(cx).into_any_element(),
|
||||
Mode::CreateDevServer(_) => self.render_create_dev_server(cx).into_any_element(),
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -21,6 +21,7 @@ use std::{sync::Arc, time::Duration};
|
||||
use time::{OffsetDateTime, UtcOffset};
|
||||
use ui::{h_flex, prelude::*, v_flex, Avatar, Button, Icon, IconButton, IconName, Label, Tooltip};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use workspace::notifications::NotificationId;
|
||||
use workspace::{
|
||||
dock::{DockPosition, Panel, PanelEvent},
|
||||
Workspace,
|
||||
@@ -534,8 +535,10 @@ impl NotificationPanel {
|
||||
|
||||
self.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.dismiss_notification::<NotificationToast>(0, cx);
|
||||
workspace.show_notification(0, cx, |cx| {
|
||||
let id = NotificationId::unique::<NotificationToast>();
|
||||
|
||||
workspace.dismiss_notification(&id, cx);
|
||||
workspace.show_notification(id, cx, |cx| {
|
||||
let workspace = cx.view().downgrade();
|
||||
cx.new_view(|_| NotificationToast {
|
||||
notification_id,
|
||||
@@ -554,7 +557,8 @@ impl NotificationPanel {
|
||||
self.current_notification_toast.take();
|
||||
self.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.dismiss_notification::<NotificationToast>(0, cx)
|
||||
let id = NotificationId::unique::<NotificationToast>();
|
||||
workspace.dismiss_notification(&id, cx)
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
@@ -58,7 +58,7 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
|
||||
}
|
||||
}
|
||||
|
||||
room::Event::Left { .. } => {
|
||||
room::Event::RoomLeft { .. } => {
|
||||
for (_, windows) in notification_windows.drain() {
|
||||
for window in windows {
|
||||
window
|
||||
|
||||
@@ -2,7 +2,7 @@ use anyhow;
|
||||
use gpui::Pixels;
|
||||
use schemars::JsonSchema;
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
use settings::{Settings, SettingsSources};
|
||||
use workspace::dock::DockPosition;
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
@@ -53,48 +53,52 @@ pub struct MessageEditorSettings {
|
||||
|
||||
impl Settings for CollaborationPanelSettings {
|
||||
const KEY: Option<&'static str> = Some("collaboration_panel");
|
||||
|
||||
type FileContent = PanelSettingsContent;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
sources: SettingsSources<Self::FileContent>,
|
||||
_: &mut gpui::AppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
sources.json_merge()
|
||||
}
|
||||
}
|
||||
|
||||
impl Settings for ChatPanelSettings {
|
||||
const KEY: Option<&'static str> = Some("chat_panel");
|
||||
|
||||
type FileContent = PanelSettingsContent;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
sources: SettingsSources<Self::FileContent>,
|
||||
_: &mut gpui::AppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
sources.json_merge()
|
||||
}
|
||||
}
|
||||
|
||||
impl Settings for NotificationPanelSettings {
|
||||
const KEY: Option<&'static str> = Some("notification_panel");
|
||||
|
||||
type FileContent = PanelSettingsContent;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
sources: SettingsSources<Self::FileContent>,
|
||||
_: &mut gpui::AppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
sources.json_merge()
|
||||
}
|
||||
}
|
||||
|
||||
impl Settings for MessageEditorSettings {
|
||||
const KEY: Option<&'static str> = Some("message_editor");
|
||||
|
||||
type FileContent = MessageEditorSettings;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
sources: SettingsSources<Self::FileContent>,
|
||||
_: &mut gpui::AppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
sources.json_merge()
|
||||
}
|
||||
}
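The four impls above all migrate to the same shape: the separate `default_value`/`user_values` arguments are bundled into a `SettingsSources`, and the JSON merge moves onto it. Below is a minimal sketch of a settings type after this migration, assuming the `settings` crate items shown in this diff; the `ScratchpadSettings` type itself is hypothetical.

```rust
use anyhow::Result;
use gpui::AppContext;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

#[derive(Deserialize, Debug)]
pub struct ScratchpadSettings {
    pub include_warnings: bool,
}

#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct ScratchpadSettingsContent {
    include_warnings: Option<bool>,
}

impl Settings for ScratchpadSettings {
    const KEY: Option<&'static str> = Some("scratchpad");

    type FileContent = ScratchpadSettingsContent;

    // `SettingsSources` carries the default value plus every user override;
    // `json_merge` replaces the old `Self::load_via_json_merge(default, users)`.
    fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
        sources.json_merge()
    }
}
```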
@@ -1042,9 +1042,7 @@ mod tests {
|
||||
async fn test_buffer_management(cx: &mut TestAppContext) {
|
||||
let (copilot, mut lsp) = Copilot::fake(cx);
|
||||
|
||||
let buffer_1 = cx.new_model(|cx| {
|
||||
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "Hello")
|
||||
});
|
||||
let buffer_1 = cx.new_model(|cx| Buffer::local("Hello", cx));
|
||||
let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.entity_id().as_u64())
|
||||
.parse()
|
||||
.unwrap();
|
||||
@@ -1062,13 +1060,7 @@ mod tests {
|
||||
}
|
||||
);
|
||||
|
||||
let buffer_2 = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
"Goodbye",
|
||||
)
|
||||
});
|
||||
let buffer_2 = cx.new_model(|cx| Buffer::local("Goodbye", cx));
|
||||
let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.entity_id().as_u64())
|
||||
.parse()
|
||||
.unwrap();
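The same buffer-construction change repeats through the tests below, so here is the before/after in isolation; a sketch copied from the hunks above, assuming the `language::Buffer` and gpui test context used in this file.

```rust
// Before: tests had to mint a replica id and a BufferId from the entity id.
let buffer_1 = cx.new_model(|cx| {
    Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "Hello")
});

// After: a local (non-collaborative) buffer needs only its text and the context.
let buffer_1 = cx.new_model(|cx| Buffer::local("Hello", cx));
let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.entity_id().as_u64())
    .parse()
    .unwrap();
```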
@@ -14,6 +14,7 @@ use language::{
|
||||
use settings::{update_settings_file, Settings, SettingsStore};
|
||||
use std::{path::Path, sync::Arc};
|
||||
use util::{paths, ResultExt};
|
||||
use workspace::notifications::NotificationId;
|
||||
use workspace::{
|
||||
create_and_open_local_file,
|
||||
item::ItemHandle,
|
||||
@@ -25,8 +26,10 @@ use workspace::{
|
||||
use zed_actions::OpenBrowser;
|
||||
|
||||
const COPILOT_SETTINGS_URL: &str = "https://github.com/settings/copilot";
|
||||
const COPILOT_STARTING_TOAST_ID: usize = 1337;
|
||||
const COPILOT_ERROR_TOAST_ID: usize = 1338;
|
||||
|
||||
struct CopilotStartingToast;
|
||||
|
||||
struct CopilotErrorToast;
|
||||
|
||||
pub struct CopilotButton {
|
||||
editor_subscription: Option<(Subscription, usize)>,
|
||||
@@ -74,7 +77,7 @@ impl Render for CopilotButton {
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.show_toast(
|
||||
Toast::new(
|
||||
COPILOT_ERROR_TOAST_ID,
|
||||
NotificationId::unique::<CopilotErrorToast>(),
|
||||
format!("Copilot can't be started: {}", e),
|
||||
)
|
||||
.on_click(
|
||||
@@ -350,7 +353,10 @@ pub fn initiate_sign_in(cx: &mut WindowContext) {
|
||||
|
||||
let Ok(workspace) = workspace.update(cx, |workspace, cx| {
|
||||
workspace.show_toast(
|
||||
Toast::new(COPILOT_STARTING_TOAST_ID, "Copilot is starting..."),
|
||||
Toast::new(
|
||||
NotificationId::unique::<CopilotStartingToast>(),
|
||||
"Copilot is starting...",
|
||||
),
|
||||
cx,
|
||||
);
|
||||
workspace.weak_handle()
|
||||
@@ -364,11 +370,17 @@ pub fn initiate_sign_in(cx: &mut WindowContext) {
|
||||
workspace
|
||||
.update(&mut cx, |workspace, cx| match copilot.read(cx).status() {
|
||||
Status::Authorized => workspace.show_toast(
|
||||
Toast::new(COPILOT_STARTING_TOAST_ID, "Copilot has started!"),
|
||||
Toast::new(
|
||||
NotificationId::unique::<CopilotStartingToast>(),
|
||||
"Copilot has started!",
|
||||
),
|
||||
cx,
|
||||
),
|
||||
_ => {
|
||||
workspace.dismiss_toast(COPILOT_STARTING_TOAST_ID, cx);
|
||||
workspace.dismiss_toast(
|
||||
&NotificationId::unique::<CopilotStartingToast>(),
|
||||
cx,
|
||||
);
|
||||
copilot
|
||||
.update(cx, |copilot, cx| copilot.sign_in(cx))
|
||||
.detach_and_log_err(cx);
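Toast and notification identifiers switch from hand-picked integers (`1337`, `1338`, `0x156a5f9ee`, ...) to ids derived from zero-sized marker types. A reduced sketch of the pattern, using only calls that appear in this diff; `MyFeatureToast` is a hypothetical marker.

```rust
// One marker type per kind of toast replaces the old integer constants.
struct MyFeatureToast;

workspace.update(cx, |workspace, cx| {
    // Dismissing and showing now key off the same typed id.
    workspace.dismiss_toast(&NotificationId::unique::<MyFeatureToast>(), cx);
    workspace.show_toast(
        Toast::new(
            NotificationId::unique::<MyFeatureToast>(),
            "My feature is starting...",
        ),
        cx,
    );
});
```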
@@ -272,7 +272,7 @@ mod tests {
|
||||
use indoc::indoc;
|
||||
use language::{
|
||||
language_settings::{AllLanguageSettings, AllLanguageSettingsContent},
|
||||
BufferId, Point,
|
||||
Point,
|
||||
};
|
||||
use project::Project;
|
||||
use serde_json::json;
|
||||
@@ -729,20 +729,8 @@ mod tests {
|
||||
|
||||
let (copilot, copilot_lsp) = Copilot::fake(cx);
|
||||
|
||||
let buffer_1 = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
"a = 1\nb = 2\n",
|
||||
)
|
||||
});
|
||||
let buffer_2 = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
"c = 3\nd = 4\n",
|
||||
)
|
||||
});
|
||||
let buffer_1 = cx.new_model(|cx| Buffer::local("a = 1\nb = 2\n", cx));
|
||||
let buffer_2 = cx.new_model(|cx| Buffer::local("c = 3\nd = 4\n", cx));
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite);
|
||||
multibuffer.push_excerpts(
|
||||
|
||||
@@ -32,14 +32,13 @@ use std::{
|
||||
mem,
|
||||
ops::Range,
|
||||
path::PathBuf,
|
||||
sync::Arc,
|
||||
};
|
||||
use theme::ActiveTheme;
|
||||
pub use toolbar_controls::ToolbarControls;
|
||||
use ui::{h_flex, prelude::*, Icon, IconName, Label};
|
||||
use util::TryFutureExt;
|
||||
use workspace::{
|
||||
item::{BreadcrumbText, Item, ItemEvent, ItemHandle},
|
||||
item::{BreadcrumbText, Item, ItemEvent, ItemHandle, TabContentParams},
|
||||
ItemNavHistory, Pane, ToolbarItemLocation, Workspace,
|
||||
};
|
||||
|
||||
@@ -646,10 +645,10 @@ impl Item for ProjectDiagnosticsEditor {
|
||||
Some("Project Diagnostics".into())
|
||||
}
|
||||
|
||||
fn tab_content(&self, _detail: Option<usize>, selected: bool, _: &WindowContext) -> AnyElement {
|
||||
fn tab_content(&self, params: TabContentParams, _: &WindowContext) -> AnyElement {
|
||||
if self.summary.error_count == 0 && self.summary.warning_count == 0 {
|
||||
Label::new("No problems")
|
||||
.color(if selected {
|
||||
.color(if params.selected {
|
||||
Color::Default
|
||||
} else {
|
||||
Color::Muted
|
||||
@@ -664,7 +663,7 @@ impl Item for ProjectDiagnosticsEditor {
|
||||
.gap_1()
|
||||
.child(Icon::new(IconName::XCircle).color(Color::Error))
|
||||
.child(Label::new(self.summary.error_count.to_string()).color(
|
||||
if selected {
|
||||
if params.selected {
|
||||
Color::Default
|
||||
} else {
|
||||
Color::Muted
|
||||
@@ -678,7 +677,7 @@ impl Item for ProjectDiagnosticsEditor {
|
||||
.gap_1()
|
||||
.child(Icon::new(IconName::ExclamationTriangle).color(Color::Warning))
|
||||
.child(Label::new(self.summary.warning_count.to_string()).color(
|
||||
if selected {
|
||||
if params.selected {
|
||||
Color::Default
|
||||
} else {
|
||||
Color::Muted
|
||||
@@ -805,7 +804,7 @@ impl Item for ProjectDiagnosticsEditor {
|
||||
fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
|
||||
let (message, code_ranges) = highlight_diagnostic_message(&diagnostic);
|
||||
let message: SharedString = message;
|
||||
Arc::new(move |cx| {
|
||||
Box::new(move |cx| {
|
||||
let highlight_style: HighlightStyle = cx.theme().colors().text_accent.into();
|
||||
h_flex()
|
||||
.id("diagnostic header")
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
use anyhow::Result;
|
||||
use gpui::AppContext;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources};
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct ProjectDiagnosticsSettings {
|
||||
@@ -15,18 +18,11 @@ pub struct ProjectDiagnosticsSettingsContent {
|
||||
include_warnings: Option<bool>,
|
||||
}
|
||||
|
||||
impl settings::Settings for ProjectDiagnosticsSettings {
|
||||
impl Settings for ProjectDiagnosticsSettings {
|
||||
const KEY: Option<&'static str> = Some("diagnostics");
|
||||
type FileContent = ProjectDiagnosticsSettingsContent;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
_cx: &mut gpui::AppContext,
|
||||
) -> anyhow::Result<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
|
||||
sources.json_merge()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,7 +26,7 @@ mod wrap_map;
|
||||
use crate::EditorStyle;
|
||||
use crate::{hover_links::InlayHighlight, movement::TextLayoutDetails, InlayId};
|
||||
pub use block_map::{BlockMap, BlockPoint};
|
||||
use collections::{BTreeMap, HashMap, HashSet};
|
||||
use collections::{HashMap, HashSet};
|
||||
use fold_map::FoldMap;
|
||||
use gpui::{Font, HighlightStyle, Hsla, LineLayout, Model, ModelContext, Pixels, UnderlineStyle};
|
||||
use inlay_map::InlayMap;
|
||||
@@ -63,7 +63,7 @@ pub trait ToDisplayPoint {
|
||||
}
|
||||
|
||||
type TextHighlights = TreeMap<Option<TypeId>, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>;
|
||||
type InlayHighlights = BTreeMap<TypeId, HashMap<InlayId, (HighlightStyle, InlayHighlight)>>;
|
||||
type InlayHighlights = TreeMap<TypeId, TreeMap<InlayId, (HighlightStyle, InlayHighlight)>>;
|
||||
|
||||
/// Decides how text in a [`MultiBuffer`] should be displayed in a buffer, handling inlay hints,
|
||||
/// folding, hard tabs, soft wrapping, custom blocks (like diagnostics), and highlighting.
|
||||
@@ -257,10 +257,15 @@ impl DisplayMap {
|
||||
style: HighlightStyle,
|
||||
) {
|
||||
for highlight in highlights {
|
||||
self.inlay_highlights
|
||||
.entry(type_id)
|
||||
.or_default()
|
||||
.insert(highlight.inlay, (style, highlight));
|
||||
let update = self.inlay_highlights.update(&type_id, |highlights| {
|
||||
highlights.insert(highlight.inlay, (style, highlight.clone()))
|
||||
});
|
||||
if update.is_none() {
|
||||
self.inlay_highlights.insert(
|
||||
type_id,
|
||||
TreeMap::from_ordered_entries([(highlight.inlay, (style, highlight))]),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
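`TreeMap` (unlike `HashMap`) has no `entry().or_default()` API, which is why the code above updates in place first and only inserts a fresh single-entry map when the outer key was missing. A standalone sketch of that idiom, restricted to the `TreeMap` methods visible in this diff (`update`, `insert`, `from_ordered_entries`); the score-keeping types are made up for illustration.

```rust
use sum_tree::TreeMap;

/// Record `score` for `player` in `round`, creating the player's inner map on
/// first use. Mirrors the update-then-insert pattern used for inlay highlights.
fn record_score(
    scores: &mut TreeMap<&'static str, TreeMap<u32, u32>>,
    player: &'static str,
    round: u32,
    score: u32,
) {
    // `update` runs the closure only if the key exists and returns None otherwise.
    let updated = scores.update(&player, |rounds| rounds.insert(round, score));
    if updated.is_none() {
        scores.insert(player, TreeMap::from_ordered_entries([(round, score)]));
    }
}
```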
@@ -354,6 +359,7 @@ pub struct HighlightedChunk<'a> {
|
||||
pub is_tab: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DisplaySnapshot {
|
||||
pub buffer_snapshot: MultiBufferSnapshot,
|
||||
pub fold_snapshot: fold_map::FoldSnapshot,
|
||||
@@ -427,7 +433,10 @@ impl DisplaySnapshot {
|
||||
} else if range.start.row == self.max_buffer_row()
|
||||
|| (range.end.column > 0 && range.end.row == self.max_buffer_row())
|
||||
{
|
||||
Point::new(range.start.row - 1, self.line_len(range.start.row - 1))
|
||||
Point::new(
|
||||
range.start.row - 1,
|
||||
self.buffer_snapshot.line_len(range.start.row - 1),
|
||||
)
|
||||
} else {
|
||||
self.prev_line_boundary(range.start).0
|
||||
};
|
||||
@@ -657,7 +666,7 @@ impl DisplaySnapshot {
|
||||
layout_line.closest_index_for_x(x) as u32
|
||||
}
|
||||
|
||||
pub fn chars_at(
|
||||
pub fn display_chars_at(
|
||||
&self,
|
||||
mut point: DisplayPoint,
|
||||
) -> impl Iterator<Item = (char, DisplayPoint)> + '_ {
|
||||
@@ -684,62 +693,26 @@ impl DisplaySnapshot {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn reverse_chars_at(
|
||||
pub fn buffer_chars_at(&self, mut offset: usize) -> impl Iterator<Item = (char, usize)> + '_ {
|
||||
self.buffer_snapshot.chars_at(offset).map(move |ch| {
|
||||
let ret = (ch, offset);
|
||||
offset += ch.len_utf8();
|
||||
ret
|
||||
})
|
||||
}
|
||||
|
||||
pub fn reverse_buffer_chars_at(
|
||||
&self,
|
||||
mut point: DisplayPoint,
|
||||
) -> impl Iterator<Item = (char, DisplayPoint)> + '_ {
|
||||
point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left));
|
||||
self.reverse_text_chunks(point.row())
|
||||
.flat_map(|chunk| chunk.chars().rev())
|
||||
.skip_while({
|
||||
let mut column = self.line_len(point.row());
|
||||
if self.max_point().row() > point.row() {
|
||||
column += 1;
|
||||
}
|
||||
|
||||
move |char| {
|
||||
let at_point = column <= point.column();
|
||||
column = column.saturating_sub(char.len_utf8() as u32);
|
||||
!at_point
|
||||
}
|
||||
})
|
||||
mut offset: usize,
|
||||
) -> impl Iterator<Item = (char, usize)> + '_ {
|
||||
self.buffer_snapshot
|
||||
.reversed_chars_at(offset)
|
||||
.map(move |ch| {
|
||||
if ch == '\n' {
|
||||
*point.row_mut() -= 1;
|
||||
*point.column_mut() = self.line_len(point.row());
|
||||
} else {
|
||||
*point.column_mut() = point.column().saturating_sub(ch.len_utf8() as u32);
|
||||
}
|
||||
(ch, point)
|
||||
offset -= ch.len_utf8();
|
||||
(ch, offset)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn column_to_chars(&self, display_row: u32, target: u32) -> u32 {
|
||||
let mut count = 0;
|
||||
let mut column = 0;
|
||||
for (c, _) in self.chars_at(DisplayPoint::new(display_row, 0)) {
|
||||
if column >= target {
|
||||
break;
|
||||
}
|
||||
count += 1;
|
||||
column += c.len_utf8() as u32;
|
||||
}
|
||||
count
|
||||
}
|
||||
|
||||
pub fn column_from_chars(&self, display_row: u32, char_count: u32) -> u32 {
|
||||
let mut column = 0;
|
||||
|
||||
for (count, (c, _)) in self.chars_at(DisplayPoint::new(display_row, 0)).enumerate() {
|
||||
if c == '\n' || count >= char_count as usize {
|
||||
break;
|
||||
}
|
||||
column += c.len_utf8() as u32;
|
||||
}
|
||||
|
||||
column
|
||||
}
|
||||
|
||||
pub fn clip_point(&self, point: DisplayPoint, bias: Bias) -> DisplayPoint {
|
||||
let mut clipped = self.block_snapshot.clip_point(point.0, bias);
|
||||
if self.clip_at_line_ends {
|
||||
@@ -808,20 +781,6 @@ impl DisplaySnapshot {
|
||||
result
|
||||
}
|
||||
|
||||
pub fn line_indent(&self, display_row: u32) -> (u32, bool) {
|
||||
let mut indent = 0;
|
||||
let mut is_blank = true;
|
||||
for (c, _) in self.chars_at(DisplayPoint::new(display_row, 0)) {
|
||||
if c == ' ' {
|
||||
indent += 1;
|
||||
} else {
|
||||
is_blank = c == '\n';
|
||||
break;
|
||||
}
|
||||
}
|
||||
(indent, is_blank)
|
||||
}
|
||||
|
||||
pub fn line_indent_for_buffer_row(&self, buffer_row: u32) -> (u32, bool) {
|
||||
let (buffer, range) = self
|
||||
.buffer_snapshot
|
||||
@@ -922,7 +881,7 @@ impl DisplaySnapshot {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub(crate) fn inlay_highlights<Tag: ?Sized + 'static>(
|
||||
&self,
|
||||
) -> Option<&HashMap<InlayId, (HighlightStyle, InlayHighlight)>> {
|
||||
) -> Option<&TreeMap<InlayId, (HighlightStyle, InlayHighlight)>> {
|
||||
let type_id = TypeId::of::<Tag>();
|
||||
self.inlay_highlights.get(&type_id)
|
||||
}
|
||||
@@ -1025,7 +984,6 @@ pub mod tests {
|
||||
use settings::SettingsStore;
|
||||
use smol::stream::StreamExt;
|
||||
use std::{env, sync::Arc};
|
||||
use text::BufferId;
|
||||
use theme::{LoadThemes, SyntaxTheme};
|
||||
use util::test::{marked_text_ranges, sample_text};
|
||||
use Bias::*;
|
||||
@@ -1143,7 +1101,7 @@ pub mod tests {
|
||||
position,
|
||||
height,
|
||||
disposition,
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
@@ -1486,10 +1444,7 @@ pub mod tests {
|
||||
|
||||
cx.update(|cx| init_test(cx, |s| s.defaults.tab_size = Some(2.try_into().unwrap())));
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
|
||||
.with_language(language, cx)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
|
||||
cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
|
||||
@@ -1574,10 +1529,7 @@ pub mod tests {
|
||||
|
||||
cx.update(|cx| init_test(cx, |_| {}));
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
|
||||
.with_language(language, cx)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
|
||||
cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
|
||||
@@ -1643,10 +1595,7 @@ pub mod tests {
|
||||
|
||||
let (text, highlighted_ranges) = marked_text_ranges(r#"constˇ «a»: B = "c «d»""#, false);
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
|
||||
.with_language(language, cx)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
|
||||
cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
|
||||
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
|
||||
@@ -37,6 +37,7 @@ pub struct BlockMap {
|
||||
|
||||
pub struct BlockMapWriter<'a>(&'a mut BlockMap);
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct BlockSnapshot {
|
||||
wrap_snapshot: WrapSnapshot,
|
||||
transforms: SumTree<Transform>,
|
||||
@@ -54,7 +55,7 @@ struct BlockRow(u32);
|
||||
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
|
||||
struct WrapRow(u32);
|
||||
|
||||
pub type RenderBlock = Arc<dyn Fn(&mut BlockContext) -> AnyElement>;
|
||||
pub type RenderBlock = Box<dyn Send + Fn(&mut BlockContext) -> AnyElement>;
|
||||
|
||||
pub struct Block {
|
||||
id: BlockId,
|
||||
@@ -65,15 +66,11 @@ pub struct Block {
|
||||
disposition: BlockDisposition,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct BlockProperties<P>
|
||||
where
|
||||
P: Clone,
|
||||
{
|
||||
pub struct BlockProperties<P> {
|
||||
pub position: P,
|
||||
pub height: u8,
|
||||
pub style: BlockStyle,
|
||||
pub render: Arc<dyn Fn(&mut BlockContext) -> AnyElement>,
|
||||
pub render: Box<dyn Send + Fn(&mut BlockContext) -> AnyElement>,
|
||||
pub disposition: BlockDisposition,
|
||||
}
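`RenderBlock` changes from `Arc<dyn Fn(...)>` to `Box<dyn Send + Fn(...)>`, and `BlockProperties` drops its `P: Clone` bound, so properties are now built with boxed closures, as in the tests below. A minimal sketch grounded in those tests:

```rust
// A block renders via a boxed, Send closure instead of an Arc'd one.
let block = BlockProperties {
    style: BlockStyle::Fixed,
    position: buffer_snapshot.anchor_after(Point::new(1, 0)),
    height: 1,
    disposition: BlockDisposition::Above,
    render: Box::new(|_cx| div().into_any()),
};
```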
@@ -1041,21 +1038,21 @@ mod tests {
|
||||
position: buffer_snapshot.anchor_after(Point::new(1, 0)),
|
||||
height: 1,
|
||||
disposition: BlockDisposition::Above,
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
position: buffer_snapshot.anchor_after(Point::new(1, 2)),
|
||||
height: 2,
|
||||
disposition: BlockDisposition::Above,
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
position: buffer_snapshot.anchor_after(Point::new(3, 3)),
|
||||
height: 3,
|
||||
disposition: BlockDisposition::Below,
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
},
|
||||
]);
|
||||
|
||||
@@ -1209,14 +1206,14 @@ mod tests {
|
||||
style: BlockStyle::Fixed,
|
||||
position: buffer_snapshot.anchor_after(Point::new(1, 12)),
|
||||
disposition: BlockDisposition::Above,
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
height: 1,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
position: buffer_snapshot.anchor_after(Point::new(1, 1)),
|
||||
disposition: BlockDisposition::Below,
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
height: 1,
|
||||
},
|
||||
]);
|
||||
@@ -1311,7 +1308,7 @@ mod tests {
|
||||
position,
|
||||
height,
|
||||
disposition,
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
@@ -1325,7 +1322,14 @@ mod tests {
|
||||
wrap_map.sync(tab_snapshot, tab_edits, cx)
|
||||
});
|
||||
let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
|
||||
let block_ids = block_map.insert(block_properties.clone());
|
||||
let block_ids =
|
||||
block_map.insert(block_properties.iter().map(|props| BlockProperties {
|
||||
position: props.position,
|
||||
height: props.height,
|
||||
style: props.style,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
disposition: props.disposition,
|
||||
}));
|
||||
for (block_id, props) in block_ids.into_iter().zip(block_properties) {
|
||||
custom_blocks.push((block_id, props));
|
||||
}
|
||||
|
||||
@@ -233,11 +233,11 @@ impl FoldMap {
|
||||
}
|
||||
|
||||
pub fn set_ellipses_color(&mut self, color: Hsla) -> bool {
|
||||
if self.ellipses_color != Some(color) {
|
||||
if self.ellipses_color == Some(color) {
|
||||
false
|
||||
} else {
|
||||
self.ellipses_color = Some(color);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1695,38 +1695,39 @@ mod tests {
|
||||
while inlay_indices.len() < inlay_highlight_count {
|
||||
inlay_indices.insert(rng.gen_range(0..inlays.len()));
|
||||
}
|
||||
let new_highlights = inlay_indices
|
||||
.into_iter()
|
||||
.filter_map(|i| {
|
||||
let (_, inlay) = &inlays[i];
|
||||
let inlay_text_len = inlay.text.len();
|
||||
match inlay_text_len {
|
||||
0 => None,
|
||||
1 => Some(InlayHighlight {
|
||||
inlay: inlay.id,
|
||||
inlay_position: inlay.position,
|
||||
range: 0..1,
|
||||
}),
|
||||
n => {
|
||||
let inlay_text = inlay.text.to_string();
|
||||
let mut highlight_end = rng.gen_range(1..n);
|
||||
let mut highlight_start = rng.gen_range(0..highlight_end);
|
||||
while !inlay_text.is_char_boundary(highlight_end) {
|
||||
highlight_end += 1;
|
||||
}
|
||||
while !inlay_text.is_char_boundary(highlight_start) {
|
||||
highlight_start -= 1;
|
||||
}
|
||||
Some(InlayHighlight {
|
||||
let new_highlights = TreeMap::from_ordered_entries(
|
||||
inlay_indices
|
||||
.into_iter()
|
||||
.filter_map(|i| {
|
||||
let (_, inlay) = &inlays[i];
|
||||
let inlay_text_len = inlay.text.len();
|
||||
match inlay_text_len {
|
||||
0 => None,
|
||||
1 => Some(InlayHighlight {
|
||||
inlay: inlay.id,
|
||||
inlay_position: inlay.position,
|
||||
range: highlight_start..highlight_end,
|
||||
})
|
||||
range: 0..1,
|
||||
}),
|
||||
n => {
|
||||
let inlay_text = inlay.text.to_string();
|
||||
let mut highlight_end = rng.gen_range(1..n);
|
||||
let mut highlight_start = rng.gen_range(0..highlight_end);
|
||||
while !inlay_text.is_char_boundary(highlight_end) {
|
||||
highlight_end += 1;
|
||||
}
|
||||
while !inlay_text.is_char_boundary(highlight_start) {
|
||||
highlight_start -= 1;
|
||||
}
|
||||
Some(InlayHighlight {
|
||||
inlay: inlay.id,
|
||||
inlay_position: inlay.position,
|
||||
range: highlight_start..highlight_end,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.map(|highlight| (highlight.inlay, (HighlightStyle::default(), highlight)))
|
||||
.collect();
|
||||
})
|
||||
.map(|highlight| (highlight.inlay, (HighlightStyle::default(), highlight))),
|
||||
);
|
||||
log::info!("highlighting inlay ranges {new_highlights:?}");
|
||||
inlay_highlights.insert(TypeId::of::<()>(), new_highlights);
|
||||
}
|
||||
|
||||
@@ -126,12 +126,12 @@ impl WrapMap {
|
||||
) -> bool {
|
||||
let font_with_size = (font, font_size);
|
||||
|
||||
if font_with_size != self.font_with_size {
|
||||
if font_with_size == self.font_with_size {
|
||||
false
|
||||
} else {
|
||||
self.font_with_size = font_with_size;
|
||||
self.rewrap(cx);
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -64,9 +64,9 @@ use gpui::{
|
||||
AnyElement, AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds,
|
||||
ClipboardItem, Context, DispatchPhase, ElementId, EventEmitter, FocusHandle, FocusableView,
|
||||
FontId, FontStyle, FontWeight, HighlightStyle, Hsla, InteractiveText, KeyContext, Model,
|
||||
MouseButton, ParentElement, Pixels, Render, SharedString, StrikethroughStyle, Styled,
|
||||
StyledText, Subscription, Task, TextStyle, UnderlineStyle, UniformListScrollHandle, View,
|
||||
ViewContext, ViewInputHandler, VisualContext, WeakView, WhiteSpace, WindowContext,
|
||||
MouseButton, PaintQuad, ParentElement, Pixels, Render, SharedString, Size, StrikethroughStyle,
|
||||
Styled, StyledText, Subscription, Task, TextStyle, UnderlineStyle, UniformListScrollHandle,
|
||||
View, ViewContext, ViewInputHandler, VisualContext, WeakView, WhiteSpace, WindowContext,
|
||||
};
|
||||
use highlight_matching_bracket::refresh_matching_bracket_highlights;
|
||||
use hover_popover::{hide_hover, HoverState};
|
||||
@@ -74,12 +74,12 @@ use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy};
|
||||
pub use inline_completion_provider::*;
|
||||
pub use items::MAX_TAB_TITLE_LEN;
|
||||
use itertools::Itertools;
|
||||
use language::{char_kind, CharKind};
|
||||
use language::{
|
||||
char_kind,
|
||||
language_settings::{self, all_language_settings, InlayHintSettings},
|
||||
markdown, point_from_lsp, AutoindentMode, BracketPair, Buffer, Capability, CodeAction,
|
||||
CodeLabel, Completion, CursorShape, Diagnostic, Documentation, IndentKind, IndentSize,
|
||||
Language, OffsetRangeExt, Point, Selection, SelectionGoal, TransactionId,
|
||||
markdown, point_from_lsp, AutoindentMode, BracketPair, Buffer, Capability, CharKind, CodeLabel,
|
||||
CursorShape, Diagnostic, Documentation, IndentKind, IndentSize, Language, OffsetRangeExt,
|
||||
Point, Selection, SelectionGoal, TransactionId,
|
||||
};
|
||||
|
||||
use hover_links::{HoverLink, HoveredLinkState, InlayHighlight};
|
||||
@@ -94,7 +94,9 @@ pub use multi_buffer::{
|
||||
use ordered_float::OrderedFloat;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use project::project_settings::{GitGutterSetting, ProjectSettings};
|
||||
use project::{FormatTrigger, Item, Location, Project, ProjectPath, ProjectTransaction};
|
||||
use project::{
|
||||
CodeAction, Completion, FormatTrigger, Item, Location, Project, ProjectPath, ProjectTransaction,
|
||||
};
|
||||
use rand::prelude::*;
|
||||
use rpc::proto::*;
|
||||
use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide};
|
||||
@@ -116,6 +118,7 @@ use std::{
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
pub use sum_tree::Bias;
|
||||
use sum_tree::TreeMap;
|
||||
use text::{BufferId, OffsetUtf16, Rope};
|
||||
use theme::{
|
||||
observe_buffer_font_size_adjustment, ActiveTheme, PlayerColor, StatusColors, SyntaxTheme,
|
||||
@@ -126,6 +129,7 @@ use ui::{
|
||||
Tooltip,
|
||||
};
|
||||
use util::{defer, maybe, post_inc, RangeExt, ResultExt, TryFutureExt};
|
||||
use workspace::notifications::NotificationId;
|
||||
use workspace::Toast;
|
||||
use workspace::{
|
||||
searchable::SearchEvent, ItemNavHistory, SplitDirection, ViewId, Workspace, WorkspaceId,
|
||||
@@ -355,7 +359,31 @@ type CompletionId = usize;
|
||||
// type GetFieldEditorTheme = dyn Fn(&theme::Theme) -> theme::FieldEditor;
|
||||
// type OverrideTextStyle = dyn Fn(&EditorStyle) -> Option<HighlightStyle>;
|
||||
|
||||
type BackgroundHighlight = (fn(&ThemeColors) -> Hsla, Vec<Range<Anchor>>);
|
||||
type BackgroundHighlight = (fn(&ThemeColors) -> Hsla, Arc<[Range<Anchor>]>);
|
||||
|
||||
struct ScrollbarMarkerState {
|
||||
scrollbar_size: Size<Pixels>,
|
||||
dirty: bool,
|
||||
markers: Arc<[PaintQuad]>,
|
||||
pending_refresh: Option<Task<Result<()>>>,
|
||||
}
|
||||
|
||||
impl ScrollbarMarkerState {
|
||||
fn should_refresh(&self, scrollbar_size: Size<Pixels>) -> bool {
|
||||
self.pending_refresh.is_none() && (self.scrollbar_size != scrollbar_size || self.dirty)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ScrollbarMarkerState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
scrollbar_size: Size::default(),
|
||||
dirty: false,
|
||||
markers: Arc::from([]),
|
||||
pending_refresh: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Zed's primary text input `View`, allowing users to edit a [`MultiBuffer`]
|
||||
///
|
||||
@@ -394,7 +422,8 @@ pub struct Editor {
|
||||
placeholder_text: Option<Arc<str>>,
|
||||
highlight_order: usize,
|
||||
highlighted_rows: HashMap<TypeId, Vec<(usize, Range<Anchor>, Hsla)>>,
|
||||
background_highlights: BTreeMap<TypeId, BackgroundHighlight>,
|
||||
background_highlights: TreeMap<TypeId, BackgroundHighlight>,
|
||||
scrollbar_marker_state: ScrollbarMarkerState,
|
||||
nav_history: Option<ItemNavHistory>,
|
||||
context_menu: RwLock<Option<ContextMenu>>,
|
||||
mouse_context_menu: Option<MouseContextMenu>,
|
||||
@@ -444,6 +473,7 @@ pub struct Editor {
|
||||
>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct EditorSnapshot {
|
||||
pub mode: EditorMode,
|
||||
show_gutter: bool,
|
||||
@@ -1299,37 +1329,19 @@ impl InlayHintRefreshReason {
|
||||
|
||||
impl Editor {
|
||||
pub fn single_line(cx: &mut ViewContext<Self>) -> Self {
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
String::new(),
|
||||
)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local("", cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
Self::new(EditorMode::SingleLine, buffer, None, cx)
|
||||
}
|
||||
|
||||
pub fn multi_line(cx: &mut ViewContext<Self>) -> Self {
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
String::new(),
|
||||
)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local("", cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
Self::new(EditorMode::Full, buffer, None, cx)
|
||||
}
|
||||
|
||||
pub fn auto_height(max_lines: usize, cx: &mut ViewContext<Self>) -> Self {
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
String::new(),
|
||||
)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local("", cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
Self::new(EditorMode::AutoHeight { max_lines }, buffer, None, cx)
|
||||
}
|
||||
@@ -1440,6 +1452,7 @@ impl Editor {
|
||||
highlight_order: 0,
|
||||
highlighted_rows: HashMap::default(),
|
||||
background_highlights: Default::default(),
|
||||
scrollbar_marker_state: ScrollbarMarkerState::default(),
|
||||
nav_history: None,
|
||||
context_menu: RwLock::new(None),
|
||||
mouse_context_menu: None,
|
||||
@@ -3730,7 +3743,7 @@ impl Editor {
|
||||
workspace.add_item_to_active_pane(Box::new(editor.clone()), cx);
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.highlight_background::<Self>(
|
||||
ranges_to_highlight,
|
||||
&ranges_to_highlight,
|
||||
|theme| theme.editor_highlighted_line_background,
|
||||
cx,
|
||||
);
|
||||
@@ -3860,12 +3873,12 @@ impl Editor {
|
||||
}
|
||||
|
||||
this.highlight_background::<DocumentHighlightRead>(
|
||||
read_ranges,
|
||||
&read_ranges,
|
||||
|theme| theme.editor_document_highlight_read_background,
|
||||
cx,
|
||||
);
|
||||
this.highlight_background::<DocumentHighlightWrite>(
|
||||
write_ranges,
|
||||
&write_ranges,
|
||||
|theme| theme.editor_document_highlight_write_background,
|
||||
cx,
|
||||
);
|
||||
@@ -7064,14 +7077,18 @@ impl Editor {
|
||||
}
|
||||
|
||||
// If the language has line comments, toggle those.
|
||||
if let Some(full_comment_prefix) = language
|
||||
if let Some(full_comment_prefixes) = language
|
||||
.line_comment_prefixes()
|
||||
.and_then(|prefixes| prefixes.first())
|
||||
.filter(|prefixes| !prefixes.is_empty())
|
||||
{
|
||||
// Split the comment prefix's trailing whitespace into a separate string,
|
||||
// as that portion won't be used for detecting if a line is a comment.
|
||||
let comment_prefix = full_comment_prefix.trim_end_matches(' ');
|
||||
let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..];
|
||||
let first_prefix = full_comment_prefixes
|
||||
.first()
|
||||
.expect("prefixes is non-empty");
|
||||
let prefix_trimmed_lengths = full_comment_prefixes
|
||||
.iter()
|
||||
.map(|p| p.trim_end_matches(' ').len())
|
||||
.collect::<SmallVec<[usize; 4]>>();
|
||||
|
||||
let mut all_selection_lines_are_comments = true;
|
||||
|
||||
for row in start_row..=end_row {
|
||||
@@ -7079,15 +7096,24 @@ impl Editor {
|
||||
continue;
|
||||
}
|
||||
|
||||
let prefix_range = comment_prefix_range(
|
||||
snapshot.deref(),
|
||||
row,
|
||||
comment_prefix,
|
||||
comment_prefix_whitespace,
|
||||
);
|
||||
let prefix_range = full_comment_prefixes
|
||||
.iter()
|
||||
.zip(prefix_trimmed_lengths.iter().copied())
|
||||
.map(|(prefix, trimmed_prefix_len)| {
|
||||
comment_prefix_range(
|
||||
snapshot.deref(),
|
||||
row,
|
||||
&prefix[..trimmed_prefix_len],
|
||||
&prefix[trimmed_prefix_len..],
|
||||
)
|
||||
})
|
||||
.max_by_key(|range| range.end.column - range.start.column)
|
||||
.expect("prefixes is non-empty");
|
||||
|
||||
if prefix_range.is_empty() {
|
||||
all_selection_lines_are_comments = false;
|
||||
}
|
||||
|
||||
selection_edit_ranges.push(prefix_range);
|
||||
}
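With several line-comment prefixes in play, each line now gets the prefix range that covers the most columns (so `/// ` wins over `// ` on a doc comment). A standalone sketch of that selection step over plain strings, with zed's anchor ranges replaced by simple lengths for illustration:

```rust
/// Length of the longest comment prefix matching `line` (ignoring leading
/// whitespace and each prefix's trailing space), or None if no prefix matches.
fn longest_comment_prefix_len(line: &str, prefixes: &[&str]) -> Option<usize> {
    let trimmed = line.trim_start();
    prefixes
        .iter()
        .map(|prefix| prefix.trim_end_matches(' '))
        .filter(|prefix| trimmed.starts_with(prefix))
        .map(|prefix| prefix.len())
        .max()
}

fn main() {
    let prefixes = ["// ", "//! ", "/// "];
    assert_eq!(longest_comment_prefix_len("    /// docs", &prefixes), Some(3));
    assert_eq!(longest_comment_prefix_len("    // code", &prefixes), Some(2));
    assert_eq!(longest_comment_prefix_len("let x = 1;", &prefixes), None);
}
```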
@@ -7101,12 +7127,12 @@ impl Editor {
|
||||
} else {
|
||||
let min_column = selection_edit_ranges
|
||||
.iter()
|
||||
.map(|r| r.start.column)
|
||||
.map(|range| range.start.column)
|
||||
.min()
|
||||
.unwrap_or(0);
|
||||
edits.extend(selection_edit_ranges.iter().map(|range| {
|
||||
let position = Point::new(range.start.row, min_column);
|
||||
(position..position, full_comment_prefix.clone())
|
||||
(position..position, first_prefix.clone())
|
||||
}));
|
||||
}
|
||||
} else if let Some((full_comment_prefix, comment_suffix)) =
|
||||
@@ -7967,7 +7993,7 @@ impl Editor {
|
||||
});
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.highlight_background::<Self>(
|
||||
ranges_to_highlight,
|
||||
&ranges_to_highlight,
|
||||
|theme| theme.editor_highlighted_line_background,
|
||||
cx,
|
||||
);
|
||||
@@ -8058,15 +8084,15 @@ impl Editor {
|
||||
editor
|
||||
});
|
||||
|
||||
let ranges = this
|
||||
.clear_background_highlights::<DocumentHighlightWrite>(cx)
|
||||
.into_iter()
|
||||
.flat_map(|(_, ranges)| ranges.into_iter())
|
||||
.chain(
|
||||
this.clear_background_highlights::<DocumentHighlightRead>(cx)
|
||||
.into_iter()
|
||||
.flat_map(|(_, ranges)| ranges.into_iter()),
|
||||
)
|
||||
let write_highlights =
|
||||
this.clear_background_highlights::<DocumentHighlightWrite>(cx);
|
||||
let read_highlights =
|
||||
this.clear_background_highlights::<DocumentHighlightRead>(cx);
|
||||
let ranges = write_highlights
|
||||
.iter()
|
||||
.flat_map(|(_, ranges)| ranges.iter())
|
||||
.chain(read_highlights.iter().flat_map(|(_, ranges)| ranges.iter()))
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
this.highlight_text::<Rename>(
|
||||
@@ -8084,7 +8110,7 @@ impl Editor {
|
||||
style: BlockStyle::Flex,
|
||||
position: range.start,
|
||||
height: 1,
|
||||
render: Arc::new({
|
||||
render: Box::new({
|
||||
let rename_editor = rename_editor.clone();
|
||||
move |cx: &mut BlockContext| {
|
||||
let mut text_style = cx.editor_style.text.clone();
|
||||
@@ -8807,16 +8833,16 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn toggle_git_blame(&mut self, _: &ToggleGitBlame, cx: &mut ViewContext<Self>) {
|
||||
if !self.show_git_blame {
|
||||
if self.show_git_blame {
|
||||
self.blame_subscription.take();
|
||||
self.blame.take();
|
||||
self.show_git_blame = false
|
||||
} else {
|
||||
if let Err(error) = self.show_git_blame_internal(cx) {
|
||||
log::error!("failed to toggle on 'git blame': {}", error);
|
||||
return;
|
||||
}
|
||||
self.show_git_blame = true
|
||||
} else {
|
||||
self.blame_subscription.take();
|
||||
self.blame.take();
|
||||
self.show_git_blame = false
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
@@ -8897,7 +8923,12 @@ impl Editor {
|
||||
|
||||
if let Some(workspace) = self.workspace() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace.show_toast(Toast::new(0x156a5f9ee, message), cx)
|
||||
struct CopyPermalinkToLine;
|
||||
|
||||
workspace.show_toast(
|
||||
Toast::new(NotificationId::unique::<CopyPermalinkToLine>(), message),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -8918,7 +8949,12 @@ impl Editor {
|
||||
|
||||
if let Some(workspace) = self.workspace() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace.show_toast(Toast::new(0x45a8978, message), cx)
|
||||
struct OpenPermalinkToLine;
|
||||
|
||||
workspace.show_toast(
|
||||
Toast::new(NotificationId::unique::<OpenPermalinkToLine>(), message),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -9016,13 +9052,13 @@ impl Editor {
|
||||
|
||||
pub fn highlight_background<T: 'static>(
|
||||
&mut self,
|
||||
ranges: Vec<Range<Anchor>>,
|
||||
ranges: &[Range<Anchor>],
|
||||
color_fetcher: fn(&ThemeColors) -> Hsla,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let snapshot = self.snapshot(cx);
|
||||
// this is to try and catch a panic sooner
|
||||
for range in &ranges {
|
||||
for range in ranges {
|
||||
snapshot
|
||||
.buffer_snapshot
|
||||
.summary_for_anchor::<usize>(&range.start);
|
||||
@@ -9032,16 +9068,21 @@ impl Editor {
|
||||
}
|
||||
|
||||
self.background_highlights
|
||||
.insert(TypeId::of::<T>(), (color_fetcher, ranges));
|
||||
.insert(TypeId::of::<T>(), (color_fetcher, Arc::from(ranges)));
|
||||
self.scrollbar_marker_state.dirty = true;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn clear_background_highlights<T: 'static>(
|
||||
&mut self,
|
||||
_cx: &mut ViewContext<Self>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<BackgroundHighlight> {
|
||||
let text_highlights = self.background_highlights.remove(&TypeId::of::<T>());
|
||||
text_highlights
|
||||
let text_highlights = self.background_highlights.remove(&TypeId::of::<T>())?;
|
||||
if !text_highlights.1.is_empty() {
|
||||
self.scrollbar_marker_state.dirty = true;
|
||||
cx.notify();
|
||||
}
|
||||
Some(text_highlights)
|
||||
}
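`highlight_background` now borrows a slice and stores it as an `Arc<[Range<Anchor>]>`, and clearing highlights marks the scrollbar markers dirty so they get repainted. A sketch of a call site after this change, copied from the usages earlier in this diff:

```rust
// Pass the ranges by reference; the editor clones them into an Arc'd slice.
editor.highlight_background::<DocumentHighlightRead>(
    &read_ranges,
    |theme| theme.editor_document_highlight_read_background,
    cx,
);

// Clearing returns the removed highlight set (if any) and schedules a
// scrollbar marker refresh when it was non-empty.
let cleared = editor.clear_background_highlights::<DocumentHighlightRead>(cx);
```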
#[cfg(feature = "test-support")]
|
||||
@@ -9295,6 +9336,7 @@ impl Editor {
|
||||
multi_buffer::Event::Edited {
|
||||
singleton_buffer_edited,
|
||||
} => {
|
||||
self.scrollbar_marker_state.dirty = true;
|
||||
self.refresh_active_diagnostics(cx);
|
||||
self.refresh_code_actions(cx);
|
||||
if self.has_active_inline_completion(cx) {
|
||||
@@ -9362,10 +9404,16 @@ impl Editor {
|
||||
multi_buffer::Event::FileHandleChanged | multi_buffer::Event::Reloaded => {
|
||||
cx.emit(EditorEvent::TitleChanged)
|
||||
}
|
||||
multi_buffer::Event::DiffBaseChanged => cx.emit(EditorEvent::DiffBaseChanged),
|
||||
multi_buffer::Event::DiffBaseChanged => {
|
||||
self.scrollbar_marker_state.dirty = true;
|
||||
cx.emit(EditorEvent::DiffBaseChanged);
|
||||
cx.notify();
|
||||
}
|
||||
multi_buffer::Event::Closed => cx.emit(EditorEvent::Closed),
|
||||
multi_buffer::Event::DiagnosticsUpdated => {
|
||||
self.refresh_active_diagnostics(cx);
|
||||
self.scrollbar_marker_state.dirty = true;
|
||||
cx.notify();
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
@@ -10133,7 +10181,7 @@ impl Render for Editor {
|
||||
background,
|
||||
local_player: cx.theme().players().local(),
|
||||
text: text_style,
|
||||
scrollbar_width: px(12.),
|
||||
scrollbar_width: px(13.),
|
||||
syntax: cx.theme().syntax().clone(),
|
||||
status: cx.theme().status().clone(),
|
||||
inlay_hints_style: HighlightStyle {
|
||||
@@ -10526,7 +10574,7 @@ impl InvalidationRegion for SnippetState {
|
||||
pub fn diagnostic_block_renderer(diagnostic: Diagnostic, _is_valid: bool) -> RenderBlock {
|
||||
let (text_without_backticks, code_ranges) = highlight_diagnostic_message(&diagnostic);
|
||||
|
||||
Arc::new(move |cx: &mut BlockContext| {
|
||||
Box::new(move |cx: &mut BlockContext| {
|
||||
let group_id: SharedString = cx.block_id.to_string().into();
|
||||
|
||||
let mut text_style = cx.text_style().clone();
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use gpui::AppContext;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
use settings::{Settings, SettingsSources};
|
||||
|
||||
#[derive(Deserialize, Clone)]
|
||||
pub struct EditorSettings {
|
||||
@@ -224,10 +225,9 @@ impl Settings for EditorSettings {
|
||||
type FileContent = EditorSettingsContent;
|
||||
|
||||
fn load(
|
||||
default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
_: &mut gpui::AppContext,
|
||||
sources: SettingsSources<Self::FileContent>,
|
||||
_: &mut AppContext,
|
||||
) -> anyhow::Result<Self> {
|
||||
Self::load_via_json_merge(default_value, user_values)
|
||||
sources.json_merge()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -38,8 +38,7 @@ fn test_edit_events(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
let mut buffer =
|
||||
language::Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "123456");
|
||||
let mut buffer = language::Buffer::local("123456", cx);
|
||||
buffer.set_group_interval(Duration::from_secs(1));
|
||||
buffer
|
||||
});
|
||||
@@ -154,9 +153,7 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut now = Instant::now();
|
||||
let buffer = cx.new_model(|cx| {
|
||||
language::Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "123456")
|
||||
});
|
||||
let buffer = cx.new_model(|cx| language::Buffer::local("123456", cx));
|
||||
let group_interval = buffer.update(cx, |buffer, _| buffer.transaction_group_interval());
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let editor = cx.add_window(|cx| build_editor(buffer.clone(), cx));
|
||||
@@ -227,8 +224,7 @@ fn test_ime_composition(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
let mut buffer =
|
||||
language::Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "abcde");
|
||||
let mut buffer = language::Buffer::local("abcde", cx);
|
||||
// Ensure automatic grouping doesn't occur.
|
||||
buffer.set_group_interval(Duration::ZERO);
|
||||
buffer
|
||||
@@ -2344,21 +2340,10 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) {
|
||||
None,
|
||||
));
|
||||
|
||||
let toml_buffer = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
"a = 1\nb = 2\n",
|
||||
)
|
||||
.with_language(toml_language, cx)
|
||||
});
|
||||
let toml_buffer =
|
||||
cx.new_model(|cx| Buffer::local("a = 1\nb = 2\n", cx).with_language(toml_language, cx));
|
||||
let rust_buffer = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
"const c: usize = 3;\n",
|
||||
)
|
||||
.with_language(rust_language, cx)
|
||||
Buffer::local("const c: usize = 3;\n", cx).with_language(rust_language, cx)
|
||||
});
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
|
||||
@@ -2685,6 +2670,65 @@ fn test_join_lines_with_multi_selection(cx: &mut TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_join_lines_with_git_diff_base(
|
||||
executor: BackgroundExecutor,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
let diff_base = r#"
|
||||
Line 0
|
||||
Line 1
|
||||
Line 2
|
||||
Line 3
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
cx.set_state(
|
||||
&r#"
|
||||
ˇLine 0
|
||||
Line 1
|
||||
Line 2
|
||||
Line 3
|
||||
"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
cx.set_diff_base(Some(&diff_base));
|
||||
executor.run_until_parked();
|
||||
|
||||
// Join lines
|
||||
cx.update_editor(|editor, cx| {
|
||||
editor.join_lines(&JoinLines, cx);
|
||||
});
|
||||
executor.run_until_parked();
|
||||
|
||||
cx.assert_editor_state(
|
||||
&r#"
|
||||
Line 0ˇ Line 1
|
||||
Line 2
|
||||
Line 3
|
||||
"#
|
||||
.unindent(),
|
||||
);
|
||||
// Join again
|
||||
cx.update_editor(|editor, cx| {
|
||||
editor.join_lines(&JoinLines, cx);
|
||||
});
|
||||
executor.run_until_parked();
|
||||
|
||||
cx.assert_editor_state(
|
||||
&r#"
|
||||
Line 0 Line 1ˇ Line 2
|
||||
Line 3
|
||||
"#
|
||||
.unindent(),
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -3317,7 +3361,7 @@ fn test_move_line_up_down_with_blocks(cx: &mut TestAppContext) {
|
||||
position: snapshot.anchor_after(Point::new(2, 0)),
|
||||
disposition: BlockDisposition::Below,
|
||||
height: 1,
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
}],
|
||||
Some(Autoscroll::fit()),
|
||||
cx,
|
||||
@@ -4245,10 +4289,7 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
|
||||
.with_language(language, cx)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
|
||||
|
||||
@@ -4412,10 +4453,7 @@ async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) {
|
||||
|
||||
let text = "fn a() {}";
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
|
||||
.with_language(language, cx)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
|
||||
editor
|
||||
@@ -5070,10 +5108,7 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) {
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
|
||||
.with_language(language, cx)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
|
||||
view.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
|
||||
@@ -5221,10 +5256,7 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) {
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
|
||||
.with_language(language, cx)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
|
||||
editor
|
||||
@@ -5411,10 +5443,7 @@ async fn test_auto_replace_emoji_shortcode(cx: &mut gpui::TestAppContext) {
|
||||
Some(tree_sitter_rust::language()),
|
||||
));
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "")
|
||||
.with_language(language, cx)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local("", cx).with_language(language, cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
|
||||
editor
|
||||
@@ -6507,7 +6536,7 @@ async fn test_toggle_comment(cx: &mut gpui::TestAppContext) {
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
let language = Arc::new(Language::new(
|
||||
LanguageConfig {
|
||||
line_comments: vec!["// ".into()],
|
||||
line_comments: vec!["// ".into(), "//! ".into(), "/// ".into()],
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
@@ -6601,6 +6630,25 @@ async fn test_toggle_comment(cx: &mut gpui::TestAppContext) {
|
||||
// c();ˇ»
|
||||
}
|
||||
"});
|
||||
|
||||
// If a selection includes multiple comment prefixes, all lines are uncommented.
|
||||
cx.set_state(indoc! {"
|
||||
fn a() {
|
||||
«// a();
|
||||
/// b();
|
||||
//! c();ˇ»
|
||||
}
|
||||
"});
|
||||
|
||||
cx.update_editor(|e, cx| e.toggle_comments(&ToggleComments::default(), cx));
|
||||
|
||||
cx.assert_editor_state(indoc! {"
|
||||
fn a() {
|
||||
«a();
|
||||
b();
|
||||
c();ˇ»
|
||||
}
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -6861,13 +6909,7 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) {
|
||||
fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
sample_text(3, 4, 'a'),
|
||||
)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx));
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
|
||||
multibuffer.push_excerpts(
|
||||
@@ -6951,13 +6993,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
|
||||
primary: None,
|
||||
}
|
||||
});
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
initial_text,
|
||||
)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(initial_text, cx));
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
|
||||
multibuffer.push_excerpts(buffer, excerpt_ranges, cx);
|
||||
@@ -7015,13 +7051,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
|
||||
fn test_refresh_selections(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
sample_text(3, 4, 'a'),
|
||||
)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx));
|
||||
let mut excerpt1_id = None;
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
|
||||
@@ -7106,13 +7136,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) {
|
||||
fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
sample_text(3, 4, 'a'),
|
||||
)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx));
|
||||
let mut excerpt1_id = None;
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
|
||||
@@ -7207,10 +7231,7 @@ async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) {
|
||||
"{{} }\n", //
|
||||
);
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), text)
|
||||
.with_language(language, cx)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx).with_language(language, cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let (view, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
|
||||
view.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
|
||||
@@ -7263,7 +7284,7 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) {
|
||||
|range: Range<Point>| buffer.anchor_after(range.start)..buffer.anchor_after(range.end);
|
||||
|
||||
editor.highlight_background::<Type1>(
|
||||
vec![
|
||||
&[
|
||||
anchor_range(Point::new(2, 1)..Point::new(2, 3)),
|
||||
anchor_range(Point::new(4, 2)..Point::new(4, 4)),
|
||||
anchor_range(Point::new(6, 3)..Point::new(6, 5)),
|
||||
@@ -7273,7 +7294,7 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) {
|
||||
cx,
|
||||
);
|
||||
editor.highlight_background::<Type2>(
|
||||
vec![
|
||||
&[
|
||||
anchor_range(Point::new(3, 2)..Point::new(3, 5)),
|
||||
anchor_range(Point::new(5, 3)..Point::new(5, 6)),
|
||||
anchor_range(Point::new(7, 4)..Point::new(7, 7)),
|
||||
@@ -8911,31 +8932,13 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) {
|
||||
cx.executor().run_until_parked();
|
||||
}
|
||||
|
||||
let buffer_1 = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
sample_text_1.clone(),
|
||||
)
|
||||
});
|
||||
let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text_1.clone(), cx));
|
||||
diff_every_buffer_row(&buffer_1, sample_text_1.clone(), cols, cx);
|
||||
|
||||
let buffer_2 = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
1,
|
||||
BufferId::new(cx.entity_id().as_u64() + 1).unwrap(),
|
||||
sample_text_2.clone(),
|
||||
)
|
||||
});
|
||||
let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text_2.clone(), cx));
|
||||
diff_every_buffer_row(&buffer_2, sample_text_2.clone(), cols, cx);
|
||||
|
||||
let buffer_3 = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
2,
|
||||
BufferId::new(cx.entity_id().as_u64() + 2).unwrap(),
|
||||
sample_text_3.clone(),
|
||||
)
|
||||
});
|
||||
let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text_3.clone(), cx));
|
||||
diff_every_buffer_row(&buffer_3, sample_text_3.clone(), cols, cx);
|
||||
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
@@ -9074,29 +9077,9 @@ async fn test_mutlibuffer_in_navigation_history(cx: &mut gpui::TestAppContext) {
|
||||
"vvvv\nwwww\nxxxx\nyyyy\nzzzz\n{{{{\n||||\n}}}}\n~~~~\n\u{7f}\u{7f}\u{7f}\u{7f}"
|
||||
);
|
||||
|
||||
let buffer_1 = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
sample_text_1.clone(),
|
||||
)
|
||||
});
|
||||
|
||||
let buffer_2 = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
1,
|
||||
BufferId::new(cx.entity_id().as_u64() + 1).unwrap(),
|
||||
sample_text_2.clone(),
|
||||
)
|
||||
});
|
||||
|
||||
let buffer_3 = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
2,
|
||||
BufferId::new(cx.entity_id().as_u64() + 2).unwrap(),
|
||||
sample_text_3.clone(),
|
||||
)
|
||||
});
|
||||
let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text_1.clone(), cx));
|
||||
let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text_2.clone(), cx));
|
||||
let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text_3.clone(), cx));
|
||||
|
||||
let multi_buffer = cx.new_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0, ReadWrite);
|
||||
|
||||
@@ -24,7 +24,7 @@ use gpui::{
|
||||
transparent_black, Action, AnchorCorner, AnyElement, AnyView, AvailableSpace, Bounds,
|
||||
ClipboardItem, ContentMask, Corners, CursorStyle, DispatchPhase, Edges, Element,
|
||||
ElementContext, ElementInputHandler, Entity, Hitbox, Hsla, InteractiveElement, IntoElement,
|
||||
ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent,
|
||||
ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, PaintQuad,
|
||||
ParentElement, Pixels, ScrollDelta, ScrollWheelEvent, ShapedLine, SharedString, Size, Stateful,
|
||||
StatefulInteractiveElement, Style, Styled, TextRun, TextStyle, TextStyleRefinement, View,
|
||||
ViewContext, WindowContext,
|
||||
@@ -876,23 +876,29 @@ impl EditorElement {
|
||||
block_width = em_width;
|
||||
}
|
||||
let block_text = if let CursorShape::Block = selection.cursor_shape {
|
||||
snapshot
|
||||
.chars_at(cursor_position)
|
||||
.next()
|
||||
.and_then(|(character, _)| {
|
||||
snapshot.display_chars_at(cursor_position).next().and_then(
|
||||
|(character, _)| {
|
||||
let text = if character == '\n' {
|
||||
SharedString::from(" ")
|
||||
} else {
|
||||
SharedString::from(character.to_string())
|
||||
};
|
||||
let len = text.len();
|
||||
|
||||
let font = cursor_row_layout
|
||||
.font_id_for_index(cursor_column)
|
||||
.and_then(|cursor_font_id| {
|
||||
cx.text_system().get_font_for_id(cursor_font_id)
|
||||
})
|
||||
.unwrap_or(self.style.text.font());
|
||||
|
||||
cx.text_system()
|
||||
.shape_line(
|
||||
text,
|
||||
cursor_row_layout.font_size,
|
||||
&[TextRun {
|
||||
len,
|
||||
font: self.style.text.font(),
|
||||
font: font,
|
||||
color: self.style.background,
|
||||
background_color: None,
|
||||
strikethrough: None,
|
||||
@@ -900,7 +906,8 @@ impl EditorElement {
|
||||
}],
|
||||
)
|
||||
.log_err()
|
||||
})
|
||||
},
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@@ -2371,150 +2378,15 @@ impl EditorElement {
|
||||
},
|
||||
cx.theme().colors().scrollbar_track_border,
|
||||
));
|
||||
let scrollbar_settings = EditorSettings::get_global(cx).scrollbar;
|
||||
let is_singleton = self.editor.read(cx).is_singleton(cx);
|
||||
let left = scrollbar_layout.hitbox.left();
|
||||
let right = scrollbar_layout.hitbox.right();
|
||||
let column_width =
|
||||
px(((right - left - ScrollbarLayout::BORDER_WIDTH).0 / 3.0).floor());
|
||||
if is_singleton && scrollbar_settings.selections {
|
||||
let start_anchor = Anchor::min();
|
||||
let end_anchor = Anchor::max();
|
||||
let background_ranges = self
|
||||
.editor
|
||||
.read(cx)
|
||||
.background_highlight_row_ranges::<BufferSearchHighlights>(
|
||||
start_anchor..end_anchor,
|
||||
&layout.position_map.snapshot,
|
||||
50000,
|
||||
);
|
||||
let left_x = left + ScrollbarLayout::BORDER_WIDTH + column_width;
|
||||
let right_x = left_x + column_width;
|
||||
for range in background_ranges {
|
||||
let (start_y, end_y) =
|
||||
scrollbar_layout.ys_for_marker(range.start().row(), range.end().row());
|
||||
let bounds =
|
||||
Bounds::from_corners(point(left_x, start_y), point(right_x, end_y));
|
||||
cx.paint_quad(quad(
|
||||
bounds,
|
||||
Corners::default(),
|
||||
cx.theme().status().info,
|
||||
Edges::default(),
|
||||
cx.theme().colors().scrollbar_thumb_border,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if is_singleton && scrollbar_settings.symbols_selections {
|
||||
let selection_ranges = self.editor.read(cx).background_highlights_in_range(
|
||||
Anchor::min()..Anchor::max(),
|
||||
&layout.position_map.snapshot,
|
||||
cx.theme().colors(),
|
||||
);
|
||||
let left_x = left + ScrollbarLayout::BORDER_WIDTH + column_width;
|
||||
let right_x = left_x + column_width;
|
||||
for hunk in selection_ranges {
|
||||
let start_display = Point::new(hunk.0.start.row(), 0)
|
||||
.to_display_point(&layout.position_map.snapshot.display_snapshot);
|
||||
let end_display = Point::new(hunk.0.end.row(), 0)
|
||||
.to_display_point(&layout.position_map.snapshot.display_snapshot);
|
||||
let (start_y, end_y) =
|
||||
scrollbar_layout.ys_for_marker(start_display.row(), end_display.row());
|
||||
let bounds =
|
||||
Bounds::from_corners(point(left_x, start_y), point(right_x, end_y));
|
||||
cx.paint_quad(quad(
|
||||
bounds,
|
||||
Corners::default(),
|
||||
cx.theme().status().info,
|
||||
Edges::default(),
|
||||
cx.theme().colors().scrollbar_thumb_border,
|
||||
));
|
||||
}
|
||||
}
|
||||
// Refresh scrollbar markers in the background. Below, we paint whatever markers have already been computed.
|
||||
self.refresh_scrollbar_markers(layout, scrollbar_layout, cx);
|
||||
|
||||
if is_singleton && scrollbar_settings.git_diff {
|
||||
let left_x = left + ScrollbarLayout::BORDER_WIDTH;
|
||||
let right_x = left_x + column_width;
|
||||
for hunk in layout
|
||||
.position_map
|
||||
.snapshot
|
||||
.buffer_snapshot
|
||||
.git_diff_hunks_in_range(0..layout.max_row)
|
||||
{
|
||||
let start_display_row = Point::new(hunk.associated_range.start, 0)
|
||||
.to_display_point(&layout.position_map.snapshot.display_snapshot)
|
||||
.row();
|
||||
let mut end_display_row = Point::new(hunk.associated_range.end, 0)
|
||||
.to_display_point(&layout.position_map.snapshot.display_snapshot)
|
||||
.row();
|
||||
if end_display_row != start_display_row {
|
||||
end_display_row -= 1;
|
||||
}
|
||||
let (start_y, end_y) =
|
||||
scrollbar_layout.ys_for_marker(start_display_row, end_display_row);
|
||||
let bounds =
|
||||
Bounds::from_corners(point(left_x, start_y), point(right_x, end_y));
|
||||
let color = match hunk.status() {
|
||||
DiffHunkStatus::Added => cx.theme().status().created,
|
||||
DiffHunkStatus::Modified => cx.theme().status().modified,
|
||||
DiffHunkStatus::Removed => cx.theme().status().deleted,
|
||||
};
|
||||
cx.paint_quad(quad(
|
||||
bounds,
|
||||
Corners::default(),
|
||||
color,
|
||||
Edges::default(),
|
||||
cx.theme().colors().scrollbar_thumb_border,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if is_singleton && scrollbar_settings.diagnostics {
|
||||
let max_point = layout
|
||||
.position_map
|
||||
.snapshot
|
||||
.display_snapshot
|
||||
.buffer_snapshot
|
||||
.max_point();
|
||||
|
||||
let diagnostics = layout
|
||||
.position_map
|
||||
.snapshot
|
||||
.buffer_snapshot
|
||||
.diagnostics_in_range::<_, Point>(Point::zero()..max_point, false)
|
||||
// We want to sort by severity, in order to paint the most severe diagnostics last.
|
||||
.sorted_by_key(|diagnostic| {
|
||||
std::cmp::Reverse(diagnostic.diagnostic.severity)
|
||||
});
|
||||
|
||||
let left_x = left + ScrollbarLayout::BORDER_WIDTH + 2.0 * column_width;
|
||||
for diagnostic in diagnostics {
|
||||
let start_display = diagnostic
|
||||
.range
|
||||
.start
|
||||
.to_display_point(&layout.position_map.snapshot.display_snapshot);
|
||||
let end_display = diagnostic
|
||||
.range
|
||||
.end
|
||||
.to_display_point(&layout.position_map.snapshot.display_snapshot);
|
||||
let (start_y, end_y) =
|
||||
scrollbar_layout.ys_for_marker(start_display.row(), end_display.row());
|
||||
let bounds =
|
||||
Bounds::from_corners(point(left_x, start_y), point(right, end_y));
|
||||
let color = match diagnostic.diagnostic.severity {
|
||||
DiagnosticSeverity::ERROR => cx.theme().status().error,
|
||||
DiagnosticSeverity::WARNING => cx.theme().status().warning,
|
||||
DiagnosticSeverity::INFORMATION => cx.theme().status().info,
|
||||
_ => cx.theme().status().hint,
|
||||
};
|
||||
cx.paint_quad(quad(
|
||||
bounds,
|
||||
Corners::default(),
|
||||
color,
|
||||
Edges::default(),
|
||||
cx.theme().colors().scrollbar_thumb_border,
|
||||
));
|
||||
}
|
||||
let markers = self.editor.read(cx).scrollbar_marker_state.markers.clone();
|
||||
for marker in markers.iter() {
|
||||
let mut marker = marker.clone();
|
||||
marker.bounds.origin += scrollbar_layout.hitbox.origin;
|
||||
cx.paint_quad(marker);
|
||||
}
|
||||
|
||||
cx.paint_quad(quad(
|
||||
@@ -2620,6 +2492,156 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
|
||||
fn refresh_scrollbar_markers(
|
||||
&self,
|
||||
layout: &EditorLayout,
|
||||
scrollbar_layout: &ScrollbarLayout,
|
||||
cx: &mut ElementContext,
|
||||
) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
if !editor.is_singleton(cx)
|
||||
|| !editor
|
||||
.scrollbar_marker_state
|
||||
.should_refresh(scrollbar_layout.hitbox.size)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
let scrollbar_layout = scrollbar_layout.clone();
|
||||
let background_highlights = editor.background_highlights.clone();
|
||||
let snapshot = layout.position_map.snapshot.clone();
|
||||
let theme = cx.theme().clone();
|
||||
let scrollbar_settings = EditorSettings::get_global(cx).scrollbar;
|
||||
let max_row = layout.max_row;
|
||||
|
||||
editor.scrollbar_marker_state.dirty = false;
|
||||
editor.scrollbar_marker_state.pending_refresh =
|
||||
Some(cx.spawn(|editor, mut cx| async move {
|
||||
let scrollbar_size = scrollbar_layout.hitbox.size;
|
||||
let scrollbar_markers = cx
|
||||
.background_executor()
|
||||
.spawn(async move {
|
||||
let mut marker_quads = Vec::new();
|
||||
|
||||
if scrollbar_settings.git_diff {
|
||||
let marker_row_ranges = snapshot
|
||||
.buffer_snapshot
|
||||
.git_diff_hunks_in_range(0..max_row)
|
||||
.map(|hunk| {
|
||||
let start_display_row =
|
||||
Point::new(hunk.associated_range.start, 0)
|
||||
.to_display_point(&snapshot.display_snapshot)
|
||||
.row();
|
||||
let mut end_display_row =
|
||||
Point::new(hunk.associated_range.end, 0)
|
||||
.to_display_point(&snapshot.display_snapshot)
|
||||
.row();
|
||||
if end_display_row != start_display_row {
|
||||
end_display_row -= 1;
|
||||
}
|
||||
let color = match hunk.status() {
|
||||
DiffHunkStatus::Added => theme.status().created,
|
||||
DiffHunkStatus::Modified => theme.status().modified,
|
||||
DiffHunkStatus::Removed => theme.status().deleted,
|
||||
};
|
||||
ColoredRange {
|
||||
start: start_display_row,
|
||||
end: end_display_row,
|
||||
color,
|
||||
}
|
||||
});
|
||||
|
||||
marker_quads.extend(
|
||||
scrollbar_layout.marker_quads_for_ranges(marker_row_ranges, 0),
|
||||
);
|
||||
}
|
||||
|
||||
for (background_highlight_id, (_, background_ranges)) in
|
||||
background_highlights.iter()
|
||||
{
|
||||
if (*background_highlight_id
|
||||
== TypeId::of::<BufferSearchHighlights>()
|
||||
&& scrollbar_settings.selections)
|
||||
|| scrollbar_settings.symbols_selections
|
||||
{
|
||||
let marker_row_ranges =
|
||||
background_ranges.into_iter().map(|range| {
|
||||
let display_start = range
|
||||
.start
|
||||
.to_display_point(&snapshot.display_snapshot);
|
||||
let display_end = range
|
||||
.end
|
||||
.to_display_point(&snapshot.display_snapshot);
|
||||
ColoredRange {
|
||||
start: display_start.row(),
|
||||
end: display_end.row(),
|
||||
color: theme.status().info,
|
||||
}
|
||||
});
|
||||
marker_quads.extend(
|
||||
scrollbar_layout
|
||||
.marker_quads_for_ranges(marker_row_ranges, 1),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if scrollbar_settings.diagnostics {
|
||||
let max_point =
|
||||
snapshot.display_snapshot.buffer_snapshot.max_point();
|
||||
|
||||
let diagnostics = snapshot
|
||||
.buffer_snapshot
|
||||
.diagnostics_in_range::<_, Point>(
|
||||
Point::zero()..max_point,
|
||||
false,
|
||||
)
|
||||
// We want to sort by severity, in order to paint the most severe diagnostics last.
|
||||
.sorted_by_key(|diagnostic| {
|
||||
std::cmp::Reverse(diagnostic.diagnostic.severity)
|
||||
});
|
||||
|
||||
let marker_row_ranges = diagnostics.into_iter().map(|diagnostic| {
|
||||
let start_display = diagnostic
|
||||
.range
|
||||
.start
|
||||
.to_display_point(&snapshot.display_snapshot);
|
||||
let end_display = diagnostic
|
||||
.range
|
||||
.end
|
||||
.to_display_point(&snapshot.display_snapshot);
|
||||
let color = match diagnostic.diagnostic.severity {
|
||||
DiagnosticSeverity::ERROR => theme.status().error,
|
||||
DiagnosticSeverity::WARNING => theme.status().warning,
|
||||
DiagnosticSeverity::INFORMATION => theme.status().info,
|
||||
_ => theme.status().hint,
|
||||
};
|
||||
ColoredRange {
|
||||
start: start_display.row(),
|
||||
end: end_display.row(),
|
||||
color,
|
||||
}
|
||||
});
|
||||
marker_quads.extend(
|
||||
scrollbar_layout.marker_quads_for_ranges(marker_row_ranges, 2),
|
||||
);
|
||||
}
|
||||
|
||||
Arc::from(marker_quads)
|
||||
})
|
||||
.await;
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
editor.scrollbar_marker_state.markers = scrollbar_markers;
|
||||
editor.scrollbar_marker_state.scrollbar_size = scrollbar_size;
|
||||
editor.scrollbar_marker_state.pending_refresh = None;
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}));
|
||||
});
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn paint_highlighted_range(
|
||||
&self,
|
||||
@@ -3812,6 +3834,13 @@ impl EditorLayout {
|
||||
}
|
||||
}
|
||||
|
||||
struct ColoredRange<T> {
|
||||
start: T,
|
||||
end: T,
|
||||
color: Hsla,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ScrollbarLayout {
|
||||
hitbox: Hitbox,
|
||||
visible_row_range: Range<f32>,
|
||||
@@ -3839,13 +3868,60 @@ impl ScrollbarLayout {
|
||||
self.hitbox.top() + self.first_row_y_offset + row * self.row_height
|
||||
}
|
||||
|
||||
fn ys_for_marker(&self, start_row: u32, end_row: u32) -> (Pixels, Pixels) {
|
||||
let start_y = self.y_for_row(start_row as f32);
|
||||
let mut end_y = self.y_for_row((end_row + 1) as f32);
|
||||
if end_y - start_y < Self::MIN_MARKER_HEIGHT {
|
||||
end_y = start_y + Self::MIN_MARKER_HEIGHT;
|
||||
fn marker_quads_for_ranges(
|
||||
&self,
|
||||
row_ranges: impl IntoIterator<Item = ColoredRange<u32>>,
|
||||
column: usize,
|
||||
) -> Vec<PaintQuad> {
|
||||
let column_width =
|
||||
px(((self.hitbox.size.width - ScrollbarLayout::BORDER_WIDTH).0 / 3.0).floor());
|
||||
|
||||
let left_x = ScrollbarLayout::BORDER_WIDTH + (column as f32 * column_width);
|
||||
let right_x = left_x + column_width;
|
||||
|
||||
let mut background_pixel_ranges = row_ranges
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
let start_y = self.first_row_y_offset + range.start as f32 * self.row_height;
|
||||
let end_y = self.first_row_y_offset + (range.end + 1) as f32 * self.row_height;
|
||||
ColoredRange {
|
||||
start: start_y,
|
||||
end: end_y,
|
||||
color: range.color,
|
||||
}
|
||||
})
|
||||
.peekable();
|
||||
|
||||
let mut quads = Vec::new();
|
||||
while let Some(mut pixel_range) = background_pixel_ranges.next() {
|
||||
pixel_range.end = pixel_range
|
||||
.end
|
||||
.max(pixel_range.start + Self::MIN_MARKER_HEIGHT);
|
||||
while let Some(next_pixel_range) = background_pixel_ranges.peek() {
|
||||
if pixel_range.end >= next_pixel_range.start
|
||||
&& pixel_range.color == next_pixel_range.color
|
||||
{
|
||||
pixel_range.end = next_pixel_range.end.max(pixel_range.end);
|
||||
background_pixel_ranges.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let bounds = Bounds::from_corners(
|
||||
point(left_x, pixel_range.start),
|
||||
point(right_x, pixel_range.end),
|
||||
);
|
||||
quads.push(quad(
|
||||
bounds,
|
||||
Corners::default(),
|
||||
pixel_range.color,
|
||||
Edges::default(),
|
||||
Hsla::transparent_black(),
|
||||
));
|
||||
}
|
||||
(start_y, end_y)
|
||||
|
||||
quads
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4242,7 +4318,7 @@ mod tests {
|
||||
use gpui::TestAppContext;
|
||||
use language::language_settings;
|
||||
use log::info;
|
||||
use std::{num::NonZeroU32, sync::Arc};
|
||||
use std::num::NonZeroU32;
|
||||
use util::test::sample_text;
|
||||
|
||||
#[gpui::test]
|
||||
@@ -4474,7 +4550,7 @@ mod tests {
|
||||
disposition: BlockDisposition::Above,
|
||||
height: 3,
|
||||
position: Anchor::min(),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
}],
|
||||
None,
|
||||
cx,
|
||||
|
||||
@@ -20,7 +20,7 @@ pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewCon
|
||||
.innermost_enclosing_bracket_ranges(head..head, None)
|
||||
{
|
||||
editor.highlight_background::<MatchingBracketHighlight>(
|
||||
vec![
|
||||
&[
|
||||
opening_range.to_anchors(&snapshot.buffer_snapshot),
|
||||
closing_range.to_anchors(&snapshot.buffer_snapshot),
|
||||
],
|
||||
|
||||
@@ -238,7 +238,9 @@ fn show_hover(
|
||||
let task = cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
// If we need to delay, delay a set amount initially before making the lsp request
|
||||
let delay = if !ignore_timeout {
|
||||
let delay = if ignore_timeout {
|
||||
None
|
||||
} else {
|
||||
// Construct delay task to wait for later
|
||||
let total_delay = Some(
|
||||
cx.background_executor()
|
||||
@@ -249,8 +251,6 @@ fn show_hover(
|
||||
.timer(Duration::from_millis(HOVER_REQUEST_DELAY_MILLIS))
|
||||
.await;
|
||||
total_delay
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// query the LSP for hover info
|
||||
@@ -342,7 +342,7 @@ fn show_hover(
|
||||
} else {
|
||||
// Highlight the selected symbol using a background highlight
|
||||
editor.highlight_background::<HoverState>(
|
||||
hover_highlights,
|
||||
&hover_highlights,
|
||||
|theme| theme.element_hover, // todo update theme
|
||||
cx,
|
||||
);
|
||||
@@ -375,12 +375,12 @@ async fn parse_blocks(
|
||||
match &block.kind {
|
||||
HoverBlockKind::PlainText => {
|
||||
markdown::new_paragraph(&mut text, &mut Vec::new());
|
||||
text.push_str(&block.text);
|
||||
text.push_str(&block.text.replace("\\n", "\n"));
|
||||
}
|
||||
|
||||
HoverBlockKind::Markdown => {
|
||||
markdown::parse_markdown_block(
|
||||
&block.text,
|
||||
&block.text.replace("\\n", "\n"),
|
||||
language_registry,
|
||||
language.clone(),
|
||||
&mut text,
|
||||
|
||||
@@ -19,7 +19,7 @@ use project::repository::GitFileStatus;
|
||||
use project::{search::SearchQuery, FormatTrigger, Item as _, Project, ProjectPath};
|
||||
use rpc::proto::{self, update_view, PeerId};
|
||||
use settings::Settings;
|
||||
use workspace::item::ItemSettings;
|
||||
use workspace::item::{ItemSettings, TabContentParams};
|
||||
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
@@ -594,7 +594,7 @@ impl Item for Editor {
|
||||
Some(path.to_string_lossy().to_string().into())
|
||||
}
|
||||
|
||||
fn tab_content(&self, detail: Option<usize>, selected: bool, cx: &WindowContext) -> AnyElement {
|
||||
fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement {
|
||||
let label_color = if ItemSettings::get_global(cx).git_status {
|
||||
self.buffer()
|
||||
.read(cx)
|
||||
@@ -602,14 +602,14 @@ impl Item for Editor {
|
||||
.and_then(|buffer| buffer.read(cx).project_path(cx))
|
||||
.and_then(|path| self.project.as_ref()?.read(cx).entry_for_path(&path, cx))
|
||||
.map(|entry| {
|
||||
entry_git_aware_label_color(entry.git_status, entry.is_ignored, selected)
|
||||
entry_git_aware_label_color(entry.git_status, entry.is_ignored, params.selected)
|
||||
})
|
||||
.unwrap_or_else(|| entry_label_color(selected))
|
||||
.unwrap_or_else(|| entry_label_color(params.selected))
|
||||
} else {
|
||||
entry_label_color(selected)
|
||||
entry_label_color(params.selected)
|
||||
};
|
||||
|
||||
let description = detail.and_then(|detail| {
|
||||
let description = params.detail.and_then(|detail| {
|
||||
let path = path_for_buffer(&self.buffer, detail, false, cx)?;
|
||||
let description = path.to_string_lossy();
|
||||
let description = description.trim();
|
||||
@@ -623,7 +623,11 @@ impl Item for Editor {
|
||||
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Label::new(self.title(cx).to_string()).color(label_color))
|
||||
.child(
|
||||
Label::new(self.title(cx).to_string())
|
||||
.color(label_color)
|
||||
.italic(params.preview),
|
||||
)
|
||||
.when_some(description, |this, description| {
|
||||
this.child(
|
||||
Label::new(description)
|
||||
@@ -705,31 +709,38 @@ impl Item for Editor {
|
||||
.await?;
|
||||
}
|
||||
|
||||
// Only format and save the buffers with changes. For clean buffers,
|
||||
// we simulate saving by calling `Buffer::did_save`, so that language servers or
|
||||
// other downstream listeners of save events get notified.
|
||||
let (dirty_buffers, clean_buffers) = buffers.into_iter().partition(|buffer| {
|
||||
buffer
|
||||
.update(&mut cx, |buffer, _| {
|
||||
buffer.is_dirty() || buffer.has_conflict()
|
||||
})
|
||||
.unwrap_or(false)
|
||||
});
|
||||
if buffers.len() == 1 {
|
||||
// Apply full save routine for singleton buffers, to allow to `touch` the file via the editor.
|
||||
project
|
||||
.update(&mut cx, |project, cx| project.save_buffers(buffers, cx))?
|
||||
.await?;
|
||||
} else {
|
||||
// For multi-buffers, only format and save the buffers with changes.
|
||||
// For clean buffers, we simulate saving by calling `Buffer::did_save`,
|
||||
// so that language servers or other downstream listeners of save events get notified.
|
||||
let (dirty_buffers, clean_buffers) = buffers.into_iter().partition(|buffer| {
|
||||
buffer
|
||||
.update(&mut cx, |buffer, _| {
|
||||
buffer.is_dirty() || buffer.has_conflict()
|
||||
})
|
||||
.unwrap_or(false)
|
||||
});
|
||||
|
||||
project
|
||||
.update(&mut cx, |project, cx| {
|
||||
project.save_buffers(dirty_buffers, cx)
|
||||
})?
|
||||
.await?;
|
||||
for buffer in clean_buffers {
|
||||
buffer
|
||||
.update(&mut cx, |buffer, cx| {
|
||||
let version = buffer.saved_version().clone();
|
||||
let fingerprint = buffer.saved_version_fingerprint();
|
||||
let mtime = buffer.saved_mtime();
|
||||
buffer.did_save(version, fingerprint, mtime, cx);
|
||||
})
|
||||
.ok();
|
||||
project
|
||||
.update(&mut cx, |project, cx| {
|
||||
project.save_buffers(dirty_buffers, cx)
|
||||
})?
|
||||
.await?;
|
||||
for buffer in clean_buffers {
|
||||
buffer
|
||||
.update(&mut cx, |buffer, cx| {
|
||||
let version = buffer.saved_version().clone();
|
||||
let fingerprint = buffer.saved_version_fingerprint();
|
||||
let mtime = buffer.saved_mtime();
|
||||
buffer.did_save(version, fingerprint, mtime, cx);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -976,7 +987,7 @@ impl SearchableItem for Editor {
|
||||
self.clear_background_highlights::<BufferSearchHighlights>(cx);
|
||||
}
|
||||
|
||||
fn update_matches(&mut self, matches: Vec<Range<Anchor>>, cx: &mut ViewContext<Self>) {
|
||||
fn update_matches(&mut self, matches: &[Range<Anchor>], cx: &mut ViewContext<Self>) {
|
||||
self.highlight_background::<BufferSearchHighlights>(
|
||||
matches,
|
||||
|theme| theme.search_match_background,
|
||||
@@ -1013,7 +1024,7 @@ impl SearchableItem for Editor {
|
||||
fn activate_match(
|
||||
&mut self,
|
||||
index: usize,
|
||||
matches: Vec<Range<Anchor>>,
|
||||
matches: &[Range<Anchor>],
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
self.unfold_ranges([matches[index].clone()], false, true, cx);
|
||||
@@ -1023,10 +1034,10 @@ impl SearchableItem for Editor {
|
||||
})
|
||||
}
|
||||
|
||||
fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
|
||||
self.unfold_ranges(matches.clone(), false, false, cx);
|
||||
fn select_matches(&mut self, matches: &[Self::Match], cx: &mut ViewContext<Self>) {
|
||||
self.unfold_ranges(matches.to_vec(), false, false, cx);
|
||||
let mut ranges = Vec::new();
|
||||
for m in &matches {
|
||||
for m in matches {
|
||||
ranges.push(self.range_for_match(&m))
|
||||
}
|
||||
self.change_selections(None, cx, |s| s.select_ranges(ranges));
|
||||
@@ -1055,7 +1066,7 @@ impl SearchableItem for Editor {
|
||||
}
|
||||
fn match_index_for_direction(
|
||||
&mut self,
|
||||
matches: &Vec<Range<Anchor>>,
|
||||
matches: &[Range<Anchor>],
|
||||
current_index: usize,
|
||||
direction: Direction,
|
||||
count: usize,
|
||||
@@ -1147,11 +1158,11 @@ impl SearchableItem for Editor {
|
||||
|
||||
fn active_match_index(
|
||||
&mut self,
|
||||
matches: Vec<Range<Anchor>>,
|
||||
matches: &[Range<Anchor>],
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<usize> {
|
||||
active_match_index(
|
||||
&matches,
|
||||
matches,
|
||||
&self.selections.newest_anchor().head(),
|
||||
&self.buffer().read(cx).snapshot(cx),
|
||||
)
|
||||
|
||||
@@ -6,13 +6,14 @@ use crate::{char_kind, scroll::ScrollAnchor, CharKind, EditorStyle, ToOffset, To
|
||||
use gpui::{px, Pixels, WindowTextSystem};
|
||||
use language::Point;
|
||||
use multi_buffer::MultiBufferSnapshot;
|
||||
use serde::Deserialize;
|
||||
|
||||
use std::{ops::Range, sync::Arc};
|
||||
|
||||
/// Defines search strategy for items in `movement` module.
|
||||
/// `FindRange::SingeLine` only looks for a match on a single line at a time, whereas
|
||||
/// `FindRange::MultiLine` keeps going until the end of a string.
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize)]
|
||||
pub enum FindRange {
|
||||
SingleLine,
|
||||
MultiLine,
|
||||
@@ -569,7 +570,6 @@ mod tests {
|
||||
use language::Capability;
|
||||
use project::Project;
|
||||
use settings::SettingsStore;
|
||||
use text::BufferId;
|
||||
use util::post_inc;
|
||||
|
||||
#[gpui::test]
|
||||
@@ -869,13 +869,7 @@ mod tests {
|
||||
|
||||
let font = font("Helvetica");
|
||||
|
||||
let buffer = cx.new_model(|cx| {
|
||||
Buffer::new(
|
||||
0,
|
||||
BufferId::new(cx.entity_id().as_u64()).unwrap(),
|
||||
"abc\ndefg\nhijkl\nmn",
|
||||
)
|
||||
});
|
||||
let buffer = cx.new_model(|cx| Buffer::local("abc\ndefg\nhijkl\nmn", cx));
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite);
|
||||
multibuffer.push_excerpts(
|
||||
|
||||
@@ -45,11 +45,11 @@ impl ScrollAnchor {
|
||||
|
||||
pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> gpui::Point<f32> {
|
||||
let mut scroll_position = self.offset;
|
||||
if self.anchor != Anchor::min() {
|
||||
if self.anchor == Anchor::min() {
|
||||
scroll_position.y = 0.;
|
||||
} else {
|
||||
let scroll_top = self.anchor.to_display_point(snapshot).row() as f32;
|
||||
scroll_position.y = scroll_top + scroll_position.y;
|
||||
} else {
|
||||
scroll_position.y = 0.;
|
||||
}
|
||||
scroll_position
|
||||
}
|
||||
|
||||
@@ -21,7 +21,6 @@ use std::{
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
use text::BufferId;
|
||||
use ui::Context;
|
||||
use util::{
|
||||
assert_set_eq,
|
||||
@@ -76,10 +75,9 @@ impl EditorTestContext {
|
||||
) -> EditorTestContext {
|
||||
let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite);
|
||||
let buffer = cx.new_model(|cx| {
|
||||
for (i, excerpt) in excerpts.into_iter().enumerate() {
|
||||
for excerpt in excerpts.into_iter() {
|
||||
let (text, ranges) = marked_text_ranges(excerpt, false);
|
||||
let buffer =
|
||||
cx.new_model(|_| Buffer::new(0, BufferId::new(i as u64 + 1).unwrap(), text));
|
||||
let buffer = cx.new_model(|cx| Buffer::local(text, cx));
|
||||
multibuffer.push_excerpts(
|
||||
buffer,
|
||||
ranges.into_iter().map(|range| ExcerptRange {
|
||||
@@ -345,7 +343,7 @@ impl EditorTestContext {
|
||||
.background_highlights
|
||||
.get(&TypeId::of::<Tag>())
|
||||
.map(|h| h.1.clone())
|
||||
.unwrap_or_default()
|
||||
.unwrap_or_else(|| Arc::from([]))
|
||||
.into_iter()
|
||||
.map(|range| range.to_offset(&snapshot.buffer_snapshot))
|
||||
.collect()
|
||||
|
||||
@@ -23,6 +23,7 @@ collections.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
isahc.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
lsp.workspace = true
|
||||
|
||||
@@ -1,21 +1,31 @@
|
||||
use crate::wasm_host::{wit::LanguageServerConfig, WasmExtension, WasmHost};
|
||||
use crate::wasm_host::{
|
||||
wit::{self, LanguageServerConfig},
|
||||
WasmExtension, WasmHost,
|
||||
};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use async_trait::async_trait;
|
||||
use collections::HashMap;
|
||||
use futures::{Future, FutureExt};
|
||||
use gpui::AsyncAppContext;
|
||||
use language::{Language, LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||
use language::{
|
||||
CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate,
|
||||
};
|
||||
use lsp::LanguageServerBinary;
|
||||
use serde::Serialize;
|
||||
use serde_json::Value;
|
||||
use std::ops::Range;
|
||||
use std::{
|
||||
any::Any,
|
||||
path::{Path, PathBuf},
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
};
|
||||
use util::{maybe, ResultExt};
|
||||
use wasmtime_wasi::WasiView as _;
|
||||
|
||||
pub struct ExtensionLspAdapter {
|
||||
pub(crate) extension: WasmExtension,
|
||||
pub(crate) language_server_id: LanguageServerName,
|
||||
pub(crate) config: LanguageServerConfig,
|
||||
pub(crate) host: Arc<WasmHost>,
|
||||
}
|
||||
@@ -43,7 +53,12 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(delegate)?;
|
||||
let command = extension
|
||||
.call_language_server_command(store, &this.config, resource)
|
||||
.call_language_server_command(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
&this.config,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))?;
|
||||
anyhow::Ok(command)
|
||||
@@ -64,7 +79,9 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
// We can remove once the following extension versions no longer see any use:
|
||||
// - toml@0.0.2
|
||||
// - zig@0.0.1
|
||||
if ["toml", "zig"].contains(&self.extension.manifest.id.as_ref()) {
|
||||
if ["toml", "zig"].contains(&self.extension.manifest.id.as_ref())
|
||||
&& path.starts_with(&self.host.work_dir)
|
||||
{
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
use std::fs::{self, Permissions};
|
||||
@@ -146,6 +163,7 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
let options = extension
|
||||
.call_language_server_initialization_options(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
&this.config,
|
||||
resource,
|
||||
)
|
||||
@@ -165,4 +183,394 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
async fn workspace_configuration(
|
||||
self: Arc<Self>,
|
||||
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||
_cx: &mut AsyncAppContext,
|
||||
) -> Result<Value> {
|
||||
let delegate = delegate.clone();
|
||||
let json_options: Option<String> = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(delegate)?;
|
||||
let options = extension
|
||||
.call_language_server_workspace_configuration(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))?;
|
||||
anyhow::Ok(options)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
Ok(if let Some(json_options) = json_options {
|
||||
serde_json::from_str(&json_options).with_context(|| {
|
||||
format!("failed to parse initialization_options from extension: {json_options}")
|
||||
})?
|
||||
} else {
|
||||
serde_json::json!({})
|
||||
})
|
||||
}
|
||||
|
||||
async fn labels_for_completions(
|
||||
self: Arc<Self>,
|
||||
completions: &[lsp::CompletionItem],
|
||||
language: &Arc<Language>,
|
||||
) -> Result<Vec<Option<CodeLabel>>> {
|
||||
let completions = completions
|
||||
.into_iter()
|
||||
.map(|completion| wit::Completion::from(completion.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let labels = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
extension
|
||||
.call_labels_for_completions(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
completions,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(labels_from_wit(labels, language))
|
||||
}
|
||||
|
||||
async fn labels_for_symbols(
|
||||
self: Arc<Self>,
|
||||
symbols: &[(String, lsp::SymbolKind)],
|
||||
language: &Arc<Language>,
|
||||
) -> Result<Vec<Option<CodeLabel>>> {
|
||||
let symbols = symbols
|
||||
.into_iter()
|
||||
.cloned()
|
||||
.map(|(name, kind)| wit::Symbol {
|
||||
name,
|
||||
kind: kind.into(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let labels = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
extension
|
||||
.call_labels_for_symbols(store, &this.language_server_id, symbols)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(labels_from_wit(labels, language))
|
||||
}
|
||||
}
|
||||
|
||||
fn labels_from_wit(
|
||||
labels: Vec<Option<wit::CodeLabel>>,
|
||||
language: &Arc<Language>,
|
||||
) -> Vec<Option<CodeLabel>> {
|
||||
labels
|
||||
.into_iter()
|
||||
.map(|label| {
|
||||
let label = label?;
|
||||
let runs = if label.code.is_empty() {
|
||||
Vec::new()
|
||||
} else {
|
||||
language.highlight_text(&label.code.as_str().into(), 0..label.code.len())
|
||||
};
|
||||
build_code_label(&label, &runs, &language)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn build_code_label(
|
||||
label: &wit::CodeLabel,
|
||||
parsed_runs: &[(Range<usize>, HighlightId)],
|
||||
language: &Arc<Language>,
|
||||
) -> Option<CodeLabel> {
|
||||
let mut text = String::new();
|
||||
let mut runs = vec![];
|
||||
|
||||
for span in &label.spans {
|
||||
match span {
|
||||
wit::CodeLabelSpan::CodeRange(range) => {
|
||||
let range = Range::from(*range);
|
||||
let code_span = &label.code.get(range.clone())?;
|
||||
let mut input_ix = range.start;
|
||||
let mut output_ix = text.len();
|
||||
for (run_range, id) in parsed_runs {
|
||||
if run_range.start >= range.end {
|
||||
break;
|
||||
}
|
||||
if run_range.end <= input_ix {
|
||||
continue;
|
||||
}
|
||||
|
||||
if run_range.start > input_ix {
|
||||
let len = run_range.start - input_ix;
|
||||
output_ix += len;
|
||||
input_ix += len;
|
||||
}
|
||||
|
||||
let len = range.end.min(run_range.end) - input_ix;
|
||||
runs.push((output_ix..output_ix + len, *id));
|
||||
output_ix += len;
|
||||
input_ix += len;
|
||||
}
|
||||
|
||||
text.push_str(code_span);
|
||||
}
|
||||
wit::CodeLabelSpan::Literal(span) => {
|
||||
let highlight_id = language
|
||||
.grammar()
|
||||
.zip(span.highlight_name.as_ref())
|
||||
.and_then(|(grammar, highlight_name)| {
|
||||
grammar.highlight_id_for_name(&highlight_name)
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let ix = text.len();
|
||||
runs.push((ix..ix + span.text.len(), highlight_id));
|
||||
text.push_str(&span.text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let filter_range = Range::from(label.filter_range);
|
||||
text.get(filter_range.clone())?;
|
||||
Some(CodeLabel {
|
||||
text,
|
||||
runs,
|
||||
filter_range,
|
||||
})
|
||||
}
|
||||
|
||||
impl From<wit::Range> for Range<usize> {
|
||||
fn from(range: wit::Range) -> Self {
|
||||
let start = range.start as usize;
|
||||
let end = range.end as usize;
|
||||
start..end
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::CompletionItem> for wit::Completion {
|
||||
fn from(value: lsp::CompletionItem) -> Self {
|
||||
Self {
|
||||
label: value.label,
|
||||
detail: value.detail,
|
||||
kind: value.kind.map(Into::into),
|
||||
insert_text_format: value.insert_text_format.map(Into::into),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::CompletionItemKind> for wit::CompletionKind {
|
||||
fn from(value: lsp::CompletionItemKind) -> Self {
|
||||
match value {
|
||||
lsp::CompletionItemKind::TEXT => Self::Text,
|
||||
lsp::CompletionItemKind::METHOD => Self::Method,
|
||||
lsp::CompletionItemKind::FUNCTION => Self::Function,
|
||||
lsp::CompletionItemKind::CONSTRUCTOR => Self::Constructor,
|
||||
lsp::CompletionItemKind::FIELD => Self::Field,
|
||||
lsp::CompletionItemKind::VARIABLE => Self::Variable,
|
||||
lsp::CompletionItemKind::CLASS => Self::Class,
|
||||
lsp::CompletionItemKind::INTERFACE => Self::Interface,
|
||||
lsp::CompletionItemKind::MODULE => Self::Module,
|
||||
lsp::CompletionItemKind::PROPERTY => Self::Property,
|
||||
lsp::CompletionItemKind::UNIT => Self::Unit,
|
||||
lsp::CompletionItemKind::VALUE => Self::Value,
|
||||
lsp::CompletionItemKind::ENUM => Self::Enum,
|
||||
lsp::CompletionItemKind::KEYWORD => Self::Keyword,
|
||||
lsp::CompletionItemKind::SNIPPET => Self::Snippet,
|
||||
lsp::CompletionItemKind::COLOR => Self::Color,
|
||||
lsp::CompletionItemKind::FILE => Self::File,
|
||||
lsp::CompletionItemKind::REFERENCE => Self::Reference,
|
||||
lsp::CompletionItemKind::FOLDER => Self::Folder,
|
||||
lsp::CompletionItemKind::ENUM_MEMBER => Self::EnumMember,
|
||||
lsp::CompletionItemKind::CONSTANT => Self::Constant,
|
||||
lsp::CompletionItemKind::STRUCT => Self::Struct,
|
||||
lsp::CompletionItemKind::EVENT => Self::Event,
|
||||
lsp::CompletionItemKind::OPERATOR => Self::Operator,
|
||||
lsp::CompletionItemKind::TYPE_PARAMETER => Self::TypeParameter,
|
||||
_ => Self::Other(extract_int(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::InsertTextFormat> for wit::InsertTextFormat {
|
||||
fn from(value: lsp::InsertTextFormat) -> Self {
|
||||
match value {
|
||||
lsp::InsertTextFormat::PLAIN_TEXT => Self::PlainText,
|
||||
lsp::InsertTextFormat::SNIPPET => Self::Snippet,
|
||||
_ => Self::Other(extract_int(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::SymbolKind> for wit::SymbolKind {
|
||||
fn from(value: lsp::SymbolKind) -> Self {
|
||||
match value {
|
||||
lsp::SymbolKind::FILE => Self::File,
|
||||
lsp::SymbolKind::MODULE => Self::Module,
|
||||
lsp::SymbolKind::NAMESPACE => Self::Namespace,
|
||||
lsp::SymbolKind::PACKAGE => Self::Package,
|
||||
lsp::SymbolKind::CLASS => Self::Class,
|
||||
lsp::SymbolKind::METHOD => Self::Method,
|
||||
lsp::SymbolKind::PROPERTY => Self::Property,
|
||||
lsp::SymbolKind::FIELD => Self::Field,
|
||||
lsp::SymbolKind::CONSTRUCTOR => Self::Constructor,
|
||||
lsp::SymbolKind::ENUM => Self::Enum,
|
||||
lsp::SymbolKind::INTERFACE => Self::Interface,
|
||||
lsp::SymbolKind::FUNCTION => Self::Function,
|
||||
lsp::SymbolKind::VARIABLE => Self::Variable,
|
||||
lsp::SymbolKind::CONSTANT => Self::Constant,
|
||||
lsp::SymbolKind::STRING => Self::String,
|
||||
lsp::SymbolKind::NUMBER => Self::Number,
|
||||
lsp::SymbolKind::BOOLEAN => Self::Boolean,
|
||||
lsp::SymbolKind::ARRAY => Self::Array,
|
||||
lsp::SymbolKind::OBJECT => Self::Object,
|
||||
lsp::SymbolKind::KEY => Self::Key,
|
||||
lsp::SymbolKind::NULL => Self::Null,
|
||||
lsp::SymbolKind::ENUM_MEMBER => Self::EnumMember,
|
||||
lsp::SymbolKind::STRUCT => Self::Struct,
|
||||
lsp::SymbolKind::EVENT => Self::Event,
|
||||
lsp::SymbolKind::OPERATOR => Self::Operator,
|
||||
lsp::SymbolKind::TYPE_PARAMETER => Self::TypeParameter,
|
||||
_ => Self::Other(extract_int(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_int<T: Serialize>(value: T) -> i32 {
|
||||
maybe!({
|
||||
let kind = serde_json::to_value(&value)?;
|
||||
serde_json::from_value(kind)
|
||||
})
|
||||
.log_err()
|
||||
.unwrap_or(-1)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_code_label() {
|
||||
use util::test::marked_text_ranges;
|
||||
|
||||
let (code, code_ranges) = marked_text_ranges(
|
||||
"«const» «a»: «fn»(«Bcd»(«Efgh»)) -> «Ijklm» = pqrs.tuv",
|
||||
false,
|
||||
);
|
||||
let code_runs = code_ranges
|
||||
.into_iter()
|
||||
.map(|range| (range, HighlightId(0)))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let label = build_code_label(
|
||||
&wit::CodeLabel {
|
||||
spans: vec![
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find("pqrs").unwrap() as u32,
|
||||
end: code.len() as u32,
|
||||
}),
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find(": fn").unwrap() as u32,
|
||||
end: code.find(" = ").unwrap() as u32,
|
||||
}),
|
||||
],
|
||||
filter_range: wit::Range {
|
||||
start: 0,
|
||||
end: "pqrs.tuv".len() as u32,
|
||||
},
|
||||
code,
|
||||
},
|
||||
&code_runs,
|
||||
&language::PLAIN_TEXT,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let (label_text, label_ranges) =
|
||||
marked_text_ranges("pqrs.tuv: «fn»(«Bcd»(«Efgh»)) -> «Ijklm»", false);
|
||||
let label_runs = label_ranges
|
||||
.into_iter()
|
||||
.map(|range| (range, HighlightId(0)))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(
|
||||
label,
|
||||
CodeLabel {
|
||||
text: label_text,
|
||||
runs: label_runs,
|
||||
filter_range: label.filter_range.clone()
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_code_label_with_invalid_ranges() {
|
||||
use util::test::marked_text_ranges;
|
||||
|
||||
let (code, code_ranges) = marked_text_ranges("const «a»: «B» = '🏀'", false);
|
||||
let code_runs = code_ranges
|
||||
.into_iter()
|
||||
.map(|range| (range, HighlightId(0)))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// A span uses a code range that is invalid because it starts inside of
|
||||
// a multi-byte character.
|
||||
let label = build_code_label(
|
||||
&wit::CodeLabel {
|
||||
spans: vec![
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find('B').unwrap() as u32,
|
||||
end: code.find(" = ").unwrap() as u32,
|
||||
}),
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find('🏀').unwrap() as u32 + 1,
|
||||
end: code.len() as u32,
|
||||
}),
|
||||
],
|
||||
filter_range: wit::Range {
|
||||
start: 0,
|
||||
end: "B".len() as u32,
|
||||
},
|
||||
code,
|
||||
},
|
||||
&code_runs,
|
||||
&language::PLAIN_TEXT,
|
||||
);
|
||||
assert!(label.is_none());
|
||||
|
||||
// Filter range extends beyond actual text
|
||||
let label = build_code_label(
|
||||
&wit::CodeLabel {
|
||||
spans: vec![wit::CodeLabelSpan::Literal(wit::CodeLabelSpanLiteral {
|
||||
text: "abc".into(),
|
||||
highlight_name: Some("type".into()),
|
||||
})],
|
||||
filter_range: wit::Range { start: 0, end: 5 },
|
||||
code: String::new(),
|
||||
},
|
||||
&code_runs,
|
||||
&language::PLAIN_TEXT,
|
||||
);
|
||||
assert!(label.is_none());
|
||||
}
|
||||
|
||||
@@ -98,11 +98,34 @@ pub struct GrammarManifestEntry {
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
|
||||
pub struct LanguageServerManifestEntry {
|
||||
pub language: Arc<str>,
|
||||
/// Deprecated in favor of `languages`.
|
||||
#[serde(default)]
|
||||
language: Option<Arc<str>>,
|
||||
/// The list of languages this language server should work with.
|
||||
#[serde(default)]
|
||||
languages: Vec<Arc<str>>,
|
||||
#[serde(default)]
|
||||
pub language_ids: HashMap<String, String>,
|
||||
}
|
||||
|
||||
impl LanguageServerManifestEntry {
|
||||
/// Returns the list of languages for the language server.
|
||||
///
|
||||
/// Prefer this over accessing the `language` or `languages` fields directly,
|
||||
/// as we currently support both.
|
||||
///
|
||||
/// We can replace this with just field access for the `languages` field once
|
||||
/// we have removed `language`.
|
||||
pub fn languages(&self) -> impl IntoIterator<Item = Arc<str>> + '_ {
|
||||
let language = if self.languages.is_empty() {
|
||||
self.language.clone()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
self.languages.iter().cloned().chain(language)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExtensionManifest {
|
||||
pub async fn load(fs: Arc<dyn Fs>, extension_dir: &Path) -> Result<Self> {
|
||||
let extension_name = extension_dir
|
||||
|
||||
@@ -3,7 +3,7 @@ use collections::HashMap;
|
||||
use gpui::AppContext;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
use settings::{Settings, SettingsSources};
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
|
||||
@@ -26,14 +26,7 @@ impl Settings for ExtensionSettings {
|
||||
|
||||
type FileContent = Self;
|
||||
|
||||
fn load(
|
||||
_default_value: &Self::FileContent,
|
||||
user_values: &[&Self::FileContent],
|
||||
_cx: &mut AppContext,
|
||||
) -> Result<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
Ok(user_values.get(0).copied().cloned().unwrap_or_default())
|
||||
fn load(sources: SettingsSources<Self::FileContent>, _cx: &mut AppContext) -> Result<Self> {
|
||||
Ok(sources.user.cloned().unwrap_or_default())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -237,6 +237,7 @@ impl ExtensionStore {
|
||||
node_runtime,
|
||||
language_registry.clone(),
|
||||
work_dir,
|
||||
cx,
|
||||
),
|
||||
wasm_extensions: Vec::new(),
|
||||
fs,
|
||||
@@ -605,7 +606,22 @@ impl ExtensionStore {
|
||||
)
|
||||
.await?;
|
||||
|
||||
let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
|
||||
let content_length = response
|
||||
.headers()
|
||||
.get(isahc::http::header::CONTENT_LENGTH)
|
||||
.and_then(|value| value.to_str().ok()?.parse::<usize>().ok());
|
||||
|
||||
let mut body = BufReader::new(response.body_mut());
|
||||
let mut tar_gz_bytes = Vec::new();
|
||||
body.read_to_end(&mut tar_gz_bytes).await?;
|
||||
|
||||
if let Some(content_length) = content_length {
|
||||
let actual_len = tar_gz_bytes.len();
|
||||
if content_length != actual_len {
|
||||
bail!("downloaded extension size {actual_len} does not match content length {content_length}");
|
||||
}
|
||||
}
|
||||
let decompressed_bytes = GzipDecoder::new(BufReader::new(tar_gz_bytes.as_slice()));
|
||||
let archive = Archive::new(decompressed_bytes);
|
||||
archive.unpack(extension_dir).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
@@ -961,8 +977,10 @@ impl ExtensionStore {
|
||||
};
|
||||
grammars_to_remove.extend(extension.manifest.grammars.keys().cloned());
|
||||
for (language_server_name, config) in extension.manifest.language_servers.iter() {
|
||||
self.language_registry
|
||||
.remove_lsp_adapter(config.language.as_ref(), language_server_name);
|
||||
for language in config.languages() {
|
||||
self.language_registry
|
||||
.remove_lsp_adapter(&language, language_server_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1101,19 +1119,21 @@ impl ExtensionStore {
|
||||
this.reload_complete_senders.clear();
|
||||
|
||||
for (manifest, wasm_extension) in &wasm_extensions {
|
||||
for (language_server_name, language_server_config) in &manifest.language_servers
|
||||
{
|
||||
this.language_registry.register_lsp_adapter(
|
||||
language_server_config.language.clone(),
|
||||
Arc::new(ExtensionLspAdapter {
|
||||
extension: wasm_extension.clone(),
|
||||
host: this.wasm_host.clone(),
|
||||
config: wit::LanguageServerConfig {
|
||||
name: language_server_name.0.to_string(),
|
||||
language_name: language_server_config.language.to_string(),
|
||||
},
|
||||
}),
|
||||
);
|
||||
for (language_server_id, language_server_config) in &manifest.language_servers {
|
||||
for language in language_server_config.languages() {
|
||||
this.language_registry.register_lsp_adapter(
|
||||
language.clone(),
|
||||
Arc::new(ExtensionLspAdapter {
|
||||
extension: wasm_extension.clone(),
|
||||
host: this.wasm_host.clone(),
|
||||
language_server_id: language_server_id.clone(),
|
||||
config: wit::LanguageServerConfig {
|
||||
name: language_server_id.0.to_string(),
|
||||
language_name: language.to_string(),
|
||||
},
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
this.wasm_extensions.extend(wasm_extensions);
|
||||
|
||||
@@ -619,6 +619,53 @@ async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
|
||||
]
|
||||
);
|
||||
|
||||
// The extension creates custom labels for completion items.
|
||||
fake_server.handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
|
||||
Ok(Some(lsp::CompletionResponse::Array(vec![
|
||||
lsp::CompletionItem {
|
||||
label: "foo".into(),
|
||||
kind: Some(lsp::CompletionItemKind::FUNCTION),
|
||||
detail: Some("fn() -> Result(Nil, Error)".into()),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::CompletionItem {
|
||||
label: "bar.baz".into(),
|
||||
kind: Some(lsp::CompletionItemKind::FUNCTION),
|
||||
detail: Some("fn(List(a)) -> a".into()),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::CompletionItem {
|
||||
label: "Quux".into(),
|
||||
kind: Some(lsp::CompletionItemKind::CONSTRUCTOR),
|
||||
detail: Some("fn(String) -> T".into()),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::CompletionItem {
|
||||
label: "my_string".into(),
|
||||
kind: Some(lsp::CompletionItemKind::CONSTANT),
|
||||
detail: Some("String".into()),
|
||||
..Default::default()
|
||||
},
|
||||
])))
|
||||
});
|
||||
|
||||
let completion_labels = project
|
||||
.update(cx, |project, cx| project.completions(&buffer, 0, cx))
|
||||
.await
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(|c| c.label.text)
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
completion_labels,
|
||||
[
|
||||
"foo: fn() -> Result(Nil, Error)".to_string(),
|
||||
"bar.baz: fn(List(a)) -> a".to_string(),
|
||||
"Quux: fn(String) -> T".to_string(),
|
||||
"my_string: String".to_string(),
|
||||
]
|
||||
);
|
||||
|
||||
// Simulate a new version of the language server being released
|
||||
language_server_version.lock().version = "v2.0.0".into();
|
||||
language_server_version.lock().binary_contents = "the-new-binary-contents".into();
|
||||
|
||||
@@ -3,6 +3,7 @@ pub(crate) mod wit;
|
||||
use crate::ExtensionManifest;
|
||||
use anyhow::{anyhow, bail, Context as _, Result};
|
||||
use fs::{normalize_path, Fs};
|
||||
use futures::future::LocalBoxFuture;
|
||||
use futures::{
|
||||
channel::{
|
||||
mpsc::{self, UnboundedSender},
|
||||
@@ -11,7 +12,7 @@ use futures::{
|
||||
future::BoxFuture,
|
||||
Future, FutureExt, StreamExt as _,
|
||||
};
|
||||
use gpui::BackgroundExecutor;
|
||||
use gpui::{AppContext, AsyncAppContext, BackgroundExecutor, Task};
|
||||
use language::LanguageRegistry;
|
||||
use node_runtime::NodeRuntime;
|
||||
use semantic_version::SemanticVersion;
|
||||
@@ -34,6 +35,8 @@ pub(crate) struct WasmHost {
|
||||
pub(crate) language_registry: Arc<LanguageRegistry>,
|
||||
fs: Arc<dyn Fs>,
|
||||
pub(crate) work_dir: PathBuf,
|
||||
_main_thread_message_task: Task<()>,
|
||||
main_thread_message_tx: mpsc::UnboundedSender<MainThreadCall>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -51,6 +54,9 @@ pub(crate) struct WasmState {
|
||||
pub(crate) host: Arc<WasmHost>,
|
||||
}
|
||||
|
||||
type MainThreadCall =
|
||||
Box<dyn Send + for<'a> FnOnce(&'a mut AsyncAppContext) -> LocalBoxFuture<'a, ()>>;
|
||||
|
||||
type ExtensionCall = Box<
|
||||
dyn Send + for<'a> FnOnce(&'a mut Extension, &'a mut Store<WasmState>) -> BoxFuture<'a, ()>,
|
||||
>;
|
||||
@@ -75,7 +81,14 @@ impl WasmHost {
|
||||
node_runtime: Arc<dyn NodeRuntime>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
work_dir: PathBuf,
|
||||
cx: &mut AppContext,
|
||||
) -> Arc<Self> {
|
||||
let (tx, mut rx) = mpsc::unbounded::<MainThreadCall>();
|
||||
let task = cx.spawn(|mut cx| async move {
|
||||
while let Some(message) = rx.next().await {
|
||||
message(&mut cx).await;
|
||||
}
|
||||
});
|
||||
Arc::new(Self {
|
||||
engine: wasm_engine(),
|
||||
fs,
|
||||
@@ -83,6 +96,8 @@ impl WasmHost {
|
||||
http_client,
|
||||
node_runtime,
|
||||
language_registry,
|
||||
_main_thread_message_task: task,
|
||||
main_thread_message_tx: tx,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -183,13 +198,15 @@ pub fn parse_wasm_extension_version(
|
||||
extension_id: &str,
|
||||
wasm_bytes: &[u8],
|
||||
) -> Result<SemanticVersion> {
|
||||
let mut version = None;
|
||||
|
||||
for part in wasmparser::Parser::new(0).parse_all(wasm_bytes) {
|
||||
if let wasmparser::Payload::CustomSection(s) = part? {
|
||||
if let wasmparser::Payload::CustomSection(s) =
|
||||
part.context("error parsing wasm extension")?
|
||||
{
|
||||
if s.name() == "zed:api-version" {
|
||||
let version = parse_wasm_extension_version_custom_section(s.data());
|
||||
if let Some(version) = version {
|
||||
return Ok(version);
|
||||
} else {
|
||||
version = parse_wasm_extension_version_custom_section(s.data());
|
||||
if version.is_none() {
|
||||
bail!(
|
||||
"extension {} has invalid zed:api-version section: {:?}",
|
||||
extension_id,
|
||||
@@ -199,7 +216,13 @@ pub fn parse_wasm_extension_version(
|
||||
}
|
||||
}
|
||||
}
|
||||
bail!("extension {} has no zed:api-version section", extension_id)
|
||||
|
||||
// The reason we wait until we're done parsing all of the Wasm bytes to return the version
|
||||
// is to work around a panic that can happen inside of Wasmtime when the bytes are invalid.
|
||||
//
|
||||
// By parsing the entirety of the Wasm bytes before we return, we're able to detect this problem
|
||||
// earlier as an `Err` rather than as a panic.
|
||||
version.ok_or_else(|| anyhow!("extension {} has no zed:api-version section", extension_id))
|
||||
}
|
||||
|
||||
fn parse_wasm_extension_version_custom_section(data: &[u8]) -> Option<SemanticVersion> {
|
||||
@@ -238,6 +261,26 @@ impl WasmExtension {
|
||||
}
|
||||
|
||||
impl WasmState {
|
||||
fn on_main_thread<T, Fn>(&self, f: Fn) -> impl 'static + Future<Output = T>
|
||||
where
|
||||
T: 'static + Send,
|
||||
Fn: 'static + Send + for<'a> FnOnce(&'a mut AsyncAppContext) -> LocalBoxFuture<'a, T>,
|
||||
{
|
||||
let (return_tx, return_rx) = oneshot::channel();
|
||||
self.host
|
||||
.main_thread_message_tx
|
||||
.clone()
|
||||
.unbounded_send(Box::new(move |cx| {
|
||||
async {
|
||||
let result = f(cx).await;
|
||||
return_tx.send(result).ok();
|
||||
}
|
||||
.boxed_local()
|
||||
}))
|
||||
.expect("main thread message channel should not be closed yet");
|
||||
async move { return_rx.await.expect("main thread message channel") }
|
||||
}
|
||||
|
||||
fn work_dir(&self) -> PathBuf {
|
||||
self.host.work_dir.join(self.manifest.id.as_ref())
|
||||
}
|
||||
|
||||
@@ -1,20 +1,25 @@
|
||||
mod since_v0_0_1;
|
||||
mod since_v0_0_4;
|
||||
mod since_v0_0_6;
|
||||
use since_v0_0_6 as latest;
|
||||
|
||||
use super::{wasm_engine, WasmState};
|
||||
use anyhow::{Context, Result};
|
||||
use language::LspAdapterDelegate;
|
||||
use language::{LanguageServerName, LspAdapterDelegate};
|
||||
use semantic_version::SemanticVersion;
|
||||
use std::ops::RangeInclusive;
|
||||
use std::sync::Arc;
|
||||
use std::{ops::RangeInclusive, sync::Arc};
|
||||
use wasmtime::{
|
||||
component::{Component, Instance, Linker, Resource},
|
||||
Store,
|
||||
};
|
||||
|
||||
use since_v0_0_4 as latest;
|
||||
|
||||
pub use latest::{Command, LanguageServerConfig};
|
||||
#[cfg(test)]
|
||||
pub use latest::CodeLabelSpanLiteral;
|
||||
pub use latest::{
|
||||
zed::extension::lsp::{Completion, CompletionKind, InsertTextFormat, Symbol, SymbolKind},
|
||||
CodeLabel, CodeLabelSpan, Command, Range,
|
||||
};
|
||||
pub use since_v0_0_4::LanguageServerConfig;
|
||||
|
||||
pub fn new_linker(
|
||||
f: impl Fn(&mut Linker<WasmState>, fn(&mut WasmState) -> &mut WasmState) -> Result<()>,
|
||||
@@ -41,6 +46,7 @@ pub fn wasm_api_version_range() -> RangeInclusive<SemanticVersion> {
|
||||
}
|
||||
|
||||
pub enum Extension {
|
||||
V006(since_v0_0_6::Extension),
|
||||
V004(since_v0_0_4::Extension),
|
||||
    V001(since_v0_0_1::Extension),
}
@@ -51,16 +57,13 @@ impl Extension {
        version: SemanticVersion,
        component: &Component,
    ) -> Result<(Self, Instance)> {
        if version < latest::MIN_VERSION {
            let (extension, instance) = since_v0_0_1::Extension::instantiate_async(
                store,
                &component,
                since_v0_0_1::linker(),
            )
            .await
            .context("failed to instantiate wasm extension")?;
            Ok((Self::V001(extension), instance))
        } else {
        if version >= latest::MIN_VERSION {
            let (extension, instance) =
                latest::Extension::instantiate_async(store, &component, latest::linker())
                    .await
                    .context("failed to instantiate wasm extension")?;
            Ok((Self::V006(extension), instance))
        } else if version >= since_v0_0_4::MIN_VERSION {
            let (extension, instance) = since_v0_0_4::Extension::instantiate_async(
                store,
                &component,
@@ -69,11 +72,21 @@ impl Extension {
            .await
            .context("failed to instantiate wasm extension")?;
            Ok((Self::V004(extension), instance))
        } else {
            let (extension, instance) = since_v0_0_1::Extension::instantiate_async(
                store,
                &component,
                since_v0_0_1::linker(),
            )
            .await
            .context("failed to instantiate wasm extension")?;
            Ok((Self::V001(extension), instance))
        }
    }

    pub async fn call_init_extension(&self, store: &mut Store<WasmState>) -> Result<()> {
        match self {
            Extension::V006(ext) => ext.call_init_extension(store).await,
            Extension::V004(ext) => ext.call_init_extension(store).await,
            Extension::V001(ext) => ext.call_init_extension(store).await,
        }
@@ -82,14 +95,19 @@ impl Extension {
    pub async fn call_language_server_command(
        &self,
        store: &mut Store<WasmState>,
        language_server_id: &LanguageServerName,
        config: &LanguageServerConfig,
        resource: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> Result<Result<Command, String>> {
        match self {
            Extension::V004(ext) => {
                ext.call_language_server_command(store, config, resource)
            Extension::V006(ext) => {
                ext.call_language_server_command(store, &language_server_id.0, resource)
                    .await
            }
            Extension::V004(ext) => Ok(ext
                .call_language_server_command(store, config, resource)
                .await?
                .map(|command| command.into())),
            Extension::V001(ext) => Ok(ext
                .call_language_server_command(store, &config.clone().into(), resource)
                .await?
@@ -100,10 +118,19 @@ impl Extension {
    pub async fn call_language_server_initialization_options(
        &self,
        store: &mut Store<WasmState>,
        language_server_id: &LanguageServerName,
        config: &LanguageServerConfig,
        resource: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> Result<Result<Option<String>, String>> {
        match self {
            Extension::V006(ext) => {
                ext.call_language_server_initialization_options(
                    store,
                    &language_server_id.0,
                    resource,
                )
                .await
            }
            Extension::V004(ext) => {
                ext.call_language_server_initialization_options(store, config, resource)
                    .await
@@ -118,6 +145,55 @@ impl Extension {
            }
        }
    }

    pub async fn call_language_server_workspace_configuration(
        &self,
        store: &mut Store<WasmState>,
        language_server_id: &LanguageServerName,
        resource: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> Result<Result<Option<String>, String>> {
        match self {
            Extension::V006(ext) => {
                ext.call_language_server_workspace_configuration(
                    store,
                    &language_server_id.0,
                    resource,
                )
                .await
            }
            Extension::V004(_) | Extension::V001(_) => Ok(Ok(None)),
        }
    }

    pub async fn call_labels_for_completions(
        &self,
        store: &mut Store<WasmState>,
        language_server_id: &LanguageServerName,
        completions: Vec<latest::Completion>,
    ) -> Result<Result<Vec<Option<CodeLabel>>, String>> {
        match self {
            Extension::V001(_) | Extension::V004(_) => Ok(Ok(Vec::new())),
            Extension::V006(ext) => {
                ext.call_labels_for_completions(store, &language_server_id.0, &completions)
                    .await
            }
        }
    }

    pub async fn call_labels_for_symbols(
        &self,
        store: &mut Store<WasmState>,
        language_server_id: &LanguageServerName,
        symbols: Vec<latest::Symbol>,
    ) -> Result<Result<Vec<Option<CodeLabel>>, String>> {
        match self {
            Extension::V001(_) | Extension::V004(_) => Ok(Ok(Vec::new())),
            Extension::V006(ext) => {
                ext.call_labels_for_symbols(store, &language_server_id.0, &symbols)
                    .await
            }
        }
    }
}

trait ToWasmtimeResult<T> {
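
The hunks above add a `V006` variant to the `Extension` enum and dispatch each host-side call to whichever generated bindings match the extension's declared API version, falling back to empty defaults for versions that predate a capability. A minimal, self-contained sketch of that dispatch pattern (the `ApiV1`/`ApiV6` types and `Version` struct are hypothetical stand-ins, not the real wasmtime bindings):

// Sketch of version-gated dispatch: pick the newest bindings the extension
// supports, and return harmless defaults for calls older versions cannot answer.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct Version(u8, u8, u8);

struct ApiV1; // oldest bindings: no label support
struct ApiV6; // latest bindings: adds label support

impl ApiV6 {
    fn labels_for_symbols(&self, symbols: &[String]) -> Vec<Option<String>> {
        symbols.iter().map(|s| Some(format!("symbol {s}"))).collect()
    }
}

enum Extension {
    V1(ApiV1),
    V6(ApiV6),
}

impl Extension {
    const V6_MIN: Version = Version(0, 0, 6);

    // Mirror of the `instantiate_async` branching: newest bindings first.
    fn instantiate(version: Version) -> Self {
        if version >= Self::V6_MIN {
            Extension::V6(ApiV6)
        } else {
            Extension::V1(ApiV1)
        }
    }

    // Versions that predate the capability return an empty default,
    // like the `Ok(Ok(Vec::new()))` arms in the diff above.
    fn labels_for_symbols(&self, symbols: &[String]) -> Vec<Option<String>> {
        match self {
            Extension::V1(_) => Vec::new(),
            Extension::V6(ext) => ext.labels_for_symbols(symbols),
        }
    }
}

fn main() {
    let old = Extension::instantiate(Version(0, 0, 1));
    let new = Extension::instantiate(Version(0, 0, 6));
    assert!(old.labels_for_symbols(&["main".into()]).is_empty());
    assert_eq!(new.labels_for_symbols(&["main".into()]).len(), 1);
}
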
@@ -1,4 +1,5 @@
use super::latest;
use crate::wasm_host::wit::since_v0_0_4;
use crate::wasm_host::WasmState;
use anyhow::Result;
use async_trait::async_trait;
@@ -14,6 +15,8 @@ wasmtime::component::bindgen!({
    path: "../extension_api/wit/since_v0.0.1",
    with: {
        "worktree": ExtensionWorktree,
        "zed:extension/github": latest::zed::extension::github,
        "zed:extension/platform": latest::zed::extension::platform,
    },
});

@@ -24,53 +27,6 @@ pub fn linker() -> &'static Linker<WasmState> {
    LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker))
}

impl From<latest::Os> for Os {
    fn from(value: latest::Os) -> Self {
        match value {
            latest::Os::Mac => Os::Mac,
            latest::Os::Linux => Os::Linux,
            latest::Os::Windows => Os::Windows,
        }
    }
}

impl From<latest::Architecture> for Architecture {
    fn from(value: latest::Architecture) -> Self {
        match value {
            latest::Architecture::Aarch64 => Self::Aarch64,
            latest::Architecture::X86 => Self::X86,
            latest::Architecture::X8664 => Self::X8664,
        }
    }
}

impl From<latest::GithubRelease> for GithubRelease {
    fn from(value: latest::GithubRelease) -> Self {
        Self {
            version: value.version,
            assets: value.assets.into_iter().map(|asset| asset.into()).collect(),
        }
    }
}

impl From<latest::GithubReleaseAsset> for GithubReleaseAsset {
    fn from(value: latest::GithubReleaseAsset) -> Self {
        Self {
            name: value.name,
            download_url: value.download_url,
        }
    }
}

impl From<GithubReleaseOptions> for latest::GithubReleaseOptions {
    fn from(value: GithubReleaseOptions) -> Self {
        Self {
            require_assets: value.require_assets,
            pre_release: value.pre_release,
        }
    }
}

impl From<DownloadedFileType> for latest::DownloadedFileType {
    fn from(value: DownloadedFileType) -> Self {
        match value {
@@ -82,8 +38,8 @@ impl From<DownloadedFileType> for latest::DownloadedFileType {
    }
}

impl From<latest::LanguageServerConfig> for LanguageServerConfig {
    fn from(value: latest::LanguageServerConfig) -> Self {
impl From<since_v0_0_4::LanguageServerConfig> for LanguageServerConfig {
    fn from(value: since_v0_0_4::LanguageServerConfig) -> Self {
        Self {
            name: value.name,
            language_name: value.language_name,
@@ -134,21 +90,21 @@ impl HostWorktree for WasmState {
#[async_trait]
impl ExtensionImports for WasmState {
    async fn node_binary_path(&mut self) -> wasmtime::Result<Result<String, String>> {
        latest::ExtensionImports::node_binary_path(self).await
        latest::nodejs::Host::node_binary_path(self).await
    }

    async fn npm_package_latest_version(
        &mut self,
        package_name: String,
    ) -> wasmtime::Result<Result<String, String>> {
        latest::ExtensionImports::npm_package_latest_version(self, package_name).await
        latest::nodejs::Host::npm_package_latest_version(self, package_name).await
    }

    async fn npm_package_installed_version(
        &mut self,
        package_name: String,
    ) -> wasmtime::Result<Result<Option<String>, String>> {
        latest::ExtensionImports::npm_package_installed_version(self, package_name).await
        latest::nodejs::Host::npm_package_installed_version(self, package_name).await
    }

    async fn npm_install_package(
@@ -156,7 +112,7 @@ impl ExtensionImports for WasmState {
        package_name: String,
        version: String,
    ) -> wasmtime::Result<Result<(), String>> {
        latest::ExtensionImports::npm_install_package(self, package_name, version).await
        latest::nodejs::Host::npm_install_package(self, package_name, version).await
    }

    async fn latest_github_release(
@@ -164,17 +120,11 @@ impl ExtensionImports for WasmState {
        repo: String,
        options: GithubReleaseOptions,
    ) -> wasmtime::Result<Result<GithubRelease, String>> {
        Ok(
            latest::ExtensionImports::latest_github_release(self, repo, options.into())
                .await?
                .map(|github| github.into()),
        )
        latest::zed::extension::github::Host::latest_github_release(self, repo, options).await
    }

    async fn current_platform(&mut self) -> Result<(Os, Architecture)> {
        latest::ExtensionImports::current_platform(self)
            .await
            .map(|(os, arch)| (os.into(), arch.into()))
        latest::zed::extension::platform::Host::current_platform(self).await
    }

    async fn set_language_server_installation_status(
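
In the hunks above, the since_v0.0.1 host imports stop carrying their own type conversions and instead delegate straight to the latest `Host` traits, with `From` impls bridging the versioned argument and return types. A rough standalone sketch of that delegation pattern (the `OldHost`/`LatestHost` traits and status enums are hypothetical, not the generated bindings):

// Sketch of keeping the real logic only in the latest trait implementation
// and having the older trait convert its types and forward.
#[derive(Debug, PartialEq)]
enum LatestStatus {
    None,
    Downloading,
    Failed(String),
}

#[derive(Debug)]
enum OldStatus {
    None,
    Downloading,
    Failed(String),
}

impl From<OldStatus> for LatestStatus {
    fn from(value: OldStatus) -> Self {
        match value {
            OldStatus::None => LatestStatus::None,
            OldStatus::Downloading => LatestStatus::Downloading,
            OldStatus::Failed(error) => LatestStatus::Failed(error),
        }
    }
}

trait LatestHost {
    fn set_status(&mut self, server: String, status: LatestStatus);
}

trait OldHost {
    fn set_status(&mut self, server: String, status: OldStatus);
}

struct HostState {
    last: Option<(String, LatestStatus)>,
}

impl LatestHost for HostState {
    fn set_status(&mut self, server: String, status: LatestStatus) {
        self.last = Some((server, status));
    }
}

// The old trait converts its types and delegates, so behaviour lives in one place.
impl OldHost for HostState {
    fn set_status(&mut self, server: String, status: OldStatus) {
        LatestHost::set_status(self, server, status.into());
    }
}

fn main() {
    let mut host = HostState { last: None };
    OldHost::set_status(&mut host, "rust-analyzer".into(), OldStatus::Downloading);
    assert_eq!(host.last.unwrap().1, LatestStatus::Downloading);
}
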
@@ -1,29 +1,21 @@
use crate::wasm_host::wit::ToWasmtimeResult;
use super::latest;
use crate::wasm_host::WasmState;
use anyhow::{anyhow, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use anyhow::Result;
use async_trait::async_trait;
use futures::io::BufReader;
use language::{LanguageServerBinaryStatus, LspAdapterDelegate};
use language::LspAdapterDelegate;
use semantic_version::SemanticVersion;
use std::path::Path;
use std::{
    env,
    path::PathBuf,
    sync::{Arc, OnceLock},
};
use util::maybe;
use std::sync::{Arc, OnceLock};
use wasmtime::component::{Linker, Resource};

pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 4);
pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 5);

wasmtime::component::bindgen!({
    async: true,
    path: "../extension_api/wit/since_v0.0.4",
    with: {
        "worktree": ExtensionWorktree,
        "zed:extension/github": latest::zed::extension::github,
        "zed:extension/platform": latest::zed::extension::platform,
    },
});

@@ -34,6 +26,46 @@ pub fn linker() -> &'static Linker<WasmState> {
    LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker))
}

impl From<DownloadedFileType> for latest::DownloadedFileType {
    fn from(value: DownloadedFileType) -> Self {
        match value {
            DownloadedFileType::Gzip => latest::DownloadedFileType::Gzip,
            DownloadedFileType::GzipTar => latest::DownloadedFileType::GzipTar,
            DownloadedFileType::Zip => latest::DownloadedFileType::Zip,
            DownloadedFileType::Uncompressed => latest::DownloadedFileType::Uncompressed,
        }
    }
}

impl From<LanguageServerInstallationStatus> for latest::LanguageServerInstallationStatus {
    fn from(value: LanguageServerInstallationStatus) -> Self {
        match value {
            LanguageServerInstallationStatus::None => {
                latest::LanguageServerInstallationStatus::None
            }
            LanguageServerInstallationStatus::Downloading => {
                latest::LanguageServerInstallationStatus::Downloading
            }
            LanguageServerInstallationStatus::CheckingForUpdate => {
                latest::LanguageServerInstallationStatus::CheckingForUpdate
            }
            LanguageServerInstallationStatus::Failed(error) => {
                latest::LanguageServerInstallationStatus::Failed(error)
            }
        }
    }
}

impl From<Command> for latest::Command {
    fn from(value: Command) -> Self {
        Self {
            command: value.command,
            args: value.args,
            env: value.env,
        }
    }
}

#[async_trait]
impl HostWorktree for WasmState {
    async fn read_text_file(
@@ -41,19 +73,14 @@ impl HostWorktree for WasmState {
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        path: String,
    ) -> wasmtime::Result<Result<String, String>> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate
            .read_text_file(path.into())
            .await
            .map_err(|error| error.to_string()))
        latest::HostWorktree::read_text_file(self, delegate, path).await
    }

    async fn shell_env(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<EnvVars> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate.shell_env().await.into_iter().collect())
        latest::HostWorktree::shell_env(self, delegate).await
    }

    async fn which(
@@ -61,15 +88,11 @@ impl HostWorktree for WasmState {
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        binary_name: String,
    ) -> wasmtime::Result<Option<String>> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate
            .which(binary_name.as_ref())
            .await
            .map(|path| path.to_string_lossy().to_string()))
        latest::HostWorktree::which(self, delegate, binary_name).await
    }

    fn drop(&mut self, _worktree: Resource<Worktree>) -> Result<()> {
        // we only ever hand out borrows of worktrees
        // We only ever hand out borrows of worktrees.
        Ok(())
    }
}
@@ -77,34 +100,21 @@ impl HostWorktree for WasmState {
#[async_trait]
impl ExtensionImports for WasmState {
    async fn node_binary_path(&mut self) -> wasmtime::Result<Result<String, String>> {
        self.host
            .node_runtime
            .binary_path()
            .await
            .map(|path| path.to_string_lossy().to_string())
            .to_wasmtime_result()
        latest::nodejs::Host::node_binary_path(self).await
    }

    async fn npm_package_latest_version(
        &mut self,
        package_name: String,
    ) -> wasmtime::Result<Result<String, String>> {
        self.host
            .node_runtime
            .npm_package_latest_version(&package_name)
            .await
            .to_wasmtime_result()
        latest::nodejs::Host::npm_package_latest_version(self, package_name).await
    }

    async fn npm_package_installed_version(
        &mut self,
        package_name: String,
    ) -> wasmtime::Result<Result<Option<String>, String>> {
        self.host
            .node_runtime
            .npm_package_installed_version(&self.work_dir(), &package_name)
            .await
            .to_wasmtime_result()
        latest::nodejs::Host::npm_package_installed_version(self, package_name).await
    }

    async fn npm_install_package(
@@ -112,11 +122,7 @@ impl ExtensionImports for WasmState {
        package_name: String,
        version: String,
    ) -> wasmtime::Result<Result<(), String>> {
        self.host
            .node_runtime
            .npm_install_packages(&self.work_dir(), &[(&package_name, &version)])
            .await
            .to_wasmtime_result()
        latest::nodejs::Host::npm_install_package(self, package_name, version).await
    }

    async fn latest_github_release(
@@ -124,45 +130,11 @@ impl ExtensionImports for WasmState {
        repo: String,
        options: GithubReleaseOptions,
    ) -> wasmtime::Result<Result<GithubRelease, String>> {
        maybe!(async {
            let release = util::github::latest_github_release(
                &repo,
                options.require_assets,
                options.pre_release,
                self.host.http_client.clone(),
            )
            .await?;
            Ok(GithubRelease {
                version: release.tag_name,
                assets: release
                    .assets
                    .into_iter()
                    .map(|asset| GithubReleaseAsset {
                        name: asset.name,
                        download_url: asset.browser_download_url,
                    })
                    .collect(),
            })
        })
        .await
        .to_wasmtime_result()
        latest::zed::extension::github::Host::latest_github_release(self, repo, options).await
    }

    async fn current_platform(&mut self) -> Result<(Os, Architecture)> {
        Ok((
            match env::consts::OS {
                "macos" => Os::Mac,
                "linux" => Os::Linux,
                "windows" => Os::Windows,
                _ => panic!("unsupported os"),
            },
            match env::consts::ARCH {
                "aarch64" => Architecture::Aarch64,
                "x86" => Architecture::X86,
                "x86_64" => Architecture::X8664,
                _ => panic!("unsupported architecture"),
            },
        ))
        latest::zed::extension::platform::Host::current_platform(self).await
    }

    async fn set_language_server_installation_status(
@@ -170,23 +142,12 @@ impl ExtensionImports for WasmState {
        server_name: String,
        status: LanguageServerInstallationStatus,
    ) -> wasmtime::Result<()> {
        let status = match status {
            LanguageServerInstallationStatus::CheckingForUpdate => {
                LanguageServerBinaryStatus::CheckingForUpdate
            }
            LanguageServerInstallationStatus::Downloading => {
                LanguageServerBinaryStatus::Downloading
            }
            LanguageServerInstallationStatus::None => LanguageServerBinaryStatus::None,
            LanguageServerInstallationStatus::Failed(error) => {
                LanguageServerBinaryStatus::Failed { error }
            }
        };

        self.host
            .language_registry
            .update_lsp_status(language::LanguageServerName(server_name.into()), status);
        Ok(())
        latest::ExtensionImports::set_language_server_installation_status(
            self,
            server_name,
            status.into(),
        )
        .await
    }
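
The removed `current_platform` body above derives the host OS and architecture from `std::env::consts`. A standalone sketch of that mapping (the `Os`/`Architecture` enums here are local stand-ins for the generated WIT types):

// Map std::env::consts onto extension-facing platform enums.
#[derive(Debug)]
enum Os {
    Mac,
    Linux,
    Windows,
}

#[derive(Debug)]
enum Architecture {
    Aarch64,
    X86,
    X8664,
}

fn current_platform() -> (Os, Architecture) {
    let os = match std::env::consts::OS {
        "macos" => Os::Mac,
        "linux" => Os::Linux,
        "windows" => Os::Windows,
        other => panic!("unsupported os: {other}"),
    };
    let arch = match std::env::consts::ARCH {
        "aarch64" => Architecture::Aarch64,
        "x86" => Architecture::X86,
        "x86_64" => Architecture::X8664,
        other => panic!("unsupported architecture: {other}"),
    };
    (os, arch)
}

fn main() {
    let (os, arch) = current_platform();
    println!("running on {os:?} / {arch:?}");
}
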
    async fn download_file(
@@ -195,103 +156,10 @@ impl ExtensionImports for WasmState {
        path: String,
        file_type: DownloadedFileType,
    ) -> wasmtime::Result<Result<(), String>> {
        maybe!(async {
            let path = PathBuf::from(path);
            let extension_work_dir = self.host.work_dir.join(self.manifest.id.as_ref());

            self.host.fs.create_dir(&extension_work_dir).await?;

            let destination_path = self
                .host
                .writeable_path_from_extension(&self.manifest.id, &path)?;

            let mut response = self
                .host
                .http_client
                .get(&url, Default::default(), true)
                .await
                .map_err(|err| anyhow!("error downloading release: {}", err))?;

            if !response.status().is_success() {
                Err(anyhow!(
                    "download failed with status {}",
                    response.status().to_string()
                ))?;
            }
            let body = BufReader::new(response.body_mut());

            match file_type {
                DownloadedFileType::Uncompressed => {
                    futures::pin_mut!(body);
                    self.host
                        .fs
                        .create_file_with(&destination_path, body)
                        .await?;
                }
                DownloadedFileType::Gzip => {
                    let body = GzipDecoder::new(body);
                    futures::pin_mut!(body);
                    self.host
                        .fs
                        .create_file_with(&destination_path, body)
                        .await?;
                }
                DownloadedFileType::GzipTar => {
                    let body = GzipDecoder::new(body);
                    futures::pin_mut!(body);
                    self.host
                        .fs
                        .extract_tar_file(&destination_path, Archive::new(body))
                        .await?;
                }
                DownloadedFileType::Zip => {
                    let file_name = destination_path
                        .file_name()
                        .ok_or_else(|| anyhow!("invalid download path"))?
                        .to_string_lossy();
                    let zip_filename = format!("{file_name}.zip");
                    let mut zip_path = destination_path.clone();
                    zip_path.set_file_name(zip_filename);

                    futures::pin_mut!(body);
                    self.host.fs.create_file_with(&zip_path, body).await?;

                    let unzip_status = std::process::Command::new("unzip")
                        .current_dir(&extension_work_dir)
                        .arg("-d")
                        .arg(&destination_path)
                        .arg(&zip_path)
                        .output()?
                        .status;
                    if !unzip_status.success() {
                        Err(anyhow!("failed to unzip {} archive", path.display()))?;
                    }
                }
            }

            Ok(())
        })
        .await
        .to_wasmtime_result()
        latest::ExtensionImports::download_file(self, url, path, file_type.into()).await
    }

    async fn make_file_executable(&mut self, path: String) -> wasmtime::Result<Result<(), String>> {
        #[allow(unused)]
        let path = self
            .host
            .writeable_path_from_extension(&self.manifest.id, Path::new(&path))?;

        #[cfg(unix)]
        {
            use std::fs::{self, Permissions};
            use std::os::unix::fs::PermissionsExt;

            return fs::set_permissions(&path, Permissions::from_mode(0o755))
                .map_err(|error| anyhow!("failed to set permissions for path {path:?}: {error}"))
                .to_wasmtime_result();
        }

        #[cfg(not(unix))]
        Ok(Ok(()))
        latest::ExtensionImports::make_file_executable(self, path).await
    }
}
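
Throughout these files the host functions return a nested `wasmtime::Result<Result<T, String>>`: roughly, the outer layer is a host-side failure, while the inner `Result<_, String>` carries an error the guest extension can observe and handle, which appears to be the shape a helper like the `ToWasmtimeResult` trait referenced above produces. A minimal std-only sketch of that convention (the `to_host_result` helper is hypothetical, for illustration only):

// Nested-result sketch: outer Result for host failures, inner Result<_, String>
// for errors surfaced to the guest as plain strings.
use std::fmt::Display;

type HostError = Box<dyn std::error::Error>;
type HostResult<T> = Result<Result<T, String>, HostError>;

// Convert an ordinary Result into the nested form by stringifying the
// guest-visible error and reserving the outer layer for host failures.
fn to_host_result<T, E: Display>(result: Result<T, E>) -> HostResult<T> {
    Ok(result.map_err(|error| error.to_string()))
}

fn main() -> Result<(), HostError> {
    assert_eq!(to_host_result::<u32, String>(Ok(7))?, Ok(7));

    let failed = to_host_result::<u32, _>(Err(std::io::Error::other("missing binary")))?;
    assert_eq!(failed, Err("missing binary".to_string()));
    Ok(())
}
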