Compare commits

..

7 Commits

Author | SHA1 | Message | Date
Marshall Bowers
9641ae0755 Remove basic.conf (#10120)
This PR removes the `basic.conf` file.

In #10099 we suppressed some typo warnings that had cropped up in this
file, but it turns out we don't need the file at all.

Release Notes:

- N/A
2024-04-03 12:32:47 -04:00
Kirill Bulatov
ce73ff9808 Avoid failing format test with current date (#10068)
Replace the test that used `chrono::offset::Local::now().naive_local()` as its
input, which caused the formatting test to fail at least once per year.
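
For illustration only (this is not the code from the change itself): the general approach is to format a fixed, hard-coded `NaiveDateTime` in the test rather than one derived from `Local::now()`, so the expected string cannot drift as the real date changes. A minimal sketch using the `chrono` crate:

```rust
use chrono::NaiveDate;

#[test]
fn date_formatting_does_not_depend_on_today() {
    // A pinned timestamp; unlike `Local::now().naive_local()`, this value
    // is identical on every run, so the asserted string never goes stale.
    let timestamp = NaiveDate::from_ymd_opt(2024, 4, 3)
        .unwrap()
        .and_hms_opt(12, 30, 0)
        .unwrap();

    assert_eq!(
        timestamp.format("%Y-%m-%d %H:%M").to_string(),
        "2024-04-03 12:30"
    );
}
```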


Release Notes:

- N/A
2024-04-03 12:32:40 -04:00
Joseph T. Lyons
240db73199 v0.129.x stable 2024-04-03 12:11:10 -04:00
gcp-cherry-pick-bot[bot]
6b52917e75 Don't update active completion for editors that are not focused (cherry-pick #9904) (#9907)
Cherry-picked Don't update active completion for editors that are not
focused (#9904)

Release Notes:

- N/A

Co-authored-by: Antonio Scandurra <me@as-cii.com>
2024-03-28 10:52:33 +01:00
Marshall Bowers
f226a9932a zed 0.129.1 2024-03-27 13:50:53 -04:00
Marshall Bowers
a7915cb848 Look up extensions in the new index when reporting extension events (#9879)
This PR fixes a bug that prevented extension telemetry events from being
reported.

We need to look up the extensions in the new index, as the extensions being
loaded won't be found in the old index.
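
A hypothetical sketch of the pattern described above; the type and field names (`ExtensionStore`, `old_index`, `new_index`, `report_extension_event`) are illustrative stand-ins, not Zed's actual internals:

```rust
use std::collections::HashMap;

// Illustrative stand-ins for the real extension index structures.
struct ExtensionEntry {
    version: String,
}

struct ExtensionStore {
    old_index: HashMap<String, ExtensionEntry>,
    new_index: HashMap<String, ExtensionEntry>,
}

impl ExtensionStore {
    fn report_extension_event(&self, extension_id: &str) {
        // Resolve the extension from the index that reflects what is being
        // loaded; it is no longer present in the old index, so looking it up
        // there would silently drop the telemetry event.
        if let Some(entry) = self.new_index.get(extension_id) {
            println!("extension event: {extension_id} v{}", entry.version);
        }
    }
}
```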

Release Notes:

- N/A
2024-03-27 13:48:45 -04:00
Joseph T. Lyons
2d8288f076 v0.129.x preview 2024-03-27 10:52:55 -04:00
693 changed files with 13650 additions and 35808 deletions

View File

@@ -23,6 +23,12 @@ body:
description: Run the `copy system specs into clipboard` command palette action and paste the output in the field below.
validations:
required: true
- type: textarea
attributes:
label: If applicable, add mockups / screenshots to help explain present your vision of the feature
description: Drag issues into the text input below
validations:
required: false
- type: textarea
attributes:
label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.

View File

@@ -54,9 +54,6 @@ jobs:
- name: Check unused dependencies
uses: bnjbvr/cargo-machete@main
- name: Check license generation
run: script/generate-licenses /tmp/zed_licenses_output
- name: Ensure fresh merge
shell: bash -euxo pipefail {0}
run: |

View File

@@ -32,10 +32,4 @@ jobs:
- name: Run Danger
run: pnpm run --dir script/danger danger ci
env:
# This GitHub token is not used, but the value needs to be here to prevent
# Danger from throwing an error.
GITHUB_TOKEN: "not_a_real_token"
# All requests are instead proxied through an instance of
# https://github.com/maxdeviant/danger-proxy that allows Danger to securely
# authenticate with GitHub while still being able to run on PRs from forks.
DANGER_GITHUB_API_BASE_URL: "https://danger-proxy.fly.dev/github"
GITHUB_TOKEN: ${{ github.token }}

View File

@@ -9,10 +9,10 @@ jobs:
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: "3.11"
python-version: "3.10.5"
architecture: "x64"
cache: "pip"
- run: pip install -r script/update_top_ranking_issues/requirements.txt
- run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393
- run: python script/update_top_ranking_issues/main.py 5393 --github-token ${{ secrets.GITHUB_TOKEN }} --prod

View File

@@ -9,10 +9,10 @@ jobs:
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: "3.11"
python-version: "3.10.5"
architecture: "x64"
cache: "pip"
- run: pip install -r script/update_top_ranking_issues/requirements.txt
- run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7
- run: python script/update_top_ranking_issues/main.py 6952 --github-token ${{ secrets.GITHUB_TOKEN }} --prod --query-day-interval 7

View File

@@ -1,7 +0,0 @@
[
{
"label": "clippy",
"command": "cargo",
"args": ["xtask", "clippy"]
}
]

View File

@@ -2,6 +2,8 @@
Thanks for your interest in contributing to Zed, the collaborative platform that is also a code editor!
We want to avoid anyone spending time on a pull request that may not be accepted, so we suggest you discuss your ideas with the team and community before starting on major changes. Bug fixes, however, are almost always welcome.
All activity in Zed forums is subject to our [Code of Conduct](https://zed.dev/docs/code-of-conduct). Additionally, contributors must sign our [Contributor License Agreement](https://zed.dev/cla) before their contributions can be merged.
## Contribution ideas
@@ -11,7 +13,7 @@ If you're looking for ideas about what to work on, check out:
- Our [public roadmap](https://zed.dev/roadmap) contains a rough outline of our near-term priorities for Zed.
- Our [top-ranking issues](https://github.com/zed-industries/zed/issues/5393) based on votes by the community.
For adding themes or support for a new language to Zed, check out our [extension docs](https://github.com/zed-industries/extensions/blob/main/AUTHORING_EXTENSIONS.md).
Outside of a handful of extremely popular languages and themes, we are generally not looking to extend Zed's language or theme support by directly building them into Zed. We really want to build a plugin system to handle making the editor extensible going forward. If you are passionate about shipping new languages or themes we suggest contributing to the extension system to help us get there faster.
## Proposing changes

Cargo.lock (generated), 1320 changed lines: file diff suppressed because it is too large.

View File

@@ -1,7 +1,6 @@
[workspace]
members = [
"crates/activity_indicator",
"crates/anthropic",
"crates/assets",
"crates/assistant",
"crates/audio",
@@ -29,7 +28,6 @@ members = [
"crates/feature_flags",
"crates/feedback",
"crates/file_finder",
"crates/file_icons",
"crates/fs",
"crates/fsevent",
"crates/fuzzy",
@@ -38,7 +36,6 @@ members = [
"crates/google_ai",
"crates/gpui",
"crates/gpui_macros",
"crates/headless",
"crates/image_viewer",
"crates/install_cli",
"crates/journal",
@@ -73,8 +70,6 @@ members = [
"crates/task",
"crates/tasks_ui",
"crates/search",
"crates/semantic_index",
"crates/semantic_version",
"crates/settings",
"crates/snippet",
"crates/sqlez",
@@ -82,7 +77,6 @@ members = [
"crates/story",
"crates/storybook",
"crates/sum_tree",
"crates/tab_switcher",
"crates/terminal",
"crates/terminal_view",
"crates/text",
@@ -92,7 +86,6 @@ members = [
"crates/telemetry_events",
"crates/time_format",
"crates/ui",
"crates/ui_text_field",
"crates/util",
"crates/vcs_menu",
"crates/vim",
@@ -103,26 +96,12 @@ members = [
"crates/zed_actions",
"extensions/astro",
"extensions/clojure",
"extensions/csharp",
"extensions/dart",
"extensions/elm",
"extensions/emmet",
"extensions/erlang",
"extensions/gleam",
"extensions/haskell",
"extensions/html",
"extensions/lua",
"extensions/ocaml",
"extensions/php",
"extensions/prisma",
"extensions/purescript",
"extensions/svelte",
"extensions/terraform",
"extensions/toml",
"extensions/uiua",
"extensions/vue",
"extensions/zig",
"tooling/xtask",
]
@@ -132,7 +111,6 @@ resolver = "2"
[workspace.dependencies]
activity_indicator = { path = "crates/activity_indicator" }
ai = { path = "crates/ai" }
anthropic = { path = "crates/anthropic" }
assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
audio = { path = "crates/audio" }
@@ -160,7 +138,6 @@ extensions_ui = { path = "crates/extensions_ui" }
feature_flags = { path = "crates/feature_flags" }
feedback = { path = "crates/feedback" }
file_finder = { path = "crates/file_finder" }
file_icons = { path = "crates/file_icons" }
fs = { path = "crates/fs" }
fsevent = { path = "crates/fsevent" }
fuzzy = { path = "crates/fuzzy" }
@@ -169,7 +146,6 @@ go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui" }
gpui_macros = { path = "crates/gpui_macros" }
headless = { path = "crates/headless" }
install_cli = { path = "crates/install_cli" }
image_viewer = { path = "crates/image_viewer" }
journal = { path = "crates/journal" }
@@ -205,7 +181,6 @@ rpc = { path = "crates/rpc" }
task = { path = "crates/task" }
tasks_ui = { path = "crates/tasks_ui" }
search = { path = "crates/search" }
semantic_version = { path = "crates/semantic_version" }
settings = { path = "crates/settings" }
snippet = { path = "crates/snippet" }
sqlez = { path = "crates/sqlez" }
@@ -213,7 +188,6 @@ sqlez_macros = { path = "crates/sqlez_macros" }
story = { path = "crates/story" }
storybook = { path = "crates/storybook" }
sum_tree = { path = "crates/sum_tree" }
tab_switcher = { path = "crates/tab_switcher" }
terminal = { path = "crates/terminal" }
terminal_view = { path = "crates/terminal_view" }
text = { path = "crates/text" }
@@ -223,7 +197,6 @@ theme_selector = { path = "crates/theme_selector" }
telemetry_events = { path = "crates/telemetry_events" }
time_format = { path = "crates/time_format" }
ui = { path = "crates/ui" }
ui_text_field = { path = "crates/ui_text_field" }
util = { path = "crates/util" }
vcs_menu = { path = "crates/vcs_menu" }
vim = { path = "crates/vim" }
@@ -233,33 +206,29 @@ zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }
anyhow = "1.0.57"
any_vec = "0.13"
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-fs = "1.6"
async-recursion = "1.0.0"
async-tar = "0.4.2"
async-trait = "0.1"
bitflags = "2.4.2"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "810ec594358aafea29a4a3d8ab601d25292b2ce4" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "810ec594358aafea29a4a3d8ab601d25292b2ce4" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "61cbd6b2c224791d52b150fe535cee665cc91bb2" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "61cbd6b2c224791d52b150fe535cee665cc91bb2" }
blade-rwh = { package = "raw-window-handle", version = "0.5" }
cap-std = "3.0"
chrono = { version = "0.4", features = ["serde"] }
clap = { version = "4.4", features = ["derive"] }
clickhouse = { version = "0.11.6" }
ctor = "0.2.6"
ctrlc = "3.4.4"
core-foundation = { version = "0.9.3" }
core-foundation-sys = "0.8.6"
derive_more = "0.99.17"
emojis = "0.6.1"
env_logger = "0.9"
futures = "0.3"
futures-batch = "0.6.1"
futures-lite = "1.13"
git2 = { version = "0.18", default-features = false }
git2 = { version = "0.15", default-features = false }
globset = "0.4"
heed = { git = "https://github.com/meilisearch/heed", rev = "036ac23f73a021894974b9adc815bc95b3e0482a", features = ["read-txn-no-tls"] }
hex = "0.4.3"
ignore = "0.4.22"
indoc = "1"
@@ -298,7 +267,6 @@ serde_json_lenient = { version = "0.1", features = [
] }
serde_repr = "0.1"
sha2 = "0.10"
shlex = "1.3"
shellexpand = "2.1.0"
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
@@ -309,8 +277,6 @@ tempfile = "3.9.0"
thiserror = "1.0.29"
tiktoken-rs = "0.5.7"
time = { version = "0.3", features = [
"macros",
"parsing",
"serde",
"serde-well-known",
"formatting",
@@ -321,31 +287,47 @@ tower-http = "0.4.4"
tree-sitter = { version = "0.20", features = ["wasm"] }
tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "7331995b19b8f8aba2d5e26deb51d2195c18bc94" }
tree-sitter-c = "0.20.1"
tree-sitter-clojure = { git = "https://github.com/prcastro/tree-sitter-clojure", branch = "update-ts" }
tree-sitter-c-sharp = { git = "https://github.com/tree-sitter/tree-sitter-c-sharp", rev = "dd5e59721a5f8dae34604060833902b882023aaf" }
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "f44509141e7e483323d2ec178f2d2e6c0fc041c1" }
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-dart = { git = "https://github.com/agent3bood/tree-sitter-dart", rev = "48934e3bf757a9b78f17bdfaa3e2b4284656fdc7" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "a2861e88a730287a60c11ea9299c033c7d076e30" }
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40" }
tree-sitter-embedded-template = "0.20.0"
tree-sitter-erlang = "0.4.0"
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-gomod = { git = "https://github.com/camdencheek/tree-sitter-go-mod" }
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work" }
tree-sitter-hcl = { git = "https://github.com/MichaHoffmann/tree-sitter-hcl", rev = "v1.1.0" }
rustc-demangle = "0.1.23"
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "2e1348c3cf2c9323e87c2744796cf3f3868aa82a" }
tree-sitter-html = "0.19.0"
tree-sitter-jsdoc = { git = "https://github.com/tree-sitter/tree-sitter-jsdoc", rev = "6a6cf9e7341af32d8e2b2e24a37fbfebefc3dc55" }
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "40a81c01a40ac48744e0c8ccabbaba1920441199" }
tree-sitter-lua = "0.0.14"
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "7dd29f9616822e5fc259f5b4ae6c4ded9a71a132" }
tree-sitter-ocaml = { git = "https://github.com/tree-sitter/tree-sitter-ocaml", rev = "4abfdc1c7af2c6c77a370aee974627be1c285b3b" }
tree-sitter-php = "0.21.1"
tree-sitter-proto = { git = "https://github.com/rewinfrey/tree-sitter-proto", rev = "36d54f288aee112f13a67b550ad32634d0c2cb52" }
tree-sitter-python = "0.20.2"
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a" }
tree-sitter-regex = "0.20.0"
tree-sitter-ruby = "0.20.0"
tree-sitter-rust = "0.20.3"
tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af0fd1fa452cb2562dc7b5c8a8c55551c39273b9" }
tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" }
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
tree-sitter-vue = { git = "https://github.com/zed-industries/tree-sitter-vue", rev = "6608d9d60c386f19d80af7d8132322fa11199c42" }
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930" }
tree-sitter-zig = { git = "https://github.com/maxxnino/tree-sitter-zig", rev = "0d08703e4c3f426ec61695d7617415fff97029bd" }
unindent = "0.1.7"
unicase = "2.6"
unicode-segmentation = "1.10"
url = "2.2"
uuid = { version = "1.1.2", features = ["v4", "v5"] }
uuid = { version = "1.1.2", features = ["v4"] }
wasmparser = "0.201"
wasm-encoder = "0.201"
wasmtime = { version = "19.0.0", default-features = false, features = [
@@ -402,7 +384,6 @@ debug = "limited"
[profile.dev.package]
taffy = { opt-level = 3 }
cranelift-codegen = { opt-level = 3 }
resvg = { opt-level = 3 }
rustybuzz = { opt-level = 3 }
ttf-parser = { opt-level = 3 }
wasmtime-cranelift = { opt-level = 3 }

View File

@@ -1,3 +1,3 @@
collab: RUST_LOG=${RUST_LOG:-info} cargo run --package=collab serve
collab: RUST_LOG=${RUST_LOG:-warn,tower_http=info,collab=info} cargo run --package=collab serve
livekit: livekit-server --dev
blob_store: ./script/run-local-minio

View File

@@ -17,7 +17,7 @@ Support for additional platforms is on our [roadmap](https://zed.dev/roadmap):
For macOS users, you can also install Zed using [Homebrew](https://brew.sh/):
```sh
brew install --cask zed
brew install zed
```
Alternatively, to install the Preview release:

View File

@@ -1,3 +1,4 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="m12 6.668 2-2L11.332 2l-2 2M12 6.668l-6.668 6.664H2.668v-2.664L9.332 4M12 6.668 9.332 4" stroke="black" stroke-width="1" stroke-linejoin="round"/>
</svg>
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M18 10L21 7L17 3L14 6M18 10L8 20H4V16L14 6M18 10L14 6" stroke="#000000" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before: 239 B  |  After: 379 B

View File

@@ -1,4 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="4" cy="11" r="1" fill="#787D87"/>
<path d="M9 2.5V5M9 5V7.5M9 5H11.5M9 5H6.5M9 5L10.6667 3.33333M9 5L7.33333 6.6667M9 5L10.6667 6.6667M9 5L7.33333 3.33333" stroke="#787D87" stroke-width="1.25" stroke-linecap="round"/>
</svg>

Before: 333 B

View File

@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M20 17V15.8C20 14.1198 20 13.2798 19.673 12.638C19.3854 12.0735 18.9265 11.6146 18.362 11.327C17.7202 11 16.8802 11 15.2 11H4M4 11L8 7M4 11L8 15" stroke="#000000" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

After: 468 B

View File

@@ -1,3 +1,56 @@
<svg width="16" height="16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2.668 11.332v-.797c0-1.12 0-1.683.219-2.11.191-.374.496-.683.87-.874.43-.219.99-.219 2.11-.219h7.469m0 0-2.668-2.664m2.668 2.664L10.668 10" stroke="black" stroke-width="1.33334" stroke-linecap="round"/>
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Uploaded to: SVG Repo, www.svgrepo.com, Transformed by: SVG Repo Mixer Tools -->
<svg
width="800px"
height="800px"
viewBox="0 0 24 24"
fill="none"
version="1.1"
id="svg1"
sodipodi:docname="reply-svgrepo-com.svg"
inkscape:version="1.3.2 (091e20e, 2023-11-25)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<defs
id="defs1" />
<sodipodi:namedview
id="namedview1"
pagecolor="#505050"
bordercolor="#ffffff"
borderopacity="1"
inkscape:showpageshadow="0"
inkscape:pageopacity="0"
inkscape:pagecheckerboard="1"
inkscape:deskcolor="#505050"
showgrid="false"
inkscape:zoom="0.39996789"
inkscape:cx="435.03492"
inkscape:cy="417.53351"
inkscape:window-width="1440"
inkscape:window-height="847"
inkscape:window-x="0"
inkscape:window-y="25"
inkscape:window-maximized="1"
inkscape:current-layer="svg1" />
<g
id="SVGRepo_bgCarrier"
stroke-width="0" />
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round" />
<g
id="SVGRepo_iconCarrier"
transform="matrix(-1,0,0,1,24.001548,0)">
<path
d="M 20,17 V 15.8 C 20,14.1198 20,13.2798 19.673,12.638 19.3854,12.0735 18.9265,11.6146 18.362,11.327 17.7202,11 16.8802,11 15.2,11 H 4 m 0,0 4,-4 m -4,4 4,4"
stroke="#000000"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
id="path1" />
</g>
</svg>

Before: 296 B  |  After: 1.7 KiB

View File

@@ -1,5 +1,5 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9.5 7V9.5M9.5 9.5V12M9.5 9.5H12M9.5 9.5H7M9.5 9.5L11.1667 7.83333M9.5 9.5L7.83333 11.1667M9.5 9.5L11.1667 11.1667M9.5 9.5L7.83333 7.83333" stroke="#687076" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2.19368 3.84945C2.1919 4.2642 2.32866 4.59866 2.60675 4.84709C2.88054 5.09168 3.25138 5.26935 3.71611 5.38242L4.30921 5.53136C4.50605 5.57882 4.67126 5.63135 4.80449 5.68845C4.93818 5.74501 5.03566 5.81208 5.10003 5.88772C5.16394 5.96098 5.19718 6.05224 5.19718 6.16391C5.19718 6.28414 5.16092 6.38935 5.08822 6.48143C5.01498 6.5742 4.91033 6.6484 4.77143 6.70261C4.63494 6.75654 4.47187 6.78432 4.28148 6.78432C4.08803 6.78432 3.91609 6.75498 3.76493 6.69728C3.61656 6.63822 3.49926 6.55211 3.41134 6.43944C3.35022 6.35823 3.30749 6.26206 3.28377 6.14994C3.2624 6.0489 3.17722 5.96227 3.0652 5.96227H2.26368C2.14684 5.96227 2.04844 6.05921 2.05922 6.18014C2.08844 6.50804 2.18262 6.79104 2.34333 7.02737C2.53198 7.30027 2.79379 7.50589 3.12635 7.64401C3.46002 7.78184 3.84995 7.85002 4.29478 7.85002C4.74295 7.85002 5.12861 7.78282 5.45025 7.64653C5.77317 7.50864 6.02261 7.31419 6.19553 7.06219C6.37039 6.80819 6.45792 6.50903 6.45974 6.16684C6.45792 5.93398 6.41515 5.72484 6.33014 5.5418C6.24742 5.35989 6.13063 5.20177 5.98009 5.06775C5.8304 4.9345 5.65391 4.82275 5.45112 4.73222C5.24921 4.64208 5.02797 4.57018 4.78759 4.51634L4.29843 4.39937C4.18153 4.37319 4.07118 4.3417 3.96872 4.30525C3.86717 4.26736 3.77849 4.22377 3.70234 4.17473C3.62798 4.1251 3.57039 4.06719 3.52851 4.00126C3.49014 3.93817 3.47159 3.86314 3.47483 3.77409L3.47486 3.77227C3.47486 3.66565 3.50529 3.57148 3.56614 3.4881C3.62872 3.40477 3.71979 3.33803 3.84237 3.28933C3.96413 3.2393 4.11652 3.21306 4.3001 3.21306C4.57008 3.21306 4.77748 3.27107 4.92756 3.38156C5.04237 3.4661 5.1184 3.57596 5.15675 3.71261C5.18354 3.80804 5.26638 3.89144 5.37613 3.89144H6.17261C6.28854 3.89144 6.38808 3.79532 6.37517 3.67384C6.34688 3.40772 6.26053 3.16833 6.11582 2.9566C5.94161 2.70171 5.69822 2.5037 5.38764 2.36203L5.36689 2.40752M2.19368 3.84945C2.19189 3.51006 2.28244 3.21141 2.46646 2.95562C2.65136 2.70115 2.90449 2.50328 3.2237 2.36181C3.54318 2.22022 3.90496 2.15002 4.30809 2.15002C4.71811 2.15002 5.07832 2.22011 5.38764 2.36203L5.36689 2.40752M4.7896 6.74919C4.93504 6.69243 5.04766 6.61351 5.12747 6.51242ZM4.7896 6.74919L5.12747 6.51242ZM5.12747 6.51242C5.20728 6.41132 5.24718 6.29516 5.24718 6.16391ZM5.12747 6.51242L5.24718 6.16391ZM5.24718 6.16391C5.24718 6.04154 5.21082 5.93867 5.13811 5.85531L5.24718 6.16391Z" fill="#687076"/>
<path d="M2.19368 3.84945C2.1919 4.2642 2.32866 4.59866 2.60675 4.84709C2.88054 5.09168 3.25138 5.26935 3.71611 5.38242L4.30921 5.53136C4.50605 5.57882 4.67126 5.63135 4.80449 5.68845C4.93818 5.74501 5.03566 5.81208 5.10003 5.88772C5.16394 5.96098 5.19718 6.05224 5.19718 6.16391C5.19718 6.28414 5.16092 6.38935 5.08822 6.48143C5.01498 6.5742 4.91033 6.6484 4.77143 6.70261C4.63494 6.75654 4.47187 6.78432 4.28148 6.78432C4.08803 6.78432 3.91609 6.75498 3.76493 6.69728C3.61656 6.63822 3.49926 6.55211 3.41134 6.43944C3.35022 6.35823 3.30749 6.26206 3.28377 6.14994C3.2624 6.0489 3.17722 5.96227 3.0652 5.96227H2.26368C2.14684 5.96227 2.04844 6.05921 2.05922 6.18014C2.08844 6.50804 2.18262 6.79104 2.34333 7.02737C2.53198 7.30027 2.79379 7.50589 3.12635 7.64401C3.46002 7.78184 3.84995 7.85002 4.29478 7.85002C4.74295 7.85002 5.12861 7.78282 5.45025 7.64653C5.77317 7.50864 6.02261 7.31419 6.19553 7.06219C6.37039 6.80819 6.45792 6.50903 6.45974 6.16684C6.45792 5.93398 6.41515 5.72484 6.33014 5.5418C6.24742 5.35989 6.13063 5.20177 5.98009 5.06775C5.8304 4.9345 5.65391 4.82275 5.45112 4.73222C5.24921 4.64208 5.02797 4.57018 4.78759 4.51634L4.29843 4.39937C4.18153 4.37319 4.07118 4.3417 3.96872 4.30525C3.86717 4.26736 3.77849 4.22377 3.70234 4.17473C3.62798 4.1251 3.57039 4.06719 3.52851 4.00126C3.49014 3.93817 3.47159 3.86314 3.47483 3.77409L3.47486 3.77227C3.47486 3.66565 3.50529 3.57148 3.56614 3.4881C3.62872 3.40477 3.71979 3.33803 3.84237 3.28933C3.96413 3.2393 4.11652 3.21306 4.3001 3.21306C4.57008 3.21306 4.77748 3.27107 4.92756 3.38156C5.04237 3.4661 5.1184 3.57596 5.15675 3.71261C5.18354 3.80804 5.26638 3.89144 5.37613 3.89144H6.17261C6.28854 3.89144 6.38808 3.79532 6.37517 3.67384C6.34688 3.40772 6.26053 3.16833 6.11582 2.9566C5.94161 2.70171 5.69822 2.5037 5.38764 2.36203M2.19368 3.84945C2.19189 3.51006 2.28244 3.21141 2.46646 2.95562C2.65136 2.70115 2.90449 2.50328 3.2237 2.36181C3.54318 2.22022 3.90496 2.15002 4.30809 2.15002C4.71811 2.15002 5.07832 2.22011 5.38764 2.36203M2.19368 3.84945L2.24368 3.84942M5.38764 2.36203L5.36689 2.40752M5.36689 2.40752C5.06539 2.26919 4.71246 2.20002 4.30809 2.20002C3.91081 2.20002 3.5561 2.26919 3.24396 2.40752C2.93181 2.54586 2.68618 2.73829 2.50705 2.98482M5.36689 2.40752C5.67017 2.54586 5.90605 2.73829 6.07454 2.98482C6.21435 3.18938 6.29799 3.42082 6.32545 3.67912C6.3349 3.76801 6.262 3.84144 6.17261 3.84144H5.37613C5.29388 3.84144 5.22712 3.77829 5.20489 3.6991C5.16352 3.55168 5.08096 3.43241 4.9572 3.3413C4.79581 3.22247 4.57678 3.16306 4.3001 3.16306M4.7896 6.74919C4.64595 6.80594 4.47657 6.83432 4.28148 6.83432C4.08285 6.83432 3.9046 6.80417 3.74676 6.74386C3.59069 6.68179 3.46565 6.59045 3.37165 6.46985M4.7896 6.74919C4.93504 6.69243 5.04766 6.61351 5.12747 6.51242M4.7896 6.74919L5.12747 6.51242M5.12747 6.51242C5.20728 6.41132 5.24718 6.29516 5.24718 6.16391M5.12747 6.51242L5.24718 6.16391M5.24718 6.16391C5.24718 6.04154 5.21082 5.93867 5.13811 5.85531L5.24718 6.16391Z" stroke="#687076" stroke-width="0.1"/>
<path d="M9.5 7V9.5M9.5 12V9.5M12 9.5H9.5M7 9.5H9.5M9.5 9.5L11.1667 7.83333M9.5 9.5L7.83333 11.1667M9.5 9.5L11.1667 11.1667M9.5 9.5L7.83333 7.83333" stroke="#11181C" stroke-width="1.25" stroke-linecap="round"/>
<path d="M2.19366 3.84943C2.19188 4.26418 2.32864 4.59864 2.60673 4.84707C2.88052 5.09166 3.25136 5.26933 3.71609 5.3824C3.71616 5.38242 3.71623 5.38243 3.7163 5.38245L4.30919 5.53134L4.30919 5.53134L4.30965 5.53145C4.50649 5.57891 4.67124 5.63133 4.80447 5.68843L4.80469 5.68852C4.93838 5.74508 5.03564 5.81206 5.10001 5.8877L5.10001 5.8877L5.10041 5.88816C5.16432 5.96142 5.19716 6.05222 5.19716 6.16389C5.19716 6.28412 5.1609 6.38933 5.0882 6.48141C5.01496 6.57418 4.91031 6.64838 4.77141 6.70259L4.77121 6.70266C4.63472 6.75659 4.47185 6.7843 4.28146 6.7843C4.08801 6.7843 3.91607 6.75496 3.76491 6.69726C3.61654 6.6382 3.49924 6.55209 3.41132 6.43942C3.3502 6.35821 3.30747 6.26204 3.28375 6.14992C3.26238 6.04888 3.1772 5.96225 3.06518 5.96225H2.26366C2.14682 5.96225 2.04842 6.05919 2.0592 6.18012C2.08842 6.50802 2.1826 6.79102 2.34331 7.02735L2.34352 7.02767C2.53217 7.30057 2.79377 7.50587 3.12633 7.64399L3.12642 7.64402C3.46009 7.78185 3.84993 7.85 4.29476 7.85C4.74293 7.85 5.12859 7.7828 5.45023 7.64651L5.45036 7.64646C5.77328 7.50857 6.02259 7.31417 6.19551 7.06217C6.37037 6.80817 6.4579 6.50901 6.45972 6.16682L6.45972 6.16616C6.4579 5.9333 6.41513 5.72482 6.33012 5.54178C6.2474 5.35987 6.13061 5.20175 5.98007 5.06773C5.83038 4.93448 5.65389 4.82273 5.4511 4.7322C5.24919 4.64206 5.02795 4.57016 4.78757 4.51632L4.29841 4.39935L4.29841 4.39934L4.29771 4.39919C4.18081 4.37301 4.07116 4.34168 3.9687 4.30523C3.86715 4.26734 3.77847 4.22375 3.70232 4.17471C3.62796 4.12508 3.57037 4.06717 3.52849 4.00124C3.49012 3.93815 3.47157 3.86312 3.47481 3.77407L3.47484 3.77407V3.77225C3.47484 3.66563 3.50527 3.57146 3.56612 3.48808C3.6287 3.40475 3.71977 3.33801 3.84235 3.28931L3.84235 3.28932L3.84289 3.28909C3.96465 3.23906 4.1165 3.21304 4.30008 3.21304C4.57006 3.21304 4.77746 3.27105 4.92754 3.38154C5.04235 3.46608 5.11838 3.57594 5.15673 3.71259C5.18352 3.80802 5.26636 3.89142 5.37611 3.89142H6.17259C6.28852 3.89142 6.38806 3.7953 6.37515 3.67382C6.34686 3.4077 6.26051 3.16831 6.1158 2.95658C5.94159 2.70169 5.6982 2.50368 5.38762 2.36201L5.36687 2.4075M2.19366 3.84943C2.19187 3.51004 2.28242 3.21139 2.46644 2.9556L2.46658 2.9554C2.65148 2.70093 2.90447 2.50326 3.22368 2.36179C3.54316 2.2202 3.90494 2.15 4.30807 2.15C4.71809 2.15 5.07841 2.22014 5.38773 2.36206L5.36687 2.4075M2.19366 3.84943C2.19366 3.84951 2.19366 3.84959 2.19366 3.84967L2.24366 3.8494L2.19366 3.84918C2.19366 3.84926 2.19366 3.84935 2.19366 3.84943ZM5.36687 2.4075C5.06537 2.26917 4.71244 2.2 4.30807 2.2C3.91079 2.2 3.55608 2.26917 3.24394 2.4075C2.93179 2.54584 2.68616 2.73827 2.50703 2.9848L3.82389 3.24285L3.82389 3.24285C3.95336 3.18964 4.11209 3.16304 4.30008 3.16304C4.57676 3.16304 4.79579 3.22245 4.95718 3.34128C5.08094 3.43239 5.1635 3.55166 5.20487 3.69908C5.2271 3.77827 5.29386 3.84142 5.37611 3.84142H6.17259C6.26198 3.84142 6.33488 3.76799 6.32543 3.6791C6.29797 3.4208 6.21433 3.18936 6.07452 2.9848C5.90603 2.73827 5.67015 2.54584 5.36687 2.4075ZM4.78958 6.74917C4.64593 6.80592 4.47655 6.8343 4.28146 6.8343C4.08283 6.8343 3.90458 6.80415 3.74674 6.74384C3.59067 6.68177 3.46563 6.59043 3.37163 6.46983L4.78958 6.74917ZM4.78958 6.74917C4.93502 6.69241 5.04764 6.61349 5.12745 6.5124M4.78958 6.74917L5.12745 6.5124M5.12745 6.5124C5.20726 6.4113 5.24716 6.29514 5.24716 6.16389M5.12745 6.5124L5.24716 6.16389M5.24716 6.16389C5.24716 6.04152 5.2108 5.93865 5.13809 5.85529L5.24716 6.16389Z" fill="#687076" stroke="#687076" stroke-width="0.1"/>
</svg>

Before: 5.5 KiB  |  After: 3.7 KiB

View File

@@ -1,5 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.99993 6.85713C11.1558 6.85713 13.7142 5.83379 13.7142 4.57142C13.7142 3.30905 11.1558 2.28571 7.99993 2.28571C4.84402 2.28571 2.28564 3.30905 2.28564 4.57142C2.28564 5.83379 4.84402 6.85713 7.99993 6.85713Z" fill="black" stroke="black" stroke-width="1.5"/>
<path d="M13.7142 4.57141V11.4286C13.7142 12.691 11.1558 13.7143 7.99993 13.7143C4.84402 13.7143 2.28564 12.691 2.28564 11.4286V4.57141" stroke="black" stroke-width="1.5"/>
<path d="M13.7142 8C13.7142 9.26237 11.1558 10.2857 7.99993 10.2857C4.84402 10.2857 2.28564 9.26237 2.28564 8" stroke="black" stroke-width="1.5"/>
</svg>

Before: 692 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-trash-2"><path d="M3 6h18"/><path d="M19 6v14c0 1-1 2-2 2H7c-1 0-2-1-2-2V6"/><path d="M8 6V4c0-1 1-2 2-2h4c1 0 2 1 2 2v2"/><line x1="10" x2="10" y1="11" y2="17"/><line x1="14" x2="14" y1="11" y2="17"/></svg>

Before: 409 B

View File

@@ -16,9 +16,7 @@
"escape": "menu::Cancel",
"ctrl-escape": "menu::Cancel",
"ctrl-c": "menu::Cancel",
"shift-enter": "picker::UseSelectedQuery",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"ctrl-alt-enter": ["picker::ConfirmInput", { "secondary": true }],
"shift-enter": "menu::UseSelectedQuery",
"ctrl-shift-w": "workspace::CloseWindow",
"shift-escape": "workspace::ToggleZoom",
"ctrl-o": "workspace::Open",
@@ -28,7 +26,7 @@
"ctrl-0": "zed::ResetBufferFontSize",
"ctrl-,": "zed::OpenSettings",
"ctrl-q": "zed::Quit",
"alt-f9": "zed::Hide",
"ctrl-h": "zed::Hide",
"f11": "zed::ToggleFullScreen"
}
},
@@ -38,6 +36,7 @@
"escape": "editor::Cancel",
"backspace": "editor::Backspace",
"shift-backspace": "editor::Backspace",
"ctrl-h": "editor::Backspace",
"delete": "editor::Delete",
"ctrl-d": "editor::Delete",
"tab": "editor::Tab",
@@ -137,8 +136,7 @@
// ],
"ctrl-alt-space": "editor::ShowCharacterPalette",
"ctrl-;": "editor::ToggleLineNumbers",
"ctrl-k ctrl-r": "editor::RevertSelectedHunks",
"ctrl-alt-g b": "editor::ToggleGitBlame"
"ctrl-k ctrl-r": "editor::RevertSelectedHunks"
}
},
{
@@ -149,11 +147,10 @@
"ctrl-shift-enter": "editor::NewlineBelow",
"ctrl-enter": "editor::NewlineAbove",
"alt-z": "editor::ToggleSoftWrap",
"ctrl-f": "buffer_search::Deploy",
"ctrl-h": [
"ctrl-f": [
"buffer_search::Deploy",
{
"replace_enabled": true
"focus": true
}
],
// "cmd-e": [
@@ -212,15 +209,14 @@
"enter": "search::SelectNextMatch",
"shift-enter": "search::SelectPrevMatch",
"alt-enter": "search::SelectAllMatches",
"ctrl-f": "search::FocusSearch",
"ctrl-h": "search::ToggleReplace"
"alt-tab": "search::CycleMode"
}
},
{
"context": "BufferSearchBar && in_replace",
"bindings": {
"enter": "search::ReplaceNext",
"ctrl-enter": "search::ReplaceAll"
"cmd-enter": "search::ReplaceAll"
}
},
{
@@ -234,10 +230,10 @@
"context": "ProjectSearchBar",
"bindings": {
"escape": "project_search::ToggleFocus",
"ctrl-shift-f": "search::FocusSearch",
"alt-tab": "search::CycleMode",
"ctrl-shift-h": "search::ToggleReplace",
"alt-ctrl-g": "search::ToggleRegex",
"alt-ctrl-x": "search::ToggleRegex"
"alt-ctrl-g": "search::ActivateRegexMode",
"alt-ctrl-x": "search::ActivateTextMode"
}
},
{
@@ -258,15 +254,18 @@
"context": "ProjectSearchView",
"bindings": {
"escape": "project_search::ToggleFocus",
"ctrl-shift-h": "search::ToggleReplace",
"alt-ctrl-g": "search::ToggleRegex",
"alt-ctrl-x": "search::ToggleRegex"
"alt-tab": "search::CycleMode",
"cmd-shift-h": "search::ToggleReplace",
"alt-ctrl-g": "search::ActivateRegexMode",
"alt-ctrl-x": "search::ActivateTextMode"
}
},
{
"context": "Pane",
"bindings": {
"ctrl-shift-tab": "pane::ActivatePrevItem",
"ctrl-pageup": "pane::ActivatePrevItem",
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-w": "pane::CloseActiveItem",
"alt-ctrl-t": "pane::CloseInactiveItems",
@@ -280,10 +279,10 @@
"alt-enter": "search::SelectAllMatches",
"alt-c": "search::ToggleCaseSensitive",
"alt-w": "search::ToggleWholeWord",
"alt-r": "search::ToggleRegex",
"alt-r": "search::CycleMode",
"alt-ctrl-f": "project_search::ToggleFilters",
"ctrl-alt-shift-r": "search::ToggleRegex",
"ctrl-alt-shift-x": "search::ToggleRegex"
"ctrl-alt-shift-r": "search::ActivateRegexMode",
"ctrl-alt-shift-x": "search::ActivateTextMode"
}
},
// Bindings from VS Code
@@ -304,10 +303,8 @@
}
],
"ctrl-alt-shift-down": "editor::DuplicateLine",
"ctrl-shift-left": "editor::SelectToPreviousWordStart",
"ctrl-shift-right": "editor::SelectToNextWordEnd",
"ctrl-shift-up": "editor::SelectLargerSyntaxNode", //todo(linux) tmp keybinding
"ctrl-shift-down": "editor::SelectSmallerSyntaxNode", //todo(linux) tmp keybinding
"ctrl-shift-right": "editor::SelectLargerSyntaxNode",
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode",
"ctrl-d": [
"editor::SelectNext",
{
@@ -356,14 +353,14 @@
"ctrl-shift-]": "editor::UnfoldLines",
"ctrl-space": "editor::ShowCompletions",
"ctrl-.": "editor::ToggleCodeActions",
"alt-ctrl-r": "editor::RevealInFinder",
"alt-cmd-r": "editor::RevealInFinder",
"ctrl-alt-shift-c": "editor::DisplayCursorNames"
}
},
{
"context": "Editor && mode == full",
"bindings": {
"ctrl-shift-o": "outline::Toggle",
"cmd-shift-o": "outline::Toggle",
"ctrl-g": "go_to_line::Toggle"
}
},
@@ -419,18 +416,10 @@
"ctrl-j": "workspace::ToggleBottomDock",
"ctrl-alt-y": "workspace::CloseAllDocks",
"ctrl-shift-f": "pane::DeploySearch",
"ctrl-shift-h": [
"pane::DeploySearch",
{
"replace_enabled": true
}
],
"ctrl-k ctrl-s": "zed::OpenKeymap",
"ctrl-k ctrl-t": "theme_selector::Toggle",
"ctrl-shift-t": "project_symbols::Toggle",
"ctrl-t": "project_symbols::Toggle",
"ctrl-p": "file_finder::Toggle",
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }],
"ctrl-e": "file_finder::Toggle",
"ctrl-shift-p": "command_palette::Toggle",
"ctrl-shift-m": "diagnostics::Deploy",
@@ -455,8 +444,6 @@
{
"context": "Editor",
"bindings": {
"ctrl-shift-k": "editor::DeleteLine",
"ctrl-shift-d": "editor::DuplicateLineDown",
"ctrl-j": "editor::JoinLines",
"ctrl-alt-backspace": "editor::DeleteToPreviousSubwordStart",
"ctrl-alt-h": "editor::DeleteToPreviousSubwordStart",
@@ -557,7 +544,7 @@
"delete": "project_panel::Delete",
"ctrl-backspace": ["project_panel::Delete", { "skip_prompt": true }],
"ctrl-delete": ["project_panel::Delete", { "skip_prompt": true }],
"alt-ctrl-r": "project_panel::RevealInFinder",
"alt-cmd-r": "project_panel::RevealInFinder",
"alt-shift-f": "project_panel::NewSearchInDirectory"
}
},
@@ -602,15 +589,6 @@
"context": "FileFinder",
"bindings": { "ctrl-shift-p": "file_finder::SelectPrev" }
},
{
"context": "TabSwitcher",
"bindings": {
"ctrl-up": "menu::SelectPrev",
"ctrl-down": "menu::SelectNext",
"ctrl-shift-tab": "menu::SelectPrev",
"ctrl-backspace": "tab_switcher::CloseSelectedItem"
}
},
{
"context": "Terminal",
"bindings": {
@@ -623,12 +601,7 @@
"pagedown": ["terminal::SendKeystroke", "pagedown"],
"escape": ["terminal::SendKeystroke", "escape"],
"enter": ["terminal::SendKeystroke", "enter"],
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
// Some nice conveniences
"ctrl-backspace": ["terminal::SendText", "\u0015"],
"ctrl-right": ["terminal::SendText", "\u0005"],
"ctrl-left": ["terminal::SendText", "\u0001"]
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"]
}
}
]

View File

@@ -17,11 +17,8 @@
"cmd-enter": "menu::SecondaryConfirm",
"escape": "menu::Cancel",
"cmd-escape": "menu::Cancel",
"ctrl-escape": "menu::Cancel",
"ctrl-c": "menu::Cancel",
"shift-enter": "picker::UseSelectedQuery",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"cmd-alt-enter": ["picker::ConfirmInput", { "secondary": true }],
"shift-enter": "menu::UseSelectedQuery",
"cmd-shift-w": "workspace::CloseWindow",
"shift-escape": "workspace::ToggleZoom",
"cmd-o": "workspace::Open",
@@ -158,8 +155,7 @@
],
"ctrl-cmd-space": "editor::ShowCharacterPalette",
"cmd-;": "editor::ToggleLineNumbers",
"cmd-alt-z": "editor::RevertSelectedHunks",
"cmd-alt-g b": "editor::ToggleGitBlame"
"cmd-alt-z": "editor::RevertSelectedHunks"
}
},
{
@@ -170,11 +166,10 @@
"cmd-shift-enter": "editor::NewlineAbove",
"cmd-enter": "editor::NewlineBelow",
"alt-z": "editor::ToggleSoftWrap",
"cmd-f": "buffer_search::Deploy",
"cmd-alt-f": [
"cmd-f": [
"buffer_search::Deploy",
{
"replace_enabled": true
"focus": true
}
],
"cmd-e": [
@@ -233,8 +228,7 @@
"enter": "search::SelectNextMatch",
"shift-enter": "search::SelectPrevMatch",
"alt-enter": "search::SelectAllMatches",
"cmd-f": "search::FocusSearch",
"cmd-alt-f": "search::ToggleReplace"
"alt-tab": "search::CycleMode"
}
},
{
@@ -255,10 +249,10 @@
"context": "ProjectSearchBar",
"bindings": {
"escape": "project_search::ToggleFocus",
"cmd-shift-f": "search::FocusSearch",
"alt-tab": "search::CycleMode",
"cmd-shift-h": "search::ToggleReplace",
"alt-cmd-g": "search::ToggleRegex",
"alt-cmd-x": "search::ToggleRegex"
"alt-cmd-g": "search::ActivateRegexMode",
"alt-cmd-x": "search::ActivateTextMode"
}
},
{
@@ -279,9 +273,10 @@
"context": "ProjectSearchView",
"bindings": {
"escape": "project_search::ToggleFocus",
"alt-tab": "search::CycleMode",
"cmd-shift-h": "search::ToggleReplace",
"alt-cmd-g": "search::ToggleRegex",
"alt-cmd-x": "search::ToggleRegex"
"alt-cmd-g": "search::ActivateRegexMode",
"alt-cmd-x": "search::ActivateTextMode"
}
},
{
@@ -303,9 +298,10 @@
"alt-enter": "search::SelectAllMatches",
"alt-cmd-c": "search::ToggleCaseSensitive",
"alt-cmd-w": "search::ToggleWholeWord",
"alt-tab": "search::CycleMode",
"alt-cmd-f": "project_search::ToggleFilters",
"alt-cmd-g": "search::ToggleRegex",
"alt-cmd-x": "search::ToggleRegex"
"alt-cmd-g": "search::ActivateRegexMode",
"alt-cmd-x": "search::ActivateTextMode"
}
},
// Bindings from VS Code
@@ -321,8 +317,13 @@
"cmd-shift-k": "editor::DeleteLine",
"alt-up": "editor::MoveLineUp",
"alt-down": "editor::MoveLineDown",
"alt-shift-up": "editor::DuplicateLineUp",
"alt-shift-down": "editor::DuplicateLineDown",
"alt-shift-up": [
"editor::DuplicateLine",
{
"move_upwards": true
}
],
"alt-shift-down": "editor::DuplicateLine",
"ctrl-shift-right": "editor::SelectLargerSyntaxNode",
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode",
"cmd-d": [
@@ -436,18 +437,10 @@
"cmd-j": "workspace::ToggleBottomDock",
"alt-cmd-y": "workspace::CloseAllDocks",
"cmd-shift-f": "pane::DeploySearch",
"cmd-shift-h": [
"pane::DeploySearch",
{
"replace_enabled": true
}
],
"cmd-k cmd-s": "zed::OpenKeymap",
"cmd-k cmd-t": "theme_selector::Toggle",
"cmd-t": "project_symbols::Toggle",
"cmd-p": "file_finder::Toggle",
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }],
"cmd-shift-p": "command_palette::Toggle",
"cmd-shift-m": "diagnostics::Deploy",
"cmd-shift-e": "project_panel::ToggleFocus",
@@ -610,15 +603,6 @@
"context": "FileFinder",
"bindings": { "cmd-shift-p": "file_finder::SelectPrev" }
},
{
"context": "TabSwitcher",
"bindings": {
"ctrl-up": "menu::SelectPrev",
"ctrl-down": "menu::SelectNext",
"ctrl-shift-tab": "menu::SelectPrev",
"ctrl-backspace": "tab_switcher::CloseSelectedItem"
}
},
{
"context": "Terminal",
"bindings": {

View File

@@ -11,7 +11,7 @@
"ctrl->": "zed::IncreaseBufferFontSize",
"ctrl-<": "zed::DecreaseBufferFontSize",
"ctrl-shift-j": "editor::JoinLines",
"cmd-d": "editor::DuplicateLineDown",
"cmd-d": "editor::DuplicateLine",
"cmd-backspace": "editor::DeleteLine",
"cmd-pagedown": "editor::MovePageDown",
"cmd-pageup": "editor::MovePageUp",

View File

@@ -9,7 +9,7 @@
"context": "Editor",
"bindings": {
"cmd-l": "go_to_line::Toggle",
"ctrl-shift-d": "editor::DuplicateLineDown",
"ctrl-shift-d": "editor::DuplicateLine",
"cmd-b": "editor::GoToDefinition",
"cmd-j": "editor::ScrollCursorCenter",
"cmd-enter": "editor::NewlineBelow",

View File

@@ -73,17 +73,8 @@
],
"g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }],
"/": "vim::Search",
"?": [
"vim::Search",
{
"backwards": true
}
],
"*": "vim::MoveToNext",
"#": "vim::MoveToPrev",
"n": "vim::MoveToNextMatch",
"shift-n": "vim::MoveToPrevMatch",
"n": "search::SelectNextMatch",
"shift-n": "search::SelectPrevMatch",
"%": "vim::Matching",
"f": [
"vim::PushOperator",
@@ -146,10 +137,8 @@
"g d": "editor::GoToDefinition",
"g shift-d": "editor::GoToTypeDefinition",
"g x": "editor::OpenUrl",
"g n": "vim::SelectNextMatch",
"g shift-n": "vim::SelectPreviousMatch",
"g l": "vim::SelectNext",
"g shift-l": "vim::SelectPrevious",
"g n": "vim::SelectNext",
"g shift-n": "vim::SelectPrevious",
"g >": [
"editor::SelectNext",
{
@@ -234,8 +223,6 @@
"displayLines": true
}
],
"g ]": "editor::GoToDiagnostic",
"g [": "editor::GoToPrevDiagnostic",
"shift-h": "vim::WindowTop",
"shift-m": "vim::WindowMiddle",
"shift-l": "vim::WindowBottom",
@@ -362,6 +349,15 @@
],
"u": "editor::Undo",
"ctrl-r": "editor::Redo",
"/": "vim::Search",
"?": [
"vim::Search",
{
"backwards": true
}
],
"*": "vim::MoveToNext",
"#": "vim::MoveToPrev",
"r": ["vim::PushOperator", "Replace"],
"s": "vim::Substitute",
"shift-s": "vim::SubstituteLine",
@@ -369,15 +365,6 @@
"< <": "vim::Outdent",
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-pageup": "pane::ActivatePrevItem",
// tree-sitter related commands
"[ x": "editor::SelectLargerSyntaxNode",
"] x": "editor::SelectSmallerSyntaxNode"
}
},
{
"context": "Editor && vim_mode == visual && vim_operator == none && !VimWaiting",
"bindings": {
// tree-sitter related commands
"[ x": "editor::SelectLargerSyntaxNode",
"] x": "editor::SelectSmallerSyntaxNode"
}
@@ -395,46 +382,18 @@
"d": "editor::Rename" // zed specific
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == c",
"bindings": {
"s": [
"vim::PushOperator",
{
"ChangeSurrounds": {}
}
]
}
},
{
"context": "Editor && vim_operator == d",
"bindings": {
"d": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == d",
"bindings": {
"s": ["vim::PushOperator", "DeleteSurrounds"]
}
},
{
"context": "Editor && vim_operator == y",
"bindings": {
"y": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == y",
"bindings": {
"s": [
"vim::PushOperator",
{
"AddSurrounds": {}
}
]
}
},
{
"context": "Editor && VimObject",
"bindings": {
@@ -543,18 +502,6 @@
]
}
},
{
"context": "Editor && vim_mode == normal",
"bindings": {
"g c c": "editor::ToggleComments"
}
},
{
"context": "Editor && vim_mode == visual",
"bindings": {
"g c": "editor::ToggleComments"
}
},
{
"context": "Editor && vim_mode == insert",
"bindings": {
@@ -599,12 +546,6 @@
"escape": "buffer_search::Dismiss"
}
},
{
"context": "EmptyPane || SharedScreen",
"bindings": {
":": "command_palette::Toggle"
}
},
{
// netrw compatibility
"context": "ProjectPanel && not_editing",
@@ -613,18 +554,17 @@
"%": "project_panel::NewFile",
"/": "project_panel::NewSearchInDirectory",
"d": "project_panel::NewDirectory",
"enter": "project_panel::OpenPermanent",
"enter": "project_panel::Open",
"escape": "project_panel::ToggleFocus",
"h": "project_panel::CollapseSelectedEntry",
"j": "menu::SelectNext",
"k": "menu::SelectPrev",
"l": "project_panel::ExpandSelectedEntry",
"o": "project_panel::OpenPermanent",
"o": "project_panel::Open",
"shift-d": "project_panel::Delete",
"shift-r": "project_panel::Rename",
"t": "project_panel::OpenPermanent",
"v": "project_panel::OpenPermanent",
"p": "project_panel::Open",
"t": "project_panel::Open",
"v": "project_panel::Open",
"x": "project_panel::RevealInFinder"
}
}

View File

@@ -36,7 +36,7 @@
// },
"buffer_line_height": "comfortable",
// The name of a font to use for rendering text in the UI
"ui_font_family": ".SystemUIFont",
"ui_font_family": "Zed Sans",
// The OpenType features to enable for text in the UI
"ui_font_features": {
// Disable ligatures:
@@ -48,8 +48,7 @@
// which gives the same size as all other panes.
"active_pane_magnification": 1.0,
// The key to use for adding multiple cursors
// Currently "alt" or "cmd_or_ctrl" (also aliased as
// "cmd" and "ctrl") are supported.
// Currently "alt" or "cmd" are supported.
"multi_cursor_modifier": "alt",
// Whether to enable vim modes and key bindings
"vim_mode": false,
@@ -58,8 +57,6 @@
"hover_popover_enabled": true,
// Whether to confirm before quitting Zed.
"confirm_quit": false,
// Whether to restore last closed project when fresh Zed instance is opened.
"restore_on_startup": "last_workspace",
// Whether the cursor blinks in the editor.
"cursor_blink": true,
// Whether to pop the completions menu while typing in an editor without
@@ -72,7 +69,7 @@
// documentation when not included in original completion list.
"completion_documentation_secondary_query_debounce": 300,
// Whether to show wrap guides in the editor. Setting this to true will
// show a guide at the 'preferred_line_length' value if 'soft_wrap' is set to
// show a guide at the 'preferred_line_length' value if softwrap is set to
// 'preferred_line_length', and will show any additional guides as specified
// by the 'wrap_guides' setting.
"show_wrap_guides": true,
@@ -171,9 +168,6 @@
},
// The number of lines to keep above/below the cursor when scrolling.
"vertical_scroll_margin": 3,
// Scroll sensitivity multiplier. This multiplier is applied
// to both the horizontal and vertical delta values while scrolling.
"scroll_sensitivity": 1.0,
"relative_line_numbers": false,
// When to populate a new search's query based on the text under the cursor.
// This setting can take the following three values:
@@ -217,10 +211,7 @@
// Whether to reveal it in the project panel automatically,
// when a corresponding project entry becomes active.
// Gitignored entries are never auto revealed.
"auto_reveal_entries": true,
/// Whether to fold directories automatically
/// when a directory has only one directory inside.
"auto_fold_dirs": false
"auto_reveal_entries": true
},
"collaboration_panel": {
// Whether to show the collaboration panel button in the status bar.
@@ -292,11 +283,6 @@
// 4. Save when idle for a certain amount of time:
// "autosave": { "after_delay": {"milliseconds": 500} },
"autosave": "off",
// Settings related to the editor's tab bar.
"tab_bar": {
// Whether or not to show the navigation history buttons.
"show_nav_history_buttons": true
},
// Settings related to the editor's tabs
"tabs": {
// Show git status colors in the editor tabs.
@@ -304,16 +290,6 @@
// Position of the close button on the editor tabs.
"close_position": "right"
},
// Settings related to preview tabs.
"preview_tabs": {
// Whether preview tabs should be enabled.
// Preview tabs allow you to open files in preview mode, where they close automatically
// when you switch to another file unless you explicitly pin them.
// This is useful for quickly viewing files without cluttering your workspace.
"enabled": true,
// Whether to open files in preview mode when selected from the file finder.
"enable_preview_from_file_finder": false
},
// Whether or not to remove any trailing whitespace from lines of a buffer
// before saving it.
"remove_trailing_whitespace_on_save": true,
@@ -393,15 +369,7 @@
// "git_gutter": "tracked_files"
// 2. Hide the gutter
// "git_gutter": "hide"
"git_gutter": "tracked_files",
// Control whether the git blame information is shown inline,
// in the currently focused line.
"inline_blame": {
"enabled": false
// Sets a delay after which the inline blame information is shown.
// Delay is restarted with every cursor movement.
// "delay_ms": 600
}
"git_gutter": "tracked_files"
},
"copilot": {
// The set of glob patterns for which copilot should be disabled
@@ -576,29 +544,56 @@
"file_types": {},
// Different settings for specific languages.
"languages": {
"C++": {
"format_on_save": "off"
"Plain Text": {
"soft_wrap": "preferred_line_length"
},
"C": {
"format_on_save": "off"
"Elixir": {
"tab_size": 2
},
"Gleam": {
"tab_size": 2
},
"Go": {
"tab_size": 4,
"hard_tabs": true,
"code_actions_on_format": {
"source.organizeImports": true
}
},
"Make": {
"hard_tabs": true
"Markdown": {
"tab_size": 2,
"soft_wrap": "preferred_line_length"
},
"JavaScript": {
"tab_size": 2
},
"Terraform": {
"tab_size": 2
},
"TypeScript": {
"tab_size": 2
},
"TSX": {
"tab_size": 2
},
"YAML": {
"tab_size": 2
},
"JSON": {
"tab_size": 2
},
"OCaml": {
"tab_size": 2
},
"OCaml Interface": {
"tab_size": 2
},
"Prisma": {
"tab_size": 2
}
},
// Zed's Prettier integration settings.
// If Prettier is enabled, Zed will use this for its Prettier instance for any applicable file, if
// If Prettier is enabled, Zed will use this its Prettier instance for any applicable file, if
// project has no other Prettier installed.
"prettier": {
// Use regular Prettier json configuration:
@@ -647,17 +642,5 @@
// Mostly useful for developers who are managing multiple instances of Zed.
"dev": {
// "theme": "Andromeda"
},
// Task-related settings.
"task": {
// Whether to show task status indicator in the status bar. Default: true
"show_status_indicator": true
},
// Whether to show full labels in line indicator or short ones
//
// Values:
// - `short`: "2 s, 15 l, 32 c"
// - `long`: "2 selections, 15 lines, 32 characters"
// Default: long
"line_indicator_format": "long"
}
}

View File

@@ -111,7 +111,7 @@
"hint": "#618399ff",
"hint.background": "#12231fff",
"hint.border": "#183934ff",
"ignored": "#6b6b73ff",
"ignored": "#aca8aeff",
"ignored.background": "#262933ff",
"ignored.border": "#2b2f38ff",
"info": "#10a793ff",

View File

@@ -111,7 +111,7 @@
"hint": "#706897ff",
"hint.background": "#161a35ff",
"hint.border": "#222953ff",
"ignored": "#756f7eff",
"ignored": "#898591ff",
"ignored.background": "#3a353fff",
"ignored.border": "#56505eff",
"info": "#566ddaff",
@@ -495,7 +495,7 @@
"hint": "#776d9dff",
"hint.background": "#e1e0f9ff",
"hint.border": "#c8c7f2ff",
"ignored": "#6e6876ff",
"ignored": "#5a5462ff",
"ignored.background": "#bfbcc5ff",
"ignored.border": "#8f8b96ff",
"info": "#586cdaff",
@@ -879,7 +879,7 @@
"hint": "#b17272ff",
"hint.background": "#171e38ff",
"hint.border": "#262f56ff",
"ignored": "#8f8b77ff",
"ignored": "#a4a08bff",
"ignored.background": "#45433bff",
"ignored.border": "#6c695cff",
"info": "#6684e0ff",
@@ -1263,7 +1263,7 @@
"hint": "#b37979ff",
"hint.background": "#e3e5faff",
"hint.border": "#cdd1f5ff",
"ignored": "#878471ff",
"ignored": "#706d5fff",
"ignored.background": "#cecab4ff",
"ignored.border": "#a8a48eff",
"info": "#6684dfff",
@@ -1647,7 +1647,7 @@
"hint": "#6f815aff",
"hint.background": "#142319ff",
"hint.border": "#1c3927ff",
"ignored": "#7d7c6aff",
"ignored": "#91907fff",
"ignored.background": "#424136ff",
"ignored.border": "#5d5c4cff",
"info": "#36a165ff",
@@ -2031,7 +2031,7 @@
"hint": "#758961ff",
"hint.background": "#d9ecdfff",
"hint.border": "#bbddc6ff",
"ignored": "#767463ff",
"ignored": "#61604fff",
"ignored.background": "#c5c4b9ff",
"ignored.border": "#969585ff",
"info": "#37a165ff",
@@ -2415,7 +2415,7 @@
"hint": "#a77087ff",
"hint.background": "#0f1c3dff",
"hint.border": "#182d5bff",
"ignored": "#8e8683ff",
"ignored": "#a79f9dff",
"ignored.background": "#443c39ff",
"ignored.border": "#665f5cff",
"info": "#407ee6ff",
@@ -2799,7 +2799,7 @@
"hint": "#a67287ff",
"hint.background": "#dfe3fbff",
"hint.border": "#c6cef7ff",
"ignored": "#837b78ff",
"ignored": "#6a6360ff",
"ignored.background": "#ccc7c5ff",
"ignored.border": "#aaa3a1ff",
"info": "#407ee6ff",
@@ -3183,7 +3183,7 @@
"hint": "#8d70a8ff",
"hint.background": "#0d1a43ff",
"hint.border": "#192961ff",
"ignored": "#908190ff",
"ignored": "#a899a8ff",
"ignored.background": "#433a43ff",
"ignored.border": "#675b67ff",
"info": "#5169ebff",
@@ -3567,7 +3567,7 @@
"hint": "#8c70a6ff",
"hint.background": "#e2dffcff",
"hint.border": "#cac7faff",
"ignored": "#857785ff",
"ignored": "#6b5e6bff",
"ignored.background": "#c6b8c6ff",
"ignored.border": "#ad9dadff",
"info": "#5169ebff",
@@ -3951,7 +3951,7 @@
"hint": "#52809aff",
"hint.background": "#121c24ff",
"hint.border": "#1a2f3cff",
"ignored": "#688c9dff",
"ignored": "#7c9fb3ff",
"ignored.background": "#33444dff",
"ignored.border": "#4f6a78ff",
"info": "#267eadff",
@@ -4335,7 +4335,7 @@
"hint": "#5a87a0ff",
"hint.background": "#d8e4eeff",
"hint.border": "#b9cee0ff",
"ignored": "#628496ff",
"ignored": "#526f7dff",
"ignored.background": "#a6cadcff",
"ignored.border": "#80a4b6ff",
"info": "#267eadff",
@@ -4719,7 +4719,7 @@
"hint": "#8a647aff",
"hint.background": "#1c1b29ff",
"hint.border": "#2c2b45ff",
"ignored": "#756e6eff",
"ignored": "#898383ff",
"ignored.background": "#3b3535ff",
"ignored.border": "#564e4eff",
"info": "#7272caff",
@@ -5103,7 +5103,7 @@
"hint": "#91697fff",
"hint.background": "#e4e1f5ff",
"hint.border": "#cecaecff",
"ignored": "#6e6666ff",
"ignored": "#5a5252ff",
"ignored.background": "#c1bbbbff",
"ignored.border": "#8e8989ff",
"info": "#7272caff",
@@ -5487,7 +5487,7 @@
"hint": "#607e76ff",
"hint.background": "#151e20ff",
"hint.border": "#1f3233ff",
"ignored": "#6f7e74ff",
"ignored": "#859188ff",
"ignored.background": "#353f39ff",
"ignored.border": "#505e55ff",
"info": "#468b8fff",
@@ -5871,7 +5871,7 @@
"hint": "#66847cff",
"hint.background": "#dae7e8ff",
"hint.border": "#bed4d6ff",
"ignored": "#68766dff",
"ignored": "#546259ff",
"ignored.background": "#bcc5bfff",
"ignored.border": "#8b968eff",
"info": "#488b90ff",
@@ -6255,7 +6255,7 @@
"hint": "#008b9fff",
"hint.background": "#051949ff",
"hint.border": "#102667ff",
"ignored": "#778f77ff",
"ignored": "#8ba48bff",
"ignored.background": "#3b453bff",
"ignored.border": "#5c6c5cff",
"info": "#3e62f4ff",
@@ -6639,7 +6639,7 @@
"hint": "#008fa1ff",
"hint.background": "#e1ddfeff",
"hint.border": "#c9c4fdff",
"ignored": "#718771ff",
"ignored": "#5f705fff",
"ignored.background": "#b4ceb4ff",
"ignored.border": "#8ea88eff",
"info": "#3e61f4ff",
@@ -7023,7 +7023,7 @@
"hint": "#6c81a5ff",
"hint.background": "#161f2bff",
"hint.border": "#203348ff",
"ignored": "#7e849eff",
"ignored": "#959bb2ff",
"ignored.background": "#3e4769ff",
"ignored.border": "#5b6385ff",
"info": "#3e8ed0ff",
@@ -7407,7 +7407,7 @@
"hint": "#7087b2ff",
"hint.background": "#dde7f6ff",
"hint.border": "#c2d5efff",
"ignored": "#767d9aff",
"ignored": "#5f6789ff",
"ignored.background": "#c1c5d8ff",
"ignored.border": "#9a9fb6ff",
"info": "#3e8fd0ff",

View File

@@ -111,7 +111,7 @@
"hint": "#628b80ff",
"hint.background": "#0d2f4eff",
"hint.border": "#1b4a6eff",
"ignored": "#696a6aff",
"ignored": "#8a8986ff",
"ignored.background": "#313337ff",
"ignored.border": "#3f4043ff",
"info": "#5ac1feff",
@@ -480,7 +480,7 @@
"hint": "#8ca7c2ff",
"hint.background": "#deebfaff",
"hint.border": "#c4daf6ff",
"ignored": "#a9acaeff",
"ignored": "#8b8e92ff",
"ignored.background": "#dcdddeff",
"ignored.border": "#cfd1d2ff",
"info": "#3b9ee5ff",
@@ -849,7 +849,7 @@
"hint": "#7399a3ff",
"hint.background": "#123950ff",
"hint.border": "#24556fff",
"ignored": "#7b7d7fff",
"ignored": "#9a9a98ff",
"ignored.background": "#464a52ff",
"ignored.border": "#53565dff",
"info": "#72cffeff",

View File

@@ -111,7 +111,7 @@
"hint": "#8c957dff",
"hint.background": "#1e2321ff",
"hint.border": "#303a36ff",
"ignored": "#998b78ff",
"ignored": "#c5b597ff",
"ignored.background": "#4c4642ff",
"ignored.border": "#5b534dff",
"info": "#83a598ff",
@@ -485,7 +485,7 @@
"hint": "#6a695bff",
"hint.background": "#1e2321ff",
"hint.border": "#303a36ff",
"ignored": "#998b78ff",
"ignored": "#c5b597ff",
"ignored.background": "#4c4642ff",
"ignored.border": "#5b534dff",
"info": "#83a598ff",
@@ -859,7 +859,7 @@
"hint": "#8c957dff",
"hint.background": "#1e2321ff",
"hint.border": "#303a36ff",
"ignored": "#998b78ff",
"ignored": "#c5b597ff",
"ignored.background": "#4c4642ff",
"ignored.border": "#5b534dff",
"info": "#83a598ff",
@@ -1233,7 +1233,7 @@
"hint": "#677562ff",
"hint.background": "#d2dee2ff",
"hint.border": "#adc5ccff",
"ignored": "#897b6eff",
"ignored": "#5f5650ff",
"ignored.background": "#d9c8a4ff",
"ignored.border": "#c8b899ff",
"info": "#0b6678ff",
@@ -1607,7 +1607,7 @@
"hint": "#677562ff",
"hint.background": "#d2dee2ff",
"hint.border": "#adc5ccff",
"ignored": "#897b6eff",
"ignored": "#5f5650ff",
"ignored.background": "#d9c8a4ff",
"ignored.border": "#c8b899ff",
"info": "#0b6678ff",
@@ -1981,7 +1981,7 @@
"hint": "#677562ff",
"hint.background": "#d2dee2ff",
"hint.border": "#adc5ccff",
"ignored": "#897b6eff",
"ignored": "#5f5650ff",
"ignored.background": "#d9c8a4ff",
"ignored.border": "#c8b899ff",
"info": "#0b6678ff",

View File

@@ -111,7 +111,7 @@
"hint": "#5a6f89ff",
"hint.background": "#18243dff",
"hint.border": "#293b5bff",
"ignored": "#555a63ff",
"ignored": "#838994ff",
"ignored.background": "#3b414dff",
"ignored.border": "#464b57ff",
"info": "#74ade8ff",
@@ -485,7 +485,7 @@
"hint": "#9294beff",
"hint.background": "#e2e2faff",
"hint.border": "#cbcdf6ff",
"ignored": "#a1a1a3ff",
"ignored": "#7e8087ff",
"ignored.background": "#dcdcddff",
"ignored.border": "#c9c9caff",
"info": "#5c78e2ff",

View File

@@ -111,7 +111,7 @@
"hint": "#5e768cff",
"hint.background": "#2f3639ff",
"hint.border": "#435255ff",
"ignored": "#2f2b43ff",
"ignored": "#74708dff",
"ignored.background": "#292738ff",
"ignored.border": "#423f55ff",
"info": "#9bced6ff",
@@ -490,7 +490,7 @@
"hint": "#7a92aaff",
"hint.background": "#dde9ebff",
"hint.border": "#c3d7dbff",
"ignored": "#938fa3ff",
"ignored": "#706c8cff",
"ignored.background": "#dcd8d8ff",
"ignored.border": "#dcd6d5ff",
"info": "#57949fff",
@@ -869,7 +869,7 @@
"hint": "#728aa2ff",
"hint.background": "#2f3639ff",
"hint.border": "#435255ff",
"ignored": "#605d7aff",
"ignored": "#85819eff",
"ignored.background": "#38354eff",
"ignored.border": "#504c68ff",
"info": "#9bced6ff",

View File

@@ -111,7 +111,7 @@
"hint": "#727d68ff",
"hint.background": "#171e1eff",
"hint.border": "#223131ff",
"ignored": "#827568ff",
"ignored": "#a69782ff",
"ignored.background": "#333944ff",
"ignored.border": "#3d4350ff",
"info": "#518b8bff",

View File

@@ -111,7 +111,7 @@
"hint": "#4f8297ff",
"hint.background": "#141f2cff",
"hint.border": "#1b3149ff",
"ignored": "#6f8389ff",
"ignored": "#93a1a1ff",
"ignored.background": "#073743ff",
"ignored.border": "#2b4e58ff",
"info": "#278ad1ff",
@@ -480,7 +480,7 @@
"hint": "#5789a3ff",
"hint.background": "#dbe6f6ff",
"hint.border": "#bfd3efff",
"ignored": "#6a7f86ff",
"ignored": "#34555eff",
"ignored.background": "#cfd0c4ff",
"ignored.border": "#9faaa8ff",
"info": "#288bd1ff",

View File

@@ -111,7 +111,7 @@
"hint": "#246e61ff",
"hint.background": "#0e2242ff",
"hint.border": "#193760ff",
"ignored": "#4c4735ff",
"ignored": "#736e55ff",
"ignored.background": "#2a261cff",
"ignored.border": "#302c21ff",
"info": "#499befff",

View File

@@ -16,7 +16,6 @@ doctest = false
anyhow.workspace = true
auto_update.workspace = true
editor.workspace = true
extension.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true

View File

@@ -1,6 +1,5 @@
use auto_update::{AutoUpdateStatus, AutoUpdater, DismissErrorMessage};
use editor::Editor;
use extension::ExtensionStore;
use futures::StreamExt;
use gpui::{
actions, svg, AppContext, CursorStyle, EventEmitter, InteractiveElement as _, Model,
@@ -289,18 +288,6 @@ impl ActivityIndicator {
};
}
if let Some(extension_store) =
ExtensionStore::try_global(cx).map(|extension_store| extension_store.read(cx))
{
if let Some(extension_id) = extension_store.outstanding_operations().keys().next() {
return Content {
icon: Some(DOWNLOAD_ICON),
message: format!("Updating {extension_id} extension…"),
on_click: None,
};
}
}
Default::default()
}
}

View File

@@ -1,22 +0,0 @@
[package]
name = "anthropic"
version = "0.1.0"
edition = "2021"
publish = false
license = "AGPL-3.0-or-later"
[lib]
path = "src/anthropic.rs"
[dependencies]
anyhow.workspace = true
futures.workspace = true
serde.workspace = true
serde_json.workspace = true
util.workspace = true
[dev-dependencies]
tokio.workspace = true
[lints]
workspace = true

View File

@@ -1 +0,0 @@
../../LICENSE-AGPL

View File

@@ -1,234 +0,0 @@
use anyhow::{anyhow, Result};
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
use util::http::{AsyncBody, HttpClient, Method, Request as HttpRequest};
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum Model {
#[default]
#[serde(rename = "claude-3-opus-20240229")]
Claude3Opus,
#[serde(rename = "claude-3-sonnet-20240229")]
Claude3Sonnet,
#[serde(rename = "claude-3-haiku-20240307")]
Claude3Haiku,
}
impl Model {
pub fn from_id(id: &str) -> Result<Self> {
if id.starts_with("claude-3-opus") {
Ok(Self::Claude3Opus)
} else if id.starts_with("claude-3-sonnet") {
Ok(Self::Claude3Sonnet)
} else if id.starts_with("claude-3-haiku") {
Ok(Self::Claude3Haiku)
} else {
Err(anyhow!("Invalid model id: {}", id))
}
}
pub fn display_name(&self) -> &'static str {
match self {
Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet",
Self::Claude3Haiku => "Claude 3 Haiku",
}
}
pub fn max_token_count(&self) -> usize {
200_000
}
}
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
User,
Assistant,
}
impl TryFrom<String> for Role {
type Error = anyhow::Error;
fn try_from(value: String) -> Result<Self> {
match value.as_str() {
"user" => Ok(Self::User),
"assistant" => Ok(Self::Assistant),
_ => Err(anyhow!("invalid role '{value}'")),
}
}
}
impl From<Role> for String {
fn from(val: Role) -> Self {
match val {
Role::User => "user".to_owned(),
Role::Assistant => "assistant".to_owned(),
}
}
}
#[derive(Debug, Serialize)]
pub struct Request {
pub model: Model,
pub messages: Vec<RequestMessage>,
pub stream: bool,
pub system: String,
pub max_tokens: u32,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct RequestMessage {
pub role: Role,
pub content: String,
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ResponseEvent {
MessageStart {
message: ResponseMessage,
},
ContentBlockStart {
index: u32,
content_block: ContentBlock,
},
Ping {},
ContentBlockDelta {
index: u32,
delta: TextDelta,
},
ContentBlockStop {
index: u32,
},
MessageDelta {
delta: ResponseMessage,
usage: Usage,
},
MessageStop {},
}
#[derive(Deserialize, Debug)]
pub struct ResponseMessage {
#[serde(rename = "type")]
pub message_type: Option<String>,
pub id: Option<String>,
pub role: Option<String>,
pub content: Option<Vec<String>>,
pub model: Option<String>,
pub stop_reason: Option<String>,
pub stop_sequence: Option<String>,
pub usage: Option<Usage>,
}
#[derive(Deserialize, Debug)]
pub struct Usage {
pub input_tokens: Option<u32>,
pub output_tokens: Option<u32>,
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ContentBlock {
Text { text: String },
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum TextDelta {
TextDelta { text: String },
}
pub async fn stream_completion(
client: &dyn HttpClient,
api_url: &str,
api_key: &str,
request: Request,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
let uri = format!("{api_url}/v1/messages");
let request = HttpRequest::builder()
.method(Method::POST)
.uri(uri)
.header("Anthropic-Version", "2023-06-01")
.header("Anthropic-Beta", "messages-2023-12-15")
.header("X-Api-Key", api_key)
.header("Content-Type", "application/json")
.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
if response.status().is_success() {
let reader = BufReader::new(response.into_body());
Ok(reader
.lines()
.filter_map(|line| async move {
match line {
Ok(line) => {
let line = line.strip_prefix("data: ")?;
match serde_json::from_str(line) {
Ok(response) => Some(Ok(response)),
Err(error) => Some(Err(anyhow!(error))),
}
}
Err(error) => Some(Err(anyhow!(error))),
}
})
.boxed())
} else {
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
match serde_json::from_str::<ResponseEvent>(body_str) {
Ok(_) => Err(anyhow!(
"Unexpected success response while expecting an error: {}",
body_str,
)),
Err(_) => Err(anyhow!(
"Failed to connect to API: {} {}",
response.status(),
body_str,
)),
}
}
}
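// Illustrative example (values made up): each streamed line that the loop above
// accepts looks roughly like this; the "data: " prefix is stripped before serde
// deserializes the JSON payload into a ResponseEvent:
//
//   data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Pong"}}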
// #[cfg(test)]
// mod tests {
// use super::*;
// use util::http::IsahcHttpClient;
// #[tokio::test]
// async fn stream_completion_success() {
// let http_client = IsahcHttpClient::new().unwrap();
// let request = Request {
// model: Model::Claude3Opus,
// messages: vec![RequestMessage {
// role: Role::User,
// content: "Ping".to_string(),
// }],
// stream: true,
// system: "Respond to ping with pong".to_string(),
// max_tokens: 4096,
// };
// let stream = stream_completion(
// &http_client,
// "https://api.anthropic.com",
// &std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY not set"),
// request,
// )
// .await
// .unwrap();
// stream
// .for_each(|event| async {
// match event {
// Ok(event) => println!("{:?}", event),
// Err(e) => eprintln!("Error: {:?}", e),
// }
// })
// .await;
// }
// }
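For orientation, a minimal, self-contained sketch of consuming the stream returned by stream_completion, assembled from the public types above; the helper name collect_text and the hard-coded endpoint URL are assumptions borrowed from the commented-out test, not part of this crate:
use anthropic::{stream_completion, Request, ResponseEvent, TextDelta};
use anyhow::Result;
use futures::StreamExt;
use util::http::HttpClient;
async fn collect_text(client: &dyn HttpClient, api_key: &str, request: Request) -> Result<String> {
    // Open the SSE stream against the endpoint used in the test above.
    let mut events =
        stream_completion(client, "https://api.anthropic.com", api_key, request).await?;
    let mut text = String::new();
    while let Some(event) = events.next().await {
        match event? {
            // Assistant text arrives incrementally in content-block deltas.
            ResponseEvent::ContentBlockDelta {
                delta: TextDelta::TextDelta { text: chunk },
                ..
            } => text.push_str(&chunk),
            // The server marks the end of the message explicitly.
            ResponseEvent::MessageStop {} => break,
            _ => {}
        }
    }
    Ok(text)
}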

View File

@@ -16,7 +16,6 @@ client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
editor.workspace = true
file_icons.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true

View File

@@ -6,8 +6,6 @@ mod prompts;
mod saved_conversation;
mod streaming_diff;
mod embedded_scope;
pub use assistant_panel::AssistantPanel;
use assistant_settings::{AssistantSettings, OpenAiModel, ZedDotDevModel};
use chrono::{DateTime, Local};

View File

@@ -1,14 +1,13 @@
use crate::{
assistant_settings::{AssistantDockPosition, AssistantSettings, ZedDotDevModel},
codegen::{self, Codegen, CodegenKind},
embedded_scope::EmbeddedScope,
prompts::generate_content_prompt,
Assist, CompletionProvider, CycleMessageRole, InlineAssist, LanguageModel,
LanguageModelRequest, LanguageModelRequestMessage, MessageId, MessageMetadata, MessageStatus,
NewConversation, QuoteSelection, ResetKey, Role, SavedConversation, SavedConversationMetadata,
SavedMessage, Split, ToggleFocus, ToggleIncludeConversation,
};
use anyhow::{anyhow, Result};
use anyhow::Result;
use chrono::{DateTime, Local};
use collections::{hash_map, HashMap, HashSet, VecDeque};
use editor::{
@@ -17,10 +16,9 @@ use editor::{
BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, ToDisplayPoint,
},
scroll::{Autoscroll, AutoscrollStrategy},
Anchor, Editor, EditorElement, EditorEvent, EditorStyle, MultiBuffer, MultiBufferSnapshot,
ToOffset as _, ToPoint,
Anchor, Editor, EditorElement, EditorEvent, EditorStyle, MultiBufferSnapshot, ToOffset as _,
ToPoint,
};
use file_icons::FileIcons;
use fs::Fs;
use futures::StreamExt;
use gpui::{
@@ -31,7 +29,7 @@ use gpui::{
StatefulInteractiveElement, Styled, Subscription, Task, TextStyle, UniformListScrollHandle,
View, ViewContext, VisualContext, WeakModel, WeakView, WhiteSpace, WindowContext,
};
use language::{language_settings::SoftWrap, Buffer, LanguageRegistry, ToOffset as _};
use language::{language_settings::SoftWrap, Buffer, BufferId, LanguageRegistry, ToOffset as _};
use parking_lot::Mutex;
use project::Project;
use search::{buffer_search::DivRegistrar, BufferSearchBar};
@@ -46,11 +44,10 @@ use ui::{
};
use util::{paths::CONVERSATIONS_DIR, post_inc, ResultExt, TryFutureExt};
use uuid::Uuid;
use workspace::notifications::NotificationId;
use workspace::{
dock::{DockPosition, Panel, PanelEvent},
searchable::Direction,
Event as WorkspaceEvent, Save, Toast, ToggleZoom, Toolbar, Workspace,
Save, Toast, ToggleZoom, Toolbar, Workspace,
};
pub fn init(cx: &mut AppContext) {
@@ -163,11 +160,6 @@ impl AssistantPanel {
];
let model = CompletionProvider::global(cx).default_model();
cx.observe_global::<FileIcons>(|_, cx| {
cx.notify();
})
.detach();
Self {
workspace: workspace_handle,
active_conversation_editor: None,
@@ -346,7 +338,7 @@ impl AssistantPanel {
style: BlockStyle::Flex,
position: snapshot.anchor_before(point_selection.head()),
height: 2,
render: Box::new({
render: Arc::new({
let inline_assistant = inline_assistant.clone();
move |cx: &mut BlockContext| {
*measurements.lock() = BlockMeasurements {
@@ -419,14 +411,10 @@ impl AssistantPanel {
if pending_assist.inline_assistant.is_none() {
if let Some(workspace) = this.workspace.upgrade() {
workspace.update(cx, |workspace, cx| {
struct InlineAssistantError;
let id =
NotificationId::identified::<InlineAssistantError>(
inline_assist_id,
);
workspace.show_toast(Toast::new(id, error), cx);
workspace.show_toast(
Toast::new(inline_assist_id, error),
cx,
);
})
}
@@ -625,10 +613,10 @@ impl AssistantPanel {
// If Markdown or No Language is Known, increase the randomness for more creative output
// If Code, decrease temperature to get more deterministic outputs
let temperature = if let Some(language) = language_name.clone() {
if language.as_ref() == "Markdown" {
1.0
} else {
if language.as_ref() != "Markdown" {
0.5
} else {
1.0
}
} else {
1.0
@@ -700,8 +688,8 @@ impl AssistantPanel {
editor.clear_background_highlights::<PendingInlineAssist>(cx);
} else {
editor.highlight_background::<PendingInlineAssist>(
&background_ranges,
|theme| theme.editor_active_line_background, // TODO use the appropriate color
background_ranges,
|theme| theme.editor_active_line_background, // todo!("use the appropriate color")
cx,
);
}
@@ -721,20 +709,18 @@ impl AssistantPanel {
});
}
fn new_conversation(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ConversationEditor>> {
let workspace = self.workspace.upgrade()?;
fn new_conversation(&mut self, cx: &mut ViewContext<Self>) -> View<ConversationEditor> {
let editor = cx.new_view(|cx| {
ConversationEditor::new(
self.model.clone(),
self.languages.clone(),
self.fs.clone(),
workspace,
self.workspace.clone(),
cx,
)
});
self.show_conversation(editor.clone(), cx);
Some(editor)
editor
}
fn show_conversation(
@@ -773,18 +759,15 @@ impl AssistantPanel {
open_ai::Model::FourTurbo => open_ai::Model::ThreePointFiveTurbo,
}),
LanguageModel::ZedDotDev(model) => LanguageModel::ZedDotDev(match &model {
ZedDotDevModel::Gpt3Point5Turbo => ZedDotDevModel::Gpt4,
ZedDotDevModel::Gpt4 => ZedDotDevModel::Gpt4Turbo,
ZedDotDevModel::Gpt4Turbo => ZedDotDevModel::Claude3Opus,
ZedDotDevModel::Claude3Opus => ZedDotDevModel::Claude3Sonnet,
ZedDotDevModel::Claude3Sonnet => ZedDotDevModel::Claude3Haiku,
ZedDotDevModel::Claude3Haiku => {
ZedDotDevModel::GptThreePointFiveTurbo => ZedDotDevModel::GptFour,
ZedDotDevModel::GptFour => ZedDotDevModel::GptFourTurbo,
ZedDotDevModel::GptFourTurbo => {
match CompletionProvider::global(cx).default_model() {
LanguageModel::ZedDotDev(custom) => custom,
_ => ZedDotDevModel::Gpt3Point5Turbo,
_ => ZedDotDevModel::GptThreePointFiveTurbo,
}
}
ZedDotDevModel::Custom(_) => ZedDotDevModel::Gpt3Point5Turbo,
ZedDotDevModel::Custom(_) => ZedDotDevModel::GptThreePointFiveTurbo,
}),
};
@@ -1006,15 +989,11 @@ impl AssistantPanel {
.await?;
this.update(&mut cx, |this, cx| {
let workspace = workspace
.upgrade()
.ok_or_else(|| anyhow!("workspace dropped"))?;
let editor = cx.new_view(|cx| {
ConversationEditor::for_conversation(conversation, fs, workspace, cx)
});
this.show_conversation(editor, cx);
anyhow::Ok(())
})??;
})?;
Ok(())
})
}
@@ -1285,10 +1264,9 @@ struct Summary {
done: bool,
}
pub struct Conversation {
struct Conversation {
id: Option<String>,
buffer: Model<Buffer>,
embedded_scope: EmbeddedScope,
message_anchors: Vec<MessageAnchor>,
messages_metadata: HashMap<MessageId, MessageMetadata>,
next_message_id: MessageId,
@@ -1310,12 +1288,11 @@ impl Conversation {
fn new(
model: LanguageModel,
language_registry: Arc<LanguageRegistry>,
embedded_scope: EmbeddedScope,
cx: &mut ModelContext<Self>,
) -> Self {
let markdown = language_registry.language_for_name("Markdown");
let buffer = cx.new_model(|cx| {
let mut buffer = Buffer::local("", cx);
let mut buffer = Buffer::new(0, BufferId::new(cx.entity_id().as_u64()).unwrap(), "");
buffer.set_language_registry(language_registry);
cx.spawn(|buffer, mut cx| async move {
let markdown = markdown.await?;
@@ -1344,9 +1321,7 @@ impl Conversation {
pending_save: Task::ready(Ok(())),
path: None,
buffer,
embedded_scope,
};
let message = MessageAnchor {
id: MessageId(post_inc(&mut this.next_message_id.0)),
start: language::Anchor::MIN,
@@ -1403,7 +1378,11 @@ impl Conversation {
let mut message_anchors = Vec::new();
let mut next_message_id = MessageId(0);
let buffer = cx.new_model(|cx| {
let mut buffer = Buffer::local(saved_conversation.text, cx);
let mut buffer = Buffer::new(
0,
BufferId::new(cx.entity_id().as_u64()).unwrap(),
saved_conversation.text,
);
for message in saved_conversation.messages {
message_anchors.push(MessageAnchor {
id: message.id,
@@ -1443,7 +1422,6 @@ impl Conversation {
pending_save: Task::ready(Ok(())),
path: Some(path),
buffer,
embedded_scope: EmbeddedScope::new(),
};
this.count_remaining_tokens(cx);
this
@@ -1462,7 +1440,7 @@ impl Conversation {
}
}
pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
let request = self.to_completion_request(cx);
self.pending_token_count = cx.spawn(|this, mut cx| {
async move {
@@ -1625,7 +1603,7 @@ impl Conversation {
}
fn to_completion_request(&self, cx: &mut ModelContext<Conversation>) -> LanguageModelRequest {
let mut request = LanguageModelRequest {
let request = LanguageModelRequest {
model: self.model.clone(),
messages: self
.messages(cx)
@@ -1635,9 +1613,6 @@ impl Conversation {
stop: vec![],
temperature: 1.0,
};
let context_message = self.embedded_scope.message(cx);
request.messages.extend(context_message);
request
}
@@ -2027,18 +2002,17 @@ impl ConversationEditor {
model: LanguageModel,
language_registry: Arc<LanguageRegistry>,
fs: Arc<dyn Fs>,
workspace: View<Workspace>,
workspace: WeakView<Workspace>,
cx: &mut ViewContext<Self>,
) -> Self {
let conversation = cx
.new_model(|cx| Conversation::new(model, language_registry, EmbeddedScope::new(), cx));
let conversation = cx.new_model(|cx| Conversation::new(model, language_registry, cx));
Self::for_conversation(conversation, fs, workspace, cx)
}
fn for_conversation(
conversation: Model<Conversation>,
fs: Arc<dyn Fs>,
workspace: View<Workspace>,
workspace: WeakView<Workspace>,
cx: &mut ViewContext<Self>,
) -> Self {
let editor = cx.new_view(|cx| {
@@ -2053,7 +2027,6 @@ impl ConversationEditor {
cx.observe(&conversation, |_, _, cx| cx.notify()),
cx.subscribe(&conversation, Self::handle_conversation_event),
cx.subscribe(&editor, Self::handle_editor_event),
cx.subscribe(&workspace, Self::handle_workspace_event),
];
let mut this = Self {
@@ -2062,10 +2035,9 @@ impl ConversationEditor {
blocks: Default::default(),
scroll_position: None,
fs,
workspace: workspace.downgrade(),
workspace,
_subscriptions,
};
cx.defer(|this, cx| this.update_active_buffer(workspace, cx));
this.update_message_headers(cx);
this
}
@@ -2199,37 +2171,6 @@ impl ConversationEditor {
}
}
fn handle_workspace_event(
&mut self,
workspace: View<Workspace>,
event: &WorkspaceEvent,
cx: &mut ViewContext<Self>,
) {
if let WorkspaceEvent::ActiveItemChanged = event {
self.update_active_buffer(workspace, cx);
}
}
fn update_active_buffer(
&mut self,
workspace: View<Workspace>,
cx: &mut ViewContext<'_, ConversationEditor>,
) {
let active_buffer = workspace
.read(cx)
.active_item(cx)
.and_then(|item| Some(item.act_as::<Editor>(cx)?.read(cx).buffer().clone()));
self.conversation.update(cx, |conversation, cx| {
conversation
.embedded_scope
.set_active_buffer(active_buffer.clone(), cx);
conversation.count_remaining_tokens(cx);
cx.notify();
});
}
fn cursor_scroll_position(&self, cx: &mut ViewContext<Self>) -> Option<ScrollPosition> {
self.editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
@@ -2267,7 +2208,7 @@ impl ConversationEditor {
.unwrap(),
height: 2,
style: BlockStyle::Sticky,
render: Box::new({
render: Arc::new({
let conversation = self.conversation.clone();
move |_cx| {
let message_id = message.id;
@@ -2363,11 +2304,11 @@ impl ConversationEditor {
let start_language = buffer.language_at(range.start);
let end_language = buffer.language_at(range.end);
let language_name = if start_language == end_language {
start_language.map(|language| language.code_fence_block_name())
start_language.map(|language| language.name())
} else {
None
};
let language_name = language_name.as_deref().unwrap_or("");
let language_name = language_name.as_deref().unwrap_or("").to_lowercase();
let selected_text = buffer.text_for_range(range).collect::<String>();
let text = if selected_text.is_empty() {
@@ -2391,17 +2332,15 @@ impl ConversationEditor {
if let Some(text) = text {
panel.update(cx, |panel, cx| {
if let Some(conversation) = panel
let conversation = panel
.active_conversation_editor()
.cloned()
.or_else(|| panel.new_conversation(cx))
{
conversation.update(cx, |conversation, cx| {
conversation
.editor
.update(cx, |editor, cx| editor.insert(&text, cx))
});
};
.unwrap_or_else(|| panel.new_conversation(cx));
conversation.update(cx, |conversation, cx| {
conversation
.editor
.update(cx, |editor, cx| editor.insert(&text, cx))
});
});
}
}
@@ -2466,120 +2405,12 @@ impl ConversationEditor {
.map(|summary| summary.text.clone())
.unwrap_or_else(|| "New Conversation".into())
}
fn render_embedded_scope(&self, cx: &mut ViewContext<Self>) -> Option<impl Element> {
let active_buffer = self
.conversation
.read(cx)
.embedded_scope
.active_buffer()?
.clone();
Some(
div()
.p_4()
.v_flex()
.child(
div()
.h_flex()
.items_center()
.child(Icon::new(IconName::File))
.child(
div()
.h_6()
.child(Label::new("File Contexts"))
.ml_1()
.font_weight(FontWeight::SEMIBOLD),
),
)
.child(
div()
.ml_4()
.child(self.render_active_buffer(active_buffer, cx)),
),
)
}
fn render_active_buffer(
&self,
buffer: Model<MultiBuffer>,
cx: &mut ViewContext<Self>,
) -> impl Element {
let buffer = buffer.read(cx);
let icon_path;
let path;
if let Some(singleton) = buffer.as_singleton() {
let singleton = singleton.read(cx);
path = singleton.file().map(|file| file.full_path(cx));
icon_path = path
.as_ref()
.and_then(|path| FileIcons::get_icon(path.as_path(), cx))
.map(SharedString::from)
.unwrap_or_else(|| SharedString::from("icons/file_icons/file.svg"));
} else {
icon_path = SharedString::from("icons/file_icons/file.svg");
path = None;
}
let file_name = path.map_or("Untitled".to_string(), |path| {
path.to_string_lossy().to_string()
});
let enabled = self
.conversation
.read(cx)
.embedded_scope
.active_buffer_enabled();
let file_name_text_color = if enabled {
Color::Default
} else {
Color::Disabled
};
div()
.id("active-buffer")
.h_flex()
.cursor_pointer()
.child(Icon::from_path(icon_path).color(file_name_text_color))
.child(
div()
.h_6()
.child(Label::new(file_name).color(file_name_text_color))
.ml_1(),
)
.children(enabled.then(|| {
div()
.child(Icon::new(IconName::Check).color(file_name_text_color))
.ml_1()
}))
.on_click(cx.listener(move |this, _, cx| {
this.conversation.update(cx, |conversation, cx| {
conversation
.embedded_scope
.set_active_buffer_enabled(!enabled);
cx.notify();
})
}))
}
}
impl EventEmitter<ConversationEditorEvent> for ConversationEditor {}
impl Render for ConversationEditor {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl Element {
//
// The ConversationEditor has two main segments
//
// 1. Messages Editor
// 2. Context
// - File Context (currently only the active file)
// - Project Diagnostics (Planned)
// - Deep Code Context (Planned, for query and other tools for the model)
//
div()
.key_context("ConversationEditor")
.capture_action(cx.listener(ConversationEditor::cancel_last_assist))
@@ -2589,15 +2420,14 @@ impl Render for ConversationEditor {
.on_action(cx.listener(ConversationEditor::assist))
.on_action(cx.listener(ConversationEditor::split))
.size_full()
.v_flex()
.relative()
.child(
div()
.flex_grow()
.size_full()
.pl_4()
.bg(cx.theme().colors().editor_background)
.child(self.editor.clone()),
)
.child(div().flex_shrink().children(self.render_embedded_scope(cx)))
}
}
@@ -2969,9 +2799,8 @@ mod tests {
init(cx);
let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
let conversation = cx.new_model(|cx| {
Conversation::new(LanguageModel::default(), registry, EmbeddedScope::new(), cx)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, cx));
let buffer = conversation.read(cx).buffer.clone();
let message_1 = conversation.read(cx).message_anchors[0].clone();
@@ -3102,9 +2931,8 @@ mod tests {
init(cx);
let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
let conversation = cx.new_model(|cx| {
Conversation::new(LanguageModel::default(), registry, EmbeddedScope::new(), cx)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, cx));
let buffer = conversation.read(cx).buffer.clone();
let message_1 = conversation.read(cx).message_anchors[0].clone();
@@ -3202,9 +3030,8 @@ mod tests {
cx.set_global(settings_store);
init(cx);
let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
let conversation = cx.new_model(|cx| {
Conversation::new(LanguageModel::default(), registry, EmbeddedScope::new(), cx)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, cx));
let buffer = conversation.read(cx).buffer.clone();
let message_1 = conversation.read(cx).message_anchors[0].clone();
@@ -3288,14 +3115,8 @@ mod tests {
cx.set_global(CompletionProvider::Fake(FakeCompletionProvider::default()));
cx.update(init);
let registry = Arc::new(LanguageRegistry::test(cx.executor()));
let conversation = cx.new_model(|cx| {
Conversation::new(
LanguageModel::default(),
registry.clone(),
EmbeddedScope::new(),
cx,
)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry.clone(), cx));
let buffer = conversation.read_with(cx, |conversation, _| conversation.buffer.clone());
let message_0 =
conversation.read_with(cx, |conversation, _| conversation.message_anchors[0].id);

View File

@@ -10,17 +10,14 @@ use serde::{
de::{self, Visitor},
Deserialize, Deserializer, Serialize, Serializer,
};
use settings::{Settings, SettingsSources};
use settings::Settings;
#[derive(Clone, Debug, Default, PartialEq)]
pub enum ZedDotDevModel {
Gpt3Point5Turbo,
Gpt4,
GptThreePointFiveTurbo,
GptFour,
#[default]
Gpt4Turbo,
Claude3Opus,
Claude3Sonnet,
Claude3Haiku,
GptFourTurbo,
Custom(String),
}
@@ -52,9 +49,9 @@ impl<'de> Deserialize<'de> for ZedDotDevModel {
E: de::Error,
{
match value {
"gpt-3.5-turbo" => Ok(ZedDotDevModel::Gpt3Point5Turbo),
"gpt-4" => Ok(ZedDotDevModel::Gpt4),
"gpt-4-turbo-preview" => Ok(ZedDotDevModel::Gpt4Turbo),
"gpt-3.5-turbo" => Ok(ZedDotDevModel::GptThreePointFiveTurbo),
"gpt-4" => Ok(ZedDotDevModel::GptFour),
"gpt-4-turbo-preview" => Ok(ZedDotDevModel::GptFourTurbo),
_ => Ok(ZedDotDevModel::Custom(value.to_owned())),
}
}
@@ -97,34 +94,27 @@ impl JsonSchema for ZedDotDevModel {
impl ZedDotDevModel {
pub fn id(&self) -> &str {
match self {
Self::Gpt3Point5Turbo => "gpt-3.5-turbo",
Self::Gpt4 => "gpt-4",
Self::Gpt4Turbo => "gpt-4-turbo-preview",
Self::Claude3Opus => "claude-3-opus",
Self::Claude3Sonnet => "claude-3-sonnet",
Self::Claude3Haiku => "claude-3-haiku",
Self::GptThreePointFiveTurbo => "gpt-3.5-turbo",
Self::GptFour => "gpt-4",
Self::GptFourTurbo => "gpt-4-turbo-preview",
Self::Custom(id) => id,
}
}
pub fn display_name(&self) -> &str {
match self {
Self::Gpt3Point5Turbo => "GPT 3.5 Turbo",
Self::Gpt4 => "GPT 4",
Self::Gpt4Turbo => "GPT 4 Turbo",
Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet",
Self::Claude3Haiku => "Claude 3 Haiku",
Self::GptThreePointFiveTurbo => "gpt-3.5-turbo",
Self::GptFour => "gpt-4",
Self::GptFourTurbo => "gpt-4-turbo",
Self::Custom(id) => id.as_str(),
}
}
pub fn max_token_count(&self) -> usize {
match self {
Self::Gpt3Point5Turbo => 2048,
Self::Gpt4 => 4096,
Self::Gpt4Turbo => 128000,
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 200000,
Self::GptThreePointFiveTurbo => 2048,
Self::GptFour => 4096,
Self::GptFourTurbo => 128000,
Self::Custom(_) => 4096, // TODO: Make this configurable
}
}
@@ -332,12 +322,13 @@ impl Settings for AssistantSettings {
type FileContent = AssistantSettingsContent;
fn load(
sources: SettingsSources<Self::FileContent>,
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
let mut settings = AssistantSettings::default();
for value in sources.defaults_and_customizations() {
for value in [default_value].iter().chain(user_values) {
let value = value.upgrade();
merge(&mut settings.enabled, value.enabled);
merge(&mut settings.button, value.button);

View File

@@ -361,8 +361,8 @@ mod tests {
use gpui::{Context, TestAppContext};
use indoc::indoc;
use language::{
language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, LanguageMatcher,
Point,
language_settings, tree_sitter_rust, Buffer, BufferId, Language, LanguageConfig,
LanguageMatcher, Point,
};
use rand::prelude::*;
use serde::Serialize;
@@ -388,8 +388,9 @@ mod tests {
}
}
"};
let buffer =
cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| {
Buffer::new(0, BufferId::new(1).unwrap(), text).with_language(Arc::new(rust_lang()), cx)
});
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let range = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
@@ -446,8 +447,9 @@ mod tests {
le
}
"};
let buffer =
cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| {
Buffer::new(0, BufferId::new(1).unwrap(), text).with_language(Arc::new(rust_lang()), cx)
});
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
@@ -504,8 +506,9 @@ mod tests {
" \n",
"}\n" //
);
let buffer =
cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| {
Buffer::new(0, BufferId::new(1).unwrap(), text).with_language(Arc::new(rust_lang()), cx)
});
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);

View File

@@ -1,5 +1,5 @@
use crate::{
assistant_settings::ZedDotDevModel, count_open_ai_tokens, CompletionProvider, LanguageModel,
assistant_settings::ZedDotDevModel, count_open_ai_tokens, CompletionProvider,
LanguageModelRequest,
};
use anyhow::{anyhow, Result};
@@ -78,21 +78,13 @@ impl ZedDotDevCompletionProvider {
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
match request.model {
LanguageModel::OpenAi(_) => future::ready(Err(anyhow!("invalid model"))).boxed(),
LanguageModel::ZedDotDev(ZedDotDevModel::Gpt4)
| LanguageModel::ZedDotDev(ZedDotDevModel::Gpt4Turbo)
| LanguageModel::ZedDotDev(ZedDotDevModel::Gpt3Point5Turbo) => {
crate::LanguageModel::OpenAi(_) => future::ready(Err(anyhow!("invalid model"))).boxed(),
crate::LanguageModel::ZedDotDev(ZedDotDevModel::GptFour)
| crate::LanguageModel::ZedDotDev(ZedDotDevModel::GptFourTurbo)
| crate::LanguageModel::ZedDotDev(ZedDotDevModel::GptThreePointFiveTurbo) => {
count_open_ai_tokens(request, cx.background_executor())
}
LanguageModel::ZedDotDev(
ZedDotDevModel::Claude3Opus
| ZedDotDevModel::Claude3Sonnet
| ZedDotDevModel::Claude3Haiku,
) => {
// Can't find a tokenizer for Claude 3, so for now just use the same as OpenAI's as an approximation.
count_open_ai_tokens(request, cx.background_executor())
}
LanguageModel::ZedDotDev(ZedDotDevModel::Custom(model)) => {
crate::LanguageModel::ZedDotDev(ZedDotDevModel::Custom(model)) => {
let request = self.client.request(proto::CountTokensWithLanguageModel {
model,
messages: request

View File

@@ -1,91 +0,0 @@
use editor::MultiBuffer;
use gpui::{AppContext, Model, ModelContext, Subscription};
use crate::{assistant_panel::Conversation, LanguageModelRequestMessage, Role};
#[derive(Default)]
pub struct EmbeddedScope {
active_buffer: Option<Model<MultiBuffer>>,
active_buffer_enabled: bool,
active_buffer_subscription: Option<Subscription>,
}
impl EmbeddedScope {
pub fn new() -> Self {
Self {
active_buffer: None,
active_buffer_enabled: true,
active_buffer_subscription: None,
}
}
pub fn set_active_buffer(
&mut self,
buffer: Option<Model<MultiBuffer>>,
cx: &mut ModelContext<Conversation>,
) {
self.active_buffer_subscription.take();
if let Some(active_buffer) = buffer.clone() {
self.active_buffer_subscription =
Some(cx.subscribe(&active_buffer, |conversation, _, e, cx| {
if let multi_buffer::Event::Edited { .. } = e {
conversation.count_remaining_tokens(cx)
}
}));
}
self.active_buffer = buffer;
}
pub fn active_buffer(&self) -> Option<&Model<MultiBuffer>> {
self.active_buffer.as_ref()
}
pub fn active_buffer_enabled(&self) -> bool {
self.active_buffer_enabled
}
pub fn set_active_buffer_enabled(&mut self, enabled: bool) {
self.active_buffer_enabled = enabled;
}
/// Provide a message for the language model based on the active buffer.
pub fn message(&self, cx: &AppContext) -> Option<LanguageModelRequestMessage> {
if !self.active_buffer_enabled {
return None;
}
let active_buffer = self.active_buffer.as_ref()?;
let buffer = active_buffer.read(cx);
if let Some(singleton) = buffer.as_singleton() {
let singleton = singleton.read(cx);
let filename = singleton
.file()
.map(|file| file.path().to_string_lossy())
.unwrap_or("Untitled".into());
let text = singleton.text();
let language = singleton
.language()
.map(|l| {
let name = l.code_fence_block_name();
name.to_string()
})
.unwrap_or_default();
let markdown =
format!("User's active file `{filename}`:\n\n```{language}\n{text}```\n\n");
return Some(LanguageModelRequestMessage {
role: Role::System,
content: markdown,
});
}
None
}
}
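For reference, a small standalone sketch of the context string that message() builds for the system role, mirroring the format! call above; the file name, language, and contents are made-up illustration values:
fn example_context_message() -> String {
    // Hypothetical active-buffer values, for illustration only.
    let filename = "src/main.rs";
    let language = "rust";
    let text = "fn main() {}\n";
    // Same shape as EmbeddedScope::message: a fenced snippet of the active file,
    // prefixed with its path, sent as a single system message.
    format!("User's active file `{filename}`:\n\n```{language}\n{text}```\n\n")
}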

View File

@@ -11,13 +11,13 @@ use gpui::{
};
use isahc::AsyncBody;
use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView};
use markdown_preview::markdown_preview_view::MarkdownPreviewView;
use schemars::JsonSchema;
use serde::Deserialize;
use serde_derive::Serialize;
use smol::io::AsyncReadExt;
use settings::{Settings, SettingsSources, SettingsStore};
use settings::{Settings, SettingsStore};
use smol::{fs::File, process::Command};
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
@@ -32,7 +32,6 @@ use util::{
http::{HttpClient, HttpClientWithUrl},
ResultExt,
};
use workspace::notifications::NotificationId;
use workspace::Workspace;
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
@@ -83,22 +82,25 @@ struct AutoUpdateSetting(bool);
/// Whether or not to automatically check for updates.
///
/// Default: true
#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)]
#[derive(Clone, Default, JsonSchema, Deserialize, Serialize)]
#[serde(transparent)]
struct AutoUpdateSettingContent(bool);
struct AutoUpdateSettingOverride(Option<bool>);
impl Settings for AutoUpdateSetting {
const KEY: Option<&'static str> = Some("auto_update");
type FileContent = Option<AutoUpdateSettingContent>;
type FileContent = AutoUpdateSettingOverride;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
let auto_update = [sources.release_channel, sources.user]
.into_iter()
.find_map(|value| value.copied().flatten())
.unwrap_or(sources.default.ok_or_else(Self::missing_default)?);
Ok(Self(auto_update.0))
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self> {
Ok(Self(
Self::json_merge(default_value, user_values)?
.0
.ok_or_else(Self::missing_default)?,
))
}
}
@@ -236,11 +238,10 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Wo
.new_view(|cx| Editor::for_multibuffer(buffer, Some(project), cx));
let workspace_handle = workspace.weak_handle();
let view: View<MarkdownPreviewView> = MarkdownPreviewView::new(
MarkdownPreviewMode::Default,
editor,
workspace_handle,
language_registry,
Some(tab_description),
language_registry,
cx,
);
workspace.add_item_to_active_pane(Box::new(view.clone()), cx);
@@ -263,11 +264,9 @@ pub fn notify_of_any_new_update(cx: &mut ViewContext<Workspace>) -> Option<()> {
let should_show_notification = should_show_notification.await?;
if should_show_notification {
workspace.update(&mut cx, |workspace, cx| {
workspace.show_notification(
NotificationId::unique::<UpdateNotification>(),
cx,
|cx| cx.new_view(|_| UpdateNotification::new(version)),
);
workspace.show_notification(0, cx, |cx| {
cx.new_view(|_| UpdateNotification::new(version))
});
updater
.read(cx)
.set_should_show_update_notification(false, cx)

View File

@@ -52,22 +52,15 @@ impl Render for Breadcrumbs {
Some(BreadcrumbText {
text: "".into(),
highlights: None,
font: None,
}),
);
}
let highlighted_segments = segments.into_iter().map(|segment| {
let mut text_style = cx.text_style();
if let Some(font) = segment.font {
text_style.font_family = font.family;
text_style.font_features = font.features;
text_style.font_style = font.style;
text_style.font_weight = font.weight;
}
text_style.color = Color::Muted.color(cx);
StyledText::new(segment.text.replace('\n', ""))
StyledText::new(segment.text)
.with_highlights(&text_style, segment.highlights.unwrap_or_default())
.into_any()
});

View File

@@ -373,10 +373,7 @@ impl ActiveCall {
self.report_call_event("hang up", cx);
Audio::end_call(cx);
let channel_id = self.channel_id(cx);
if let Some((room, _)) = self.room.take() {
cx.emit(Event::RoomLeft { channel_id });
room.update(cx, |room, cx| room.leave(cx))
} else {
Task::ready(Ok(()))
@@ -432,9 +429,7 @@ impl ActiveCall {
room: Option<Model<Room>>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
if room.as_ref() == self.room.as_ref().map(|room| &room.0) {
Task::ready(Ok(()))
} else {
if room.as_ref() != self.room.as_ref().map(|room| &room.0) {
cx.notify();
if let Some(room) = room {
if room.read(cx).status().is_offline() {
@@ -464,6 +459,8 @@ impl ActiveCall {
self.room = None;
Task::ready(Ok(()))
}
} else {
Task::ready(Ok(()))
}
}

View File

@@ -2,7 +2,7 @@ use anyhow::Result;
use gpui::AppContext;
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use settings::Settings;
#[derive(Deserialize, Debug)]
pub struct CallSettings {
@@ -29,7 +29,14 @@ impl Settings for CallSettings {
type FileContent = CallSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
sources.json_merge()
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_cx: &mut AppContext,
) -> Result<Self>
where
Self: Sized,
{
Self::load_via_json_merge(default_value, user_values)
}
}

View File

@@ -52,7 +52,7 @@ pub enum Event {
RemoteProjectInvitationDiscarded {
project_id: u64,
},
RoomLeft {
Left {
channel_id: Option<ChannelId>,
},
}
@@ -366,6 +366,9 @@ impl Room {
pub(crate) fn leave(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
cx.notify();
cx.emit(Event::Left {
channel_id: self.channel_id(),
});
self.leave_internal(cx)
}
@@ -1182,7 +1185,7 @@ impl Room {
cx.emit(Event::RemoteProjectJoined { project_id: id });
cx.spawn(move |this, mut cx| async move {
let project =
Project::in_room(id, client, user_store, language_registry, fs, cx.clone()).await?;
Project::remote(id, client, user_store, language_registry, fs, cx.clone()).await?;
this.update(&mut cx, |this, cx| {
this.joined_projects.retain(|project| {

View File

@@ -11,9 +11,7 @@ pub use channel_chat::{
mentions_to_proto, ChannelChat, ChannelChatEvent, ChannelMessage, ChannelMessageId,
MessageParams,
};
pub use channel_store::{
Channel, ChannelEvent, ChannelMembership, ChannelStore, DevServer, RemoteProject,
};
pub use channel_store::{Channel, ChannelEvent, ChannelMembership, ChannelStore};
#[cfg(test)]
mod channel_store_tests;

View File

@@ -222,9 +222,6 @@ impl ChannelChat {
let message = ChannelMessage::from_proto(response, &user_store, &mut cx).await?;
this.update(&mut cx, |this, cx| {
this.insert_messages(SumTree::from_item(message, &()), cx);
if this.first_loaded_message_id.is_none() {
this.first_loaded_message_id = Some(id);
}
})?;
Ok(id)
}))
@@ -652,27 +649,13 @@ impl ChannelChat {
let mut messages = cursor.slice(&ChannelMessageId::Saved(id), Bias::Left, &());
if let Some(item) = cursor.item() {
if item.id == ChannelMessageId::Saved(id) {
let deleted_message_ix = messages.summary().count;
let ix = messages.summary().count;
cursor.next(&());
messages.append(cursor.suffix(&()), &());
drop(cursor);
self.messages = messages;
// If the message that was deleted was the last acknowledged message,
// replace the acknowledged message with an earlier one.
self.channel_store.update(cx, |store, _| {
let summary = self.messages.summary();
if summary.count == 0 {
store.set_acknowledged_message_id(self.channel_id, None);
} else if deleted_message_ix == summary.count {
if let ChannelMessageId::Saved(id) = summary.max_id {
store.set_acknowledged_message_id(self.channel_id, Some(id));
}
}
});
cx.emit(ChannelChatEvent::MessagesUpdated {
old_range: deleted_message_ix..deleted_message_ix + 1,
old_range: ix..ix + 1,
new_count: 0,
});
}

View File

@@ -3,10 +3,7 @@ mod channel_index;
use crate::{channel_buffer::ChannelBuffer, channel_chat::ChannelChat, ChannelMessage};
use anyhow::{anyhow, Result};
use channel_index::ChannelIndex;
use client::{
ChannelId, Client, ClientSettings, DevServerId, ProjectId, RemoteProjectId, Subscription, User,
UserId, UserStore,
};
use client::{ChannelId, Client, ClientSettings, ProjectId, Subscription, User, UserId, UserStore};
use collections::{hash_map, HashMap, HashSet};
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
use gpui::{
@@ -15,7 +12,7 @@ use gpui::{
};
use language::Capability;
use rpc::{
proto::{self, ChannelRole, ChannelVisibility, DevServerStatus},
proto::{self, ChannelRole, ChannelVisibility},
TypedEnvelope,
};
use settings::Settings;
@@ -43,6 +40,7 @@ pub struct HostedProject {
name: SharedString,
_visibility: proto::ChannelVisibility,
}
impl From<proto::HostedProject> for HostedProject {
fn from(project: proto::HostedProject) -> Self {
Self {
@@ -54,56 +52,12 @@ impl From<proto::HostedProject> for HostedProject {
}
}
#[derive(Debug, Clone)]
pub struct RemoteProject {
pub id: RemoteProjectId,
pub project_id: Option<ProjectId>,
pub channel_id: ChannelId,
pub name: SharedString,
pub path: SharedString,
pub dev_server_id: DevServerId,
}
impl From<proto::RemoteProject> for RemoteProject {
fn from(project: proto::RemoteProject) -> Self {
Self {
id: RemoteProjectId(project.id),
project_id: project.project_id.map(|id| ProjectId(id)),
channel_id: ChannelId(project.channel_id),
name: project.name.into(),
path: project.path.into(),
dev_server_id: DevServerId(project.dev_server_id),
}
}
}
#[derive(Debug, Clone)]
pub struct DevServer {
pub id: DevServerId,
pub channel_id: ChannelId,
pub name: SharedString,
pub status: DevServerStatus,
}
impl From<proto::DevServer> for DevServer {
fn from(dev_server: proto::DevServer) -> Self {
Self {
id: DevServerId(dev_server.dev_server_id),
channel_id: ChannelId(dev_server.channel_id),
status: dev_server.status(),
name: dev_server.name.into(),
}
}
}
pub struct ChannelStore {
pub channel_index: ChannelIndex,
channel_invitations: Vec<Arc<Channel>>,
channel_participants: HashMap<ChannelId, Vec<Arc<User>>>,
channel_states: HashMap<ChannelId, ChannelState>,
hosted_projects: HashMap<ProjectId, HostedProject>,
remote_projects: HashMap<RemoteProjectId, RemoteProject>,
dev_servers: HashMap<DevServerId, DevServer>,
outgoing_invites: HashSet<(ChannelId, UserId)>,
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
@@ -133,8 +87,6 @@ pub struct ChannelState {
observed_chat_message: Option<u64>,
role: Option<ChannelRole>,
projects: HashSet<ProjectId>,
dev_servers: HashSet<DevServerId>,
remote_projects: HashSet<RemoteProjectId>,
}
impl Channel {
@@ -265,8 +217,6 @@ impl ChannelStore {
channel_index: ChannelIndex::default(),
channel_participants: Default::default(),
hosted_projects: Default::default(),
remote_projects: Default::default(),
dev_servers: Default::default(),
outgoing_invites: Default::default(),
opened_buffers: Default::default(),
opened_chats: Default::default(),
@@ -366,40 +316,6 @@ impl ChannelStore {
projects
}
pub fn dev_servers_for_id(&self, channel_id: ChannelId) -> Vec<DevServer> {
let mut dev_servers: Vec<DevServer> = self
.channel_states
.get(&channel_id)
.map(|state| state.dev_servers.clone())
.unwrap_or_default()
.into_iter()
.flat_map(|id| self.dev_servers.get(&id).cloned())
.collect();
dev_servers.sort_by_key(|s| (s.name.clone(), s.id));
dev_servers
}
pub fn find_dev_server_by_id(&self, id: DevServerId) -> Option<&DevServer> {
self.dev_servers.get(&id)
}
pub fn find_remote_project_by_id(&self, id: RemoteProjectId) -> Option<&RemoteProject> {
self.remote_projects.get(&id)
}
pub fn remote_projects_for_id(&self, channel_id: ChannelId) -> Vec<RemoteProject> {
let mut remote_projects: Vec<RemoteProject> = self
.channel_states
.get(&channel_id)
.map(|state| state.remote_projects.clone())
.unwrap_or_default()
.into_iter()
.flat_map(|id| self.remote_projects.get(&id).cloned())
.collect();
remote_projects.sort_by_key(|p| (p.name.clone(), p.id));
remote_projects
}
pub fn has_open_channel_buffer(&self, channel_id: ChannelId, _cx: &AppContext) -> bool {
if let Some(buffer) = self.opened_buffers.get(&channel_id) {
if let OpenedModelHandle::Open(buffer) = buffer {
@@ -464,12 +380,6 @@ impl ChannelStore {
.is_some_and(|state| state.has_new_messages())
}
pub fn set_acknowledged_message_id(&mut self, channel_id: ChannelId, message_id: Option<u64>) {
if let Some(state) = self.channel_states.get_mut(&channel_id) {
state.latest_chat_message = message_id;
}
}
pub fn last_acknowledge_message_id(&self, channel_id: ChannelId) -> Option<u64> {
self.channel_states.get(&channel_id).and_then(|state| {
if let Some(last_message_id) = state.latest_chat_message {
@@ -902,45 +812,6 @@ impl ChannelStore {
})
}
pub fn create_remote_project(
&mut self,
channel_id: ChannelId,
dev_server_id: DevServerId,
name: String,
path: String,
cx: &mut ModelContext<Self>,
) -> Task<Result<proto::CreateRemoteProjectResponse>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
client
.request(proto::CreateRemoteProject {
channel_id: channel_id.0,
dev_server_id: dev_server_id.0,
name,
path,
})
.await
})
}
pub fn create_dev_server(
&mut self,
channel_id: ChannelId,
name: String,
cx: &mut ModelContext<Self>,
) -> Task<Result<proto::CreateDevServerResponse>> {
let client = self.client.clone();
cx.background_executor().spawn(async move {
let result = client
.request(proto::CreateDevServer {
channel_id: channel_id.0,
name,
})
.await?;
Ok(result)
})
}
pub fn get_channel_member_details(
&self,
channel_id: ChannelId,
@@ -1221,11 +1092,7 @@ impl ChannelStore {
|| !payload.latest_channel_message_ids.is_empty()
|| !payload.latest_channel_buffer_versions.is_empty()
|| !payload.hosted_projects.is_empty()
|| !payload.deleted_hosted_projects.is_empty()
|| !payload.dev_servers.is_empty()
|| !payload.deleted_dev_servers.is_empty()
|| !payload.remote_projects.is_empty()
|| !payload.deleted_remote_projects.is_empty();
|| !payload.deleted_hosted_projects.is_empty();
if channels_changed {
if !payload.delete_channels.is_empty() {
@@ -1313,60 +1180,6 @@ impl ChannelStore {
.remove_hosted_project(old_project.project_id);
}
}
for remote_project in payload.remote_projects {
let remote_project: RemoteProject = remote_project.into();
if let Some(old_remote_project) = self
.remote_projects
.insert(remote_project.id, remote_project.clone())
{
self.channel_states
.entry(old_remote_project.channel_id)
.or_default()
.remove_remote_project(old_remote_project.id);
}
self.channel_states
.entry(remote_project.channel_id)
.or_default()
.add_remote_project(remote_project.id);
}
for remote_project_id in payload.deleted_remote_projects {
let remote_project_id = RemoteProjectId(remote_project_id);
if let Some(old_project) = self.remote_projects.remove(&remote_project_id) {
self.channel_states
.entry(old_project.channel_id)
.or_default()
.remove_remote_project(old_project.id);
}
}
for dev_server in payload.dev_servers {
let dev_server: DevServer = dev_server.into();
if let Some(old_server) = self.dev_servers.insert(dev_server.id, dev_server.clone())
{
self.channel_states
.entry(old_server.channel_id)
.or_default()
.remove_dev_server(old_server.id);
}
self.channel_states
.entry(dev_server.channel_id)
.or_default()
.add_dev_server(dev_server.id);
}
for dev_server_id in payload.deleted_dev_servers {
let dev_server_id = DevServerId(dev_server_id);
if let Some(old_server) = self.dev_servers.remove(&dev_server_id) {
self.channel_states
.entry(old_server.channel_id)
.or_default()
.remove_dev_server(old_server.id);
}
}
}
cx.notify();
@@ -1481,20 +1294,4 @@ impl ChannelState {
fn remove_hosted_project(&mut self, project_id: ProjectId) {
self.projects.remove(&project_id);
}
fn add_remote_project(&mut self, remote_project_id: RemoteProjectId) {
self.remote_projects.insert(remote_project_id);
}
fn remove_remote_project(&mut self, remote_project_id: RemoteProjectId) {
self.remote_projects.remove(&remote_project_id);
}
fn add_dev_server(&mut self, dev_server_id: DevServerId) {
self.dev_servers.insert(dev_server_id);
}
fn remove_dev_server(&mut self, dev_server_id: DevServerId) {
self.dev_servers.remove(&dev_server_id);
}
}

View File

@@ -264,7 +264,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
);
assert_eq!(
channel.next_event(cx).await,
channel.next_event(cx),
ChannelChatEvent::MessagesUpdated {
old_range: 2..2,
new_count: 1,
@@ -317,7 +317,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
);
assert_eq!(
channel.next_event(cx).await,
channel.next_event(cx),
ChannelChatEvent::MessagesUpdated {
old_range: 0..0,
new_count: 2,

View File

@@ -1,4 +1,4 @@
#![cfg_attr(any(target_os = "linux", target_os = "windows"), allow(dead_code))]
#![cfg_attr(target_os = "linux", allow(dead_code))]
use anyhow::{anyhow, Context, Result};
use clap::Parser;

View File

@@ -28,7 +28,7 @@ use release_channel::{AppVersion, ReleaseChannel};
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources, SettingsStore};
use settings::{Settings, SettingsStore};
use std::fmt;
use std::{
any::TypeId,
@@ -97,8 +97,15 @@ impl Settings for ClientSettings {
type FileContent = ClientSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
let mut result = sources.json_merge::<Self>()?;
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self>
where
Self: Sized,
{
let mut result = Self::load_via_json_merge(default_value, user_values)?;
if let Some(server_url) = &*ZED_SERVER_URL {
result.server_url = server_url.clone()
}
@@ -420,19 +427,21 @@ impl settings::Settings for TelemetrySettings {
type FileContent = TelemetrySettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self> {
Ok(Self {
diagnostics: sources.user.as_ref().and_then(|v| v.diagnostics).unwrap_or(
sources
.default
diagnostics: user_values.first().and_then(|v| v.diagnostics).unwrap_or(
default_value
.diagnostics
.ok_or_else(Self::missing_default)?,
),
metrics: sources
.user
.as_ref()
metrics: user_values
.first()
.and_then(|v| v.metrics)
.unwrap_or(sources.default.metrics.ok_or_else(Self::missing_default)?),
.unwrap_or(default_value.metrics.ok_or_else(Self::missing_default)?),
})
}
}
@@ -759,9 +768,8 @@ impl Client {
read_credentials_from_keychain(cx).await.is_some()
}
pub fn set_dev_server_token(&self, token: DevServerToken) -> &Self {
pub fn set_dev_server_token(&self, token: DevServerToken) {
self.state.write().credentials = Some(Credentials::DevServer { token });
self
}
#[async_recursion(?Send)]
@@ -782,6 +790,7 @@ impl Client {
}
Status::UpgradeRequired => return Err(EstablishConnectionError::UpgradeRequired)?,
};
if was_disconnected {
self.set_status(Status::Authenticating, cx);
} else {

View File

@@ -590,10 +590,7 @@ mod tests {
}
#[gpui::test]
async fn test_telemetry_flush_on_flush_interval(
executor: BackgroundExecutor,
cx: &mut TestAppContext,
) {
async fn test_connection_timeout(executor: BackgroundExecutor, cx: &mut TestAppContext) {
init_test(cx);
let clock = Arc::new(FakeSystemClock::new(
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),

View File

@@ -27,12 +27,6 @@ impl std::fmt::Display for ChannelId {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct ProjectId(pub u64);
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct DevServerId(pub u64);
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct RemoteProjectId(pub u64);
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ParticipantIndex(pub u32);

View File

@@ -1,5 +1,5 @@
DATABASE_URL = "postgres://postgres@localhost/zed"
# DATABASE_URL = "sqlite:////root/0/zed/db.sqlite3?mode=rwc"
# DATABASE_URL = "sqlite:////home/zed/.config/zed/db.sqlite3?mode=rwc"
DATABASE_MAX_CONNECTIONS = 5
HTTP_PORT = 8080
API_TOKEN = "secret"

View File

@@ -18,7 +18,6 @@ sqlite = ["sea-orm/sqlx-sqlite", "sqlx/sqlite"]
test-support = ["sqlite"]
[dependencies]
anthropic.workspace = true
anyhow.workspace = true
async-tungstenite = "0.16"
aws-config = { version = "1.1.5" }
@@ -47,7 +46,6 @@ reqwest = { version = "0.11", features = ["json"] }
rpc.workspace = true
scrypt = "0.7"
sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
semantic_version.workspace = true
semver.workspace = true
serde.workspace = true
serde_derive.workspace = true
@@ -63,8 +61,8 @@ tokio.workspace = true
toml.workspace = true
tower = "0.4"
tower-http = { workspace = true, features = ["trace"] }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry", "tracing-log"] } # workaround for https://github.com/tokio-rs/tracing/issues/2927
tracing = "0.1.34"
tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json", "registry", "tracing-log"] }
util.workspace = true
uuid.workspace = true
@@ -102,4 +100,3 @@ theme.workspace = true
unindent.workspace = true
util.workspace = true
workspace = { workspace = true, features = ["test-support"] }
headless.workspace = true

View File

@@ -47,6 +47,19 @@ spec:
metadata:
labels:
app: ${ZED_SERVICE_NAME}
annotations:
ad.datadoghq.com/collab.check_names: |
["openmetrics"]
ad.datadoghq.com/collab.init_configs: |
[{}]
ad.datadoghq.com/collab.instances: |
[
{
"openmetrics_endpoint": "http://%%host%%:%%port%%/metrics",
"namespace": "collab_${ZED_KUBE_NAMESPACE}",
"metrics": [".*"]
}
]
spec:
containers:
- name: ${ZED_SERVICE_NAME}
@@ -117,11 +130,6 @@ spec:
secretKeyRef:
name: openai
key: api_key
- name: ANTHROPIC_API_KEY
valueFrom:
secretKeyRef:
name: anthropic
key: api_key
- name: BLOB_STORE_ACCESS_KEY
valueFrom:
secretKeyRef:

View File

@@ -45,13 +45,12 @@ CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id");
CREATE TABLE "projects" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"room_id" INTEGER REFERENCES rooms (id) ON DELETE CASCADE,
"room_id" INTEGER REFERENCES rooms (id) ON DELETE CASCADE NOT NULL,
"host_user_id" INTEGER REFERENCES users (id),
"host_connection_id" INTEGER,
"host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE,
"hosted_project_id" INTEGER REFERENCES hosted_projects (id),
"remote_project_id" INTEGER REFERENCES remote_projects(id)
"hosted_project_id" INTEGER REFERENCES hosted_projects (id)
);
CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id");
@@ -398,9 +397,7 @@ CREATE TABLE hosted_projects (
channel_id INTEGER NOT NULL REFERENCES channels(id),
name TEXT NOT NULL,
visibility TEXT NOT NULL,
deleted_at TIMESTAMP NULL,
dev_server_id INTEGER REFERENCES dev_servers(id),
dev_server_path TEXT
deleted_at TIMESTAMP NULL
);
CREATE INDEX idx_hosted_projects_on_channel_id ON hosted_projects (channel_id);
CREATE UNIQUE INDEX uix_hosted_projects_on_channel_id_and_name ON hosted_projects (channel_id, name) WHERE (deleted_at IS NULL);
@@ -412,13 +409,3 @@ CREATE TABLE dev_servers (
hashed_token TEXT NOT NULL
);
CREATE INDEX idx_dev_servers_on_channel_id ON dev_servers (channel_id);
CREATE TABLE remote_projects (
id INTEGER PRIMARY KEY AUTOINCREMENT,
channel_id INTEGER NOT NULL REFERENCES channels(id),
dev_server_id INTEGER NOT NULL REFERENCES dev_servers(id),
name TEXT NOT NULL,
path TEXT NOT NULL
);
ALTER TABLE hosted_projects ADD COLUMN remote_project_id INTEGER REFERENCES remote_projects(id);

View File

@@ -1,9 +0,0 @@
CREATE TABLE remote_projects (
id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
channel_id INT NOT NULL REFERENCES channels(id),
dev_server_id INT NOT NULL REFERENCES dev_servers(id),
name TEXT NOT NULL,
path TEXT NOT NULL
);
ALTER TABLE projects ADD COLUMN remote_project_id INTEGER REFERENCES remote_projects(id);

View File

@@ -1,9 +0,0 @@
CREATE TABLE IF NOT EXISTS "embeddings" (
"model" TEXT,
"digest" BYTEA,
"dimensions" FLOAT4[1536],
"retrieved_at" TIMESTAMP NOT NULL DEFAULT now(),
PRIMARY KEY ("model", "digest")
);
CREATE INDEX IF NOT EXISTS "idx_retrieved_at_on_embeddings" ON "embeddings" ("retrieved_at");

View File

@@ -10,7 +10,6 @@ use axum::{
Extension, Router, TypedHeader,
};
use rpc::ExtensionMetadata;
use semantic_version::SemanticVersion;
use serde::{Serialize, Serializer};
use sha2::{Digest, Sha256};
use std::sync::{Arc, OnceLock};
@@ -18,6 +17,7 @@ use telemetry_events::{
ActionEvent, AppEvent, AssistantEvent, CallEvent, CopilotEvent, CpuEvent, EditEvent,
EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, MemoryEvent, SettingEvent,
};
use util::SemanticVersion;
pub fn router() -> Router {
Router::new()
@@ -459,12 +459,6 @@ impl ToUpload {
}
insert.end().await?;
let event_count = rows.len();
log::info!(
"wrote {event_count} {event_specifier} to '{table}'",
event_specifier = if event_count == 1 { "event" } else { "events" }
);
}
Ok(())
@@ -528,9 +522,9 @@ impl EditorEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
os_name: body.os_name.clone(),
os_version: body.os_version.clone().unwrap_or_default(),
@@ -590,9 +584,9 @@ impl CopilotEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
os_name: body.os_name.clone(),
os_version: body.os_version.clone().unwrap_or_default(),
@@ -645,9 +639,9 @@ impl CallEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone().unwrap_or_default(),
session_id: body.session_id.clone(),
@@ -694,9 +688,9 @@ impl AssistantEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -738,9 +732,9 @@ impl CpuEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -785,9 +779,9 @@ impl MemoryEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -831,9 +825,9 @@ impl AppEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -876,9 +870,9 @@ impl SettingEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -927,9 +921,9 @@ impl ExtensionEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -991,9 +985,9 @@ impl EditEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -1040,9 +1034,9 @@ impl ActionEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
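The hunks above repeat one mechanical change across every telemetry row type: the branch's `util::SemanticVersion` exposes plain public `major`/`minor`/`patch` fields, while the `semantic_version` crate used on the other side of the hunk exposes accessor methods with the same names. A minimal sketch of the two styles side by side (the `semver` binding stands in for the `Option<SemanticVersion>` parsed from `body.app_version`; nothing here is the real row struct):

    // Field style, as used with util::SemanticVersion on this branch:
    let major = semver.map(|s| s.major as i32);
    let minor = semver.map(|s| s.minor as i32);
    let patch = semver.map(|s| s.patch as i32);

    // Accessor style, as used with semantic_version::SemanticVersion on the other side:
    let major = semver.map(|v| v.major() as i32);
    let minor = semver.map(|v| v.minor() as i32);
    let patch = semver.map(|v| v.patch() as i32);

Either way the three columns end up as `Option<i32>`, so only the call sites differ, which is why the same three-line change recurs in each row constructor below the first.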

View File

@@ -1,4 +1,3 @@
use crate::db::ExtensionVersionConstraints;
use crate::{db::NewExtensionVersion, AppState, Error, Result};
use anyhow::{anyhow, Context as _};
use aws_sdk_s3::presigning::PresigningConfig;
@@ -11,17 +10,14 @@ use axum::{
};
use collections::HashMap;
use rpc::{ExtensionApiManifest, GetExtensionsResponse};
use semantic_version::SemanticVersion;
use serde::Deserialize;
use std::{sync::Arc, time::Duration};
use time::PrimitiveDateTime;
use util::{maybe, ResultExt};
use util::ResultExt;
pub fn router() -> Router {
Router::new()
.route("/extensions", get(get_extensions))
.route("/extensions/updates", get(get_extension_updates))
.route("/extensions/:extension_id", get(get_extension_versions))
.route(
"/extensions/:extension_id/download",
get(download_latest_extension),
@@ -36,108 +32,38 @@ pub fn router() -> Router {
struct GetExtensionsParams {
filter: Option<String>,
#[serde(default)]
ids: Option<String>,
#[serde(default)]
max_schema_version: i32,
}
#[derive(Debug, Deserialize)]
struct DownloadLatestExtensionParams {
extension_id: String,
}
#[derive(Debug, Deserialize)]
struct DownloadExtensionParams {
extension_id: String,
version: String,
}
async fn get_extensions(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<GetExtensionsParams>,
) -> Result<Json<GetExtensionsResponse>> {
let extension_ids = params
.ids
.as_ref()
.map(|s| s.split(',').map(|s| s.trim()).collect::<Vec<_>>());
let extensions = if let Some(extension_ids) = extension_ids {
app.db.get_extensions_by_ids(&extension_ids, None).await?
} else {
app.db
.get_extensions(params.filter.as_deref(), params.max_schema_version, 500)
.await?
};
Ok(Json(GetExtensionsResponse { data: extensions }))
}
#[derive(Debug, Deserialize)]
struct GetExtensionUpdatesParams {
ids: String,
min_schema_version: i32,
max_schema_version: i32,
min_wasm_api_version: SemanticVersion,
max_wasm_api_version: SemanticVersion,
}
async fn get_extension_updates(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<GetExtensionUpdatesParams>,
) -> Result<Json<GetExtensionsResponse>> {
let constraints = ExtensionVersionConstraints {
schema_versions: params.min_schema_version..=params.max_schema_version,
wasm_api_versions: params.min_wasm_api_version..=params.max_wasm_api_version,
};
let extension_ids = params.ids.split(',').map(|s| s.trim()).collect::<Vec<_>>();
let extensions = app
.db
.get_extensions_by_ids(&extension_ids, Some(&constraints))
.get_extensions(params.filter.as_deref(), params.max_schema_version, 500)
.await?;
Ok(Json(GetExtensionsResponse { data: extensions }))
}
#[derive(Debug, Deserialize)]
struct GetExtensionVersionsParams {
extension_id: String,
}
async fn get_extension_versions(
Extension(app): Extension<Arc<AppState>>,
Path(params): Path<GetExtensionVersionsParams>,
) -> Result<Json<GetExtensionsResponse>> {
let extension_versions = app.db.get_extension_versions(&params.extension_id).await?;
Ok(Json(GetExtensionsResponse {
data: extension_versions,
}))
}
#[derive(Debug, Deserialize)]
struct DownloadLatestExtensionPathParams {
extension_id: String,
}
#[derive(Debug, Deserialize)]
struct DownloadLatestExtensionQueryParams {
min_schema_version: Option<i32>,
max_schema_version: Option<i32>,
min_wasm_api_version: Option<SemanticVersion>,
max_wasm_api_version: Option<SemanticVersion>,
}
async fn download_latest_extension(
Extension(app): Extension<Arc<AppState>>,
Path(params): Path<DownloadLatestExtensionPathParams>,
Query(query): Query<DownloadLatestExtensionQueryParams>,
Path(params): Path<DownloadLatestExtensionParams>,
) -> Result<Redirect> {
let constraints = maybe!({
let min_schema_version = query.min_schema_version?;
let max_schema_version = query.max_schema_version?;
let min_wasm_api_version = query.min_wasm_api_version?;
let max_wasm_api_version = query.max_wasm_api_version?;
Some(ExtensionVersionConstraints {
schema_versions: min_schema_version..=max_schema_version,
wasm_api_versions: min_wasm_api_version..=max_wasm_api_version,
})
});
let extension = app
.db
.get_extension(&params.extension_id, constraints.as_ref())
.get_extension(&params.extension_id)
.await?
.ok_or_else(|| anyhow!("unknown extension"))?;
download_extension(
@@ -150,12 +76,6 @@ async fn download_latest_extension(
.await
}
#[derive(Debug, Deserialize)]
struct DownloadExtensionParams {
extension_id: String,
version: String,
}
async fn download_extension(
Extension(app): Extension<Arc<AppState>>,
Path(params): Path<DownloadExtensionParams>,
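On the newer side of this hunk, the `/extensions/updates` route deserializes its query string into `GetExtensionUpdatesParams` and turns it into an `ExtensionVersionConstraints` plus a list of extension ids. A rough sketch of those first few steps, with invented parameter values (the struct and field names come from the hunk above; the ids and version numbers are made up):

    // Illustrative input, as if produced by axum's Query extractor.
    let params = GetExtensionUpdatesParams {
        ids: "gleam,toml".to_string(),                  // made-up extension ids
        min_schema_version: 0,
        max_schema_version: 1,
        min_wasm_api_version: "0.0.1".parse().unwrap(), // SemanticVersion implements FromStr
        max_wasm_api_version: "0.0.6".parse().unwrap(),
    };

    let constraints = ExtensionVersionConstraints {
        schema_versions: params.min_schema_version..=params.max_schema_version,
        wasm_api_versions: params.min_wasm_api_version..=params.max_wasm_api_version,
    };
    let extension_ids: Vec<&str> = params.ids.split(',').map(|s| s.trim()).collect();
    // These are then passed to db.get_extensions_by_ids(&extension_ids, Some(&constraints)).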

View File

@@ -1,8 +1,9 @@
use collections::HashMap;
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use serde_derive::Deserialize;
use serde_derive::Serialize;
use serde_json::Value;
use util::SemanticVersion;
#[derive(Debug)]
pub struct IpsFile {

View File

@@ -10,7 +10,6 @@ use axum::{
response::IntoResponse,
};
use prometheus::{exponential_buckets, register_histogram, Histogram};
pub use rpc::auth::random_token;
use scrypt::{
password_hash::{PasswordHash, PasswordVerifier},
Scrypt,
@@ -153,7 +152,7 @@ pub async fn create_access_token(
/// Hashing prevents anyone with access to the database from being able to log in.
/// As the token is randomly generated, we don't need to worry about scrypt-style
/// protection.
pub fn hash_access_token(token: &str) -> String {
fn hash_access_token(token: &str) -> String {
let digest = sha2::Sha256::digest(token);
format!(
"$sha256${}",
@@ -231,15 +230,18 @@ pub async fn verify_access_token(
})
}
pub fn generate_dev_server_token(id: usize, access_token: String) -> String {
format!("{}.{}", id, access_token)
}
// a dev_server_token has the format <id>.<base64>. This is to make them
// relatively easy to copy/paste around.
pub async fn verify_dev_server_token(
dev_server_token: &str,
db: &Arc<Database>,
) -> anyhow::Result<dev_server::Model> {
let (id, token) = split_dev_server_token(dev_server_token)?;
let mut parts = dev_server_token.splitn(2, '.');
let id = DevServerId(parts.next().unwrap_or_default().parse()?);
let token = parts
.next()
.ok_or_else(|| anyhow!("invalid dev server token format"))?;
let token_hash = hash_access_token(&token);
let server = db.get_dev_server(id).await?;
@@ -255,17 +257,6 @@ pub async fn verify_dev_server_token(
}
}
// a dev_server_token has the format <id>.<base64>. This is to make them
// relatively easy to copy/paste around.
pub fn split_dev_server_token(dev_server_token: &str) -> anyhow::Result<(DevServerId, &str)> {
let mut parts = dev_server_token.splitn(2, '.');
let id = DevServerId(parts.next().unwrap_or_default().parse()?);
let token = parts
.next()
.ok_or_else(|| anyhow!("invalid dev server token format"))?;
Ok((id, token))
}
#[cfg(test)]
mod test {
use rand::thread_rng;
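The comments above document the dev server token format: `<id>.<base64 secret>`, chosen so tokens are easy to copy and paste. Verification splits off the numeric id, hashes the remaining secret with SHA-256 (`hash_access_token`), and compares that against the hash stored for the dev server with that id. A minimal sketch of the split step, using the same `splitn` call as the hunk (the token value is made up):

    let dev_server_token = "42.c2VjcmV0LXRva2Vu"; // invented example: id 42, base64-ish secret
    let mut parts = dev_server_token.splitn(2, '.');
    let id: u64 = parts.next().unwrap_or_default().parse().expect("token starts with a numeric id");
    let secret = parts.next().expect("token contains a '.' separator");
    assert_eq!(id, 42);
    assert_eq!(secret, "c2VjcmV0LXRva2Vu");
    // `secret` is what gets hashed and compared; the raw value is never stored server-side.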

View File

@@ -21,13 +21,11 @@ use sea_orm::{
FromQueryResult, IntoActiveModel, IsolationLevel, JoinType, QueryOrder, QuerySelect, Statement,
TransactionTrait,
};
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use serde::{ser::Error as _, Deserialize, Serialize, Serializer};
use sqlx::{
migrate::{Migrate, Migration, MigrationSource},
Connection,
};
use std::ops::RangeInclusive;
use std::{
fmt::Write as _,
future::Future,
@@ -38,7 +36,7 @@ use std::{
sync::Arc,
time::Duration,
};
use time::PrimitiveDateTime;
use time::{format_description::well_known::iso8601, PrimitiveDateTime};
use tokio::sync::{Mutex, OwnedMutexGuard};
#[cfg(test)]
@@ -56,7 +54,6 @@ pub struct Database {
options: ConnectOptions,
pool: DatabaseConnection,
rooms: DashMap<RoomId, Arc<Mutex<()>>>,
projects: DashMap<ProjectId, Arc<Mutex<()>>>,
rng: Mutex<StdRng>,
executor: Executor,
notification_kinds_by_id: HashMap<NotificationKindId, &'static str>,
@@ -75,7 +72,6 @@ impl Database {
options: options.clone(),
pool: sea_orm::Database::connect(options).await?,
rooms: DashMap::with_capacity(16384),
projects: DashMap::with_capacity(16384),
rng: Mutex::new(StdRng::seed_from_u64(0)),
notification_kinds_by_id: HashMap::default(),
notification_kinds_by_name: HashMap::default(),
@@ -88,7 +84,6 @@ impl Database {
#[cfg(test)]
pub fn reset(&self) {
self.rooms.clear();
self.projects.clear();
}
/// Runs the database migrations.
@@ -193,10 +188,7 @@ impl Database {
}
/// The same as room_transaction, but if you need to only optionally return a Room.
async fn optional_room_transaction<F, Fut, T>(
&self,
f: F,
) -> Result<Option<TransactionGuard<T>>>
async fn optional_room_transaction<F, Fut, T>(&self, f: F) -> Result<Option<RoomGuard<T>>>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<Option<(RoomId, T)>>>,
@@ -211,7 +203,7 @@ impl Database {
let _guard = lock.lock_owned().await;
match tx.commit().await.map_err(Into::into) {
Ok(()) => {
return Ok(Some(TransactionGuard {
return Ok(Some(RoomGuard {
data,
_guard,
_not_send: PhantomData,
@@ -246,63 +238,10 @@ impl Database {
self.run(body).await
}
async fn project_transaction<F, Fut, T>(
&self,
project_id: ProjectId,
f: F,
) -> Result<TransactionGuard<T>>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
{
let room_id = Database::room_id_for_project(&self, project_id).await?;
let body = async {
let mut i = 0;
loop {
let lock = if let Some(room_id) = room_id {
self.rooms.entry(room_id).or_default().clone()
} else {
self.projects.entry(project_id).or_default().clone()
};
let _guard = lock.lock_owned().await;
let (tx, result) = self.with_transaction(&f).await?;
match result {
Ok(data) => match tx.commit().await.map_err(Into::into) {
Ok(()) => {
return Ok(TransactionGuard {
data,
_guard,
_not_send: PhantomData,
});
}
Err(error) => {
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
},
Err(error) => {
tx.rollback().await?;
if !self.retry_on_serialization_error(&error, i).await {
return Err(error);
}
}
}
i += 1;
}
};
self.run(body).await
}
/// room_transaction runs the block in a transaction. It returns a RoomGuard, that keeps
/// the database locked until it is dropped. This ensures that updates sent to clients are
/// properly serialized with respect to database changes.
async fn room_transaction<F, Fut, T>(
&self,
room_id: RoomId,
f: F,
) -> Result<TransactionGuard<T>>
async fn room_transaction<F, Fut, T>(&self, room_id: RoomId, f: F) -> Result<RoomGuard<T>>
where
F: Send + Fn(TransactionHandle) -> Fut,
Fut: Send + Future<Output = Result<T>>,
@@ -316,7 +255,7 @@ impl Database {
match result {
Ok(data) => match tx.commit().await.map_err(Into::into) {
Ok(()) => {
return Ok(TransactionGuard {
return Ok(RoomGuard {
data,
_guard,
_not_send: PhantomData,
@@ -458,16 +397,15 @@ impl Deref for TransactionHandle {
}
}
/// [`TransactionGuard`] keeps a database transaction alive until it is dropped.
/// It wraps data that depends on the state of the database and prevents an additional
/// transaction from starting that would invalidate that data.
pub struct TransactionGuard<T> {
/// [`RoomGuard`] keeps a database transaction alive until it is dropped.
/// so that updates to rooms are serialized.
pub struct RoomGuard<T> {
data: T,
_guard: OwnedMutexGuard<()>,
_not_send: PhantomData<Rc<()>>,
}
impl<T> Deref for TransactionGuard<T> {
impl<T> Deref for RoomGuard<T> {
type Target = T;
fn deref(&self) -> &T {
@@ -475,13 +413,13 @@ impl<T> Deref for TransactionGuard<T> {
}
}
impl<T> DerefMut for TransactionGuard<T> {
impl<T> DerefMut for RoomGuard<T> {
fn deref_mut(&mut self) -> &mut T {
&mut self.data
}
}
impl<T> TransactionGuard<T> {
impl<T> RoomGuard<T> {
/// Returns the inner value of the guard.
pub fn into_inner(self) -> T {
self.data
@@ -520,8 +458,6 @@ pub struct UpdatedChannelMessage {
pub notifications: NotificationBatch,
pub reply_to_message_id: Option<MessageId>,
pub timestamp: PrimitiveDateTime,
pub deleted_mention_notification_ids: Vec<NotificationId>,
pub updated_mention_notifications: Vec<rpc::proto::Notification>,
}
#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
@@ -578,7 +514,6 @@ pub struct MembershipUpdated {
/// The result of setting a member's role.
#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub enum SetMemberRoleResult {
InviteUpdated(Channel),
MembershipUpdated(MembershipUpdated),
@@ -655,8 +590,6 @@ pub struct ChannelsForUser {
pub channel_memberships: Vec<channel_member::Model>,
pub channel_participants: HashMap<ChannelId, Vec<UserId>>,
pub hosted_projects: Vec<proto::HostedProject>,
pub dev_servers: Vec<dev_server::Model>,
pub remote_projects: Vec<proto::RemoteProject>,
pub observed_buffer_versions: Vec<proto::ChannelBufferVersion>,
pub observed_channel_messages: Vec<proto::ChannelMessageId>,
@@ -698,30 +631,6 @@ pub struct RejoinedProject {
pub language_servers: Vec<proto::LanguageServer>,
}
impl RejoinedProject {
pub fn to_proto(&self) -> proto::RejoinedProject {
proto::RejoinedProject {
id: self.id.to_proto(),
worktrees: self
.worktrees
.iter()
.map(|worktree| proto::WorktreeMetadata {
id: worktree.id,
root_name: worktree.root_name.clone(),
visible: worktree.visible,
abs_path: worktree.abs_path.clone(),
})
.collect(),
collaborators: self
.collaborators
.iter()
.map(|collaborator| collaborator.to_proto())
.collect(),
language_servers: self.language_servers.clone(),
}
}
}
#[derive(Debug)]
pub struct RejoinedWorktree {
pub id: u64,
@@ -821,7 +730,20 @@ pub struct NewExtensionVersion {
pub published_at: PrimitiveDateTime,
}
pub struct ExtensionVersionConstraints {
pub schema_versions: RangeInclusive<i32>,
pub wasm_api_versions: RangeInclusive<SemanticVersion>,
pub fn serialize_iso8601<S: Serializer>(
datetime: &PrimitiveDateTime,
serializer: S,
) -> Result<S::Ok, S::Error> {
const SERDE_CONFIG: iso8601::EncodedConfig = iso8601::Config::DEFAULT
.set_year_is_six_digits(false)
.set_time_precision(iso8601::TimePrecision::Second {
decimal_digits: None,
})
.encode();
datetime
.assume_utc()
.format(&time::format_description::well_known::Iso8601::<SERDE_CONFIG>)
.map_err(S::Error::custom)?
.serialize(serializer)
}
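The guard types above are the point of this file: `room_transaction` (and, on the newer side, `project_transaction`) take the per-room lock, run the closure inside a database transaction, and hand back a guard that keeps the lock held until it is dropped, which is what serializes the updates sent to clients. A rough sketch of the pattern the query modules above use internally (here `db` stands for the `Database` these methods are defined on, `room_id` is assumed to be known, and the closure body is elided; this branch names the guard `RoomGuard`, the other side renames it `TransactionGuard`):

    let guard = db
        .room_transaction(room_id, |tx| async move {
            // ...run the row updates against `tx`, returning whatever the RPC layer needs...
            Ok(())
        })
        .await?;
    // While `guard` is alive the per-room lock is held, so anything sent to clients here is
    // serialized with respect to other changes to the same room. Callers that don't need the
    // lock simply unwrap it with .map(|guard| guard.into_inner()).
    drop(guard); // releases the per-room lock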

View File

@@ -84,7 +84,6 @@ id_type!(NotificationId);
id_type!(NotificationKindId);
id_type!(ProjectCollaboratorId);
id_type!(ProjectId);
id_type!(RemoteProjectId);
id_type!(ReplicaId);
id_type!(RoomId);
id_type!(RoomParticipantId);
@@ -271,18 +270,3 @@ impl Into<i32> for ChannelVisibility {
proto.into()
}
}
#[derive(Copy, Clone, Debug, Serialize, PartialEq)]
pub enum PrincipalId {
UserId(UserId),
DevServerId(DevServerId),
}
/// Indicate whether a [Buffer] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
/// The buffer is a mutable replica.
ReadWrite,
/// The buffer is a read-only replica.
ReadOnly,
}

View File

@@ -6,14 +6,12 @@ pub mod channels;
pub mod contacts;
pub mod contributors;
pub mod dev_servers;
pub mod embeddings;
pub mod extensions;
pub mod hosted_projects;
pub mod messages;
pub mod notifications;
pub mod projects;
pub mod rate_buckets;
pub mod remote_projects;
pub mod rooms;
pub mod servers;
pub mod users;

View File

@@ -529,133 +529,127 @@ impl Database {
ancestor_channel: Option<&channel::Model>,
tx: &DatabaseTransaction,
) -> Result<ChannelsForUser> {
// let mut filter = channel_member::Column::UserId
// .eq(user_id)
// .and(channel_member::Column::Accepted.eq(true));
let mut filter = channel_member::Column::UserId
.eq(user_id)
.and(channel_member::Column::Accepted.eq(true));
// if let Some(ancestor) = ancestor_channel {
// filter = filter.and(channel_member::Column::ChannelId.eq(ancestor.root_id()));
// }
if let Some(ancestor) = ancestor_channel {
filter = filter.and(channel_member::Column::ChannelId.eq(ancestor.root_id()));
}
// let channel_memberships = channel_member::Entity::find()
// .filter(filter)
// .all(tx)
// .await?;
let channel_memberships = channel_member::Entity::find()
.filter(filter)
.all(tx)
.await?;
// let channels = channel::Entity::find()
// .filter(channel::Column::Id.is_in(channel_memberships.iter().map(|m| m.channel_id)))
// .all(tx)
// .await?;
let channels = channel::Entity::find()
.filter(channel::Column::Id.is_in(channel_memberships.iter().map(|m| m.channel_id)))
.all(tx)
.await?;
// let mut descendants = self
// .get_channel_descendants_excluding_self(channels.iter(), tx)
// .await?;
let mut descendants = self
.get_channel_descendants_excluding_self(channels.iter(), tx)
.await?;
// for channel in channels {
// if let Err(ix) = descendants.binary_search_by_key(&channel.path(), |c| c.path()) {
// descendants.insert(ix, channel);
// }
// }
for channel in channels {
if let Err(ix) = descendants.binary_search_by_key(&channel.path(), |c| c.path()) {
descendants.insert(ix, channel);
}
}
// let roles_by_channel_id = channel_memberships
// .iter()
// .map(|membership| (membership.channel_id, membership.role))
// .collect::<HashMap<_, _>>();
let roles_by_channel_id = channel_memberships
.iter()
.map(|membership| (membership.channel_id, membership.role))
.collect::<HashMap<_, _>>();
// let channels: Vec<Channel> = descendants
// .into_iter()
// .filter_map(|channel| {
// let parent_role = roles_by_channel_id.get(&channel.root_id())?;
// if parent_role.can_see_channel(channel.visibility) {
// Some(Channel::from_model(channel))
// } else {
// None
// }
// })
// .collect();
let channels: Vec<Channel> = descendants
.into_iter()
.filter_map(|channel| {
let parent_role = roles_by_channel_id.get(&channel.root_id())?;
if parent_role.can_see_channel(channel.visibility) {
Some(Channel::from_model(channel))
} else {
None
}
})
.collect();
// #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
// enum QueryUserIdsAndChannelIds {
// ChannelId,
// UserId,
// }
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryUserIdsAndChannelIds {
ChannelId,
UserId,
}
// let mut channel_participants: HashMap<ChannelId, Vec<UserId>> = HashMap::default();
// {
// let mut rows = room_participant::Entity::find()
// .inner_join(room::Entity)
// .filter(room::Column::ChannelId.is_in(channels.iter().map(|c| c.id)))
// .select_only()
// .column(room::Column::ChannelId)
// .column(room_participant::Column::UserId)
// .into_values::<_, QueryUserIdsAndChannelIds>()
// .stream(tx)
// .await?;
// while let Some(row) = rows.next().await {
// let row: (ChannelId, UserId) = row?;
// channel_participants.entry(row.0).or_default().push(row.1)
// }
// }
let mut channel_participants: HashMap<ChannelId, Vec<UserId>> = HashMap::default();
{
let mut rows = room_participant::Entity::find()
.inner_join(room::Entity)
.filter(room::Column::ChannelId.is_in(channels.iter().map(|c| c.id)))
.select_only()
.column(room::Column::ChannelId)
.column(room_participant::Column::UserId)
.into_values::<_, QueryUserIdsAndChannelIds>()
.stream(tx)
.await?;
while let Some(row) = rows.next().await {
let row: (ChannelId, UserId) = row?;
channel_participants.entry(row.0).or_default().push(row.1)
}
}
// let channel_ids = channels.iter().map(|c| c.id).collect::<Vec<_>>();
let channel_ids = channels.iter().map(|c| c.id).collect::<Vec<_>>();
// let mut channel_ids_by_buffer_id = HashMap::default();
// let mut latest_buffer_versions: Vec<ChannelBufferVersion> = vec![];
// let mut rows = buffer::Entity::find()
// .filter(buffer::Column::ChannelId.is_in(channel_ids.iter().copied()))
// .stream(tx)
// .await?;
// while let Some(row) = rows.next().await {
// let row = row?;
// channel_ids_by_buffer_id.insert(row.id, row.channel_id);
// latest_buffer_versions.push(ChannelBufferVersion {
// channel_id: row.channel_id.0 as u64,
// epoch: row.latest_operation_epoch.unwrap_or_default() as u64,
// version: if let Some((latest_lamport_timestamp, latest_replica_id)) = row
// .latest_operation_lamport_timestamp
// .zip(row.latest_operation_replica_id)
// {
// vec![VectorClockEntry {
// timestamp: latest_lamport_timestamp as u32,
// replica_id: latest_replica_id as u32,
// }]
// } else {
// vec![]
// },
// });
// }
// drop(rows);
let mut channel_ids_by_buffer_id = HashMap::default();
let mut latest_buffer_versions: Vec<ChannelBufferVersion> = vec![];
let mut rows = buffer::Entity::find()
.filter(buffer::Column::ChannelId.is_in(channel_ids.iter().copied()))
.stream(tx)
.await?;
while let Some(row) = rows.next().await {
let row = row?;
channel_ids_by_buffer_id.insert(row.id, row.channel_id);
latest_buffer_versions.push(ChannelBufferVersion {
channel_id: row.channel_id.0 as u64,
epoch: row.latest_operation_epoch.unwrap_or_default() as u64,
version: if let Some((latest_lamport_timestamp, latest_replica_id)) = row
.latest_operation_lamport_timestamp
.zip(row.latest_operation_replica_id)
{
vec![VectorClockEntry {
timestamp: latest_lamport_timestamp as u32,
replica_id: latest_replica_id as u32,
}]
} else {
vec![]
},
});
}
drop(rows);
// let latest_channel_messages = self.latest_channel_messages(&channel_ids, tx).await?;
let latest_channel_messages = self.latest_channel_messages(&channel_ids, tx).await?;
// let observed_buffer_versions = self
// .observed_channel_buffer_changes(&channel_ids_by_buffer_id, user_id, tx)
// .await?;
let observed_buffer_versions = self
.observed_channel_buffer_changes(&channel_ids_by_buffer_id, user_id, tx)
.await?;
// let observed_channel_messages = self
// .observed_channel_messages(&channel_ids, user_id, tx)
// .await?;
let observed_channel_messages = self
.observed_channel_messages(&channel_ids, user_id, tx)
.await?;
// let hosted_projects = self
// .get_hosted_projects(&channel_ids, &roles_by_channel_id, tx)
// .await?;
let hosted_projects = self
.get_hosted_projects(&channel_ids, &roles_by_channel_id, tx)
.await?;
// let dev_servers = self.get_dev_servers(&channel_ids, tx).await?;
// let remote_projects = self.get_remote_projects(&channel_ids, tx).await?;
// Ok(ChannelsForUser {
// channel_memberships,
// channels,
// hosted_projects,
// dev_servers,
// remote_projects,
// channel_participants,
// latest_buffer_versions,
// latest_channel_messages,
// observed_buffer_versions,
// observed_channel_messages,
// })
Err(anyhow!("not implemented"))
Ok(ChannelsForUser {
channel_memberships,
channels,
hosted_projects,
channel_participants,
latest_buffer_versions,
latest_channel_messages,
observed_buffer_versions,
observed_channel_messages,
})
}
/// Sets the role for the specified channel member.

View File

@@ -3,89 +3,88 @@ use super::*;
impl Database {
/// Retrieves the contacts for the user with the given ID.
pub async fn get_contacts(&self, user_id: UserId) -> Result<Vec<Contact>> {
Ok(vec![])
// #[derive(Debug, FromQueryResult)]
// struct ContactWithUserBusyStatuses {
// user_id_a: UserId,
// user_id_b: UserId,
// a_to_b: bool,
// accepted: bool,
// user_a_busy: bool,
// user_b_busy: bool,
// }
#[derive(Debug, FromQueryResult)]
struct ContactWithUserBusyStatuses {
user_id_a: UserId,
user_id_b: UserId,
a_to_b: bool,
accepted: bool,
user_a_busy: bool,
user_b_busy: bool,
}
// self.transaction(|tx| async move {
// let user_a_participant = Alias::new("user_a_participant");
// let user_b_participant = Alias::new("user_b_participant");
// let mut db_contacts = contact::Entity::find()
// .column_as(
// Expr::col((user_a_participant.clone(), room_participant::Column::Id))
// .is_not_null(),
// "user_a_busy",
// )
// .column_as(
// Expr::col((user_b_participant.clone(), room_participant::Column::Id))
// .is_not_null(),
// "user_b_busy",
// )
// .filter(
// contact::Column::UserIdA
// .eq(user_id)
// .or(contact::Column::UserIdB.eq(user_id)),
// )
// .join_as(
// JoinType::LeftJoin,
// contact::Relation::UserARoomParticipant.def(),
// user_a_participant,
// )
// .join_as(
// JoinType::LeftJoin,
// contact::Relation::UserBRoomParticipant.def(),
// user_b_participant,
// )
// .into_model::<ContactWithUserBusyStatuses>()
// .stream(&*tx)
// .await?;
self.transaction(|tx| async move {
let user_a_participant = Alias::new("user_a_participant");
let user_b_participant = Alias::new("user_b_participant");
let mut db_contacts = contact::Entity::find()
.column_as(
Expr::col((user_a_participant.clone(), room_participant::Column::Id))
.is_not_null(),
"user_a_busy",
)
.column_as(
Expr::col((user_b_participant.clone(), room_participant::Column::Id))
.is_not_null(),
"user_b_busy",
)
.filter(
contact::Column::UserIdA
.eq(user_id)
.or(contact::Column::UserIdB.eq(user_id)),
)
.join_as(
JoinType::LeftJoin,
contact::Relation::UserARoomParticipant.def(),
user_a_participant,
)
.join_as(
JoinType::LeftJoin,
contact::Relation::UserBRoomParticipant.def(),
user_b_participant,
)
.into_model::<ContactWithUserBusyStatuses>()
.stream(&*tx)
.await?;
// let mut contacts = Vec::new();
// while let Some(db_contact) = db_contacts.next().await {
// let db_contact = db_contact?;
// if db_contact.user_id_a == user_id {
// if db_contact.accepted {
// contacts.push(Contact::Accepted {
// user_id: db_contact.user_id_b,
// busy: db_contact.user_b_busy,
// });
// } else if db_contact.a_to_b {
// contacts.push(Contact::Outgoing {
// user_id: db_contact.user_id_b,
// })
// } else {
// contacts.push(Contact::Incoming {
// user_id: db_contact.user_id_b,
// });
// }
// } else if db_contact.accepted {
// contacts.push(Contact::Accepted {
// user_id: db_contact.user_id_a,
// busy: db_contact.user_a_busy,
// });
// } else if db_contact.a_to_b {
// contacts.push(Contact::Incoming {
// user_id: db_contact.user_id_a,
// });
// } else {
// contacts.push(Contact::Outgoing {
// user_id: db_contact.user_id_a,
// });
// }
// }
let mut contacts = Vec::new();
while let Some(db_contact) = db_contacts.next().await {
let db_contact = db_contact?;
if db_contact.user_id_a == user_id {
if db_contact.accepted {
contacts.push(Contact::Accepted {
user_id: db_contact.user_id_b,
busy: db_contact.user_b_busy,
});
} else if db_contact.a_to_b {
contacts.push(Contact::Outgoing {
user_id: db_contact.user_id_b,
})
} else {
contacts.push(Contact::Incoming {
user_id: db_contact.user_id_b,
});
}
} else if db_contact.accepted {
contacts.push(Contact::Accepted {
user_id: db_contact.user_id_a,
busy: db_contact.user_a_busy,
});
} else if db_contact.a_to_b {
contacts.push(Contact::Incoming {
user_id: db_contact.user_id_a,
});
} else {
contacts.push(Contact::Outgoing {
user_id: db_contact.user_id_a,
});
}
}
// contacts.sort_unstable_by_key(|contact| contact.user_id());
contacts.sort_unstable_by_key(|contact| contact.user_id());
// Ok(contacts)
// })
// .await
Ok(contacts)
})
.await
}
/// Returns whether the given user is a busy (on a call).

View File

@@ -1,6 +1,6 @@
use sea_orm::{ActiveValue, ColumnTrait, DatabaseTransaction, EntityTrait, QueryFilter};
use sea_orm::EntityTrait;
use super::{channel, dev_server, ChannelId, Database, DevServerId, UserId};
use super::{dev_server, Database, DevServerId};
impl Database {
pub async fn get_dev_server(
@@ -15,42 +15,4 @@ impl Database {
})
.await
}
pub async fn get_dev_servers(
&self,
channel_ids: &Vec<ChannelId>,
tx: &DatabaseTransaction,
) -> crate::Result<Vec<dev_server::Model>> {
let servers = dev_server::Entity::find()
.filter(dev_server::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
.all(tx)
.await?;
Ok(servers)
}
pub async fn create_dev_server(
&self,
channel_id: ChannelId,
name: &str,
hashed_access_token: &str,
user_id: UserId,
) -> crate::Result<(channel::Model, dev_server::Model)> {
self.transaction(|tx| async move {
let channel = self.get_channel_internal(channel_id, &tx).await?;
self.check_user_is_channel_admin(&channel, user_id, &tx)
.await?;
let dev_server = dev_server::Entity::insert(dev_server::ActiveModel {
id: ActiveValue::NotSet,
hashed_token: ActiveValue::Set(hashed_access_token.to_string()),
channel_id: ActiveValue::Set(channel_id),
name: ActiveValue::Set(name.to_string()),
})
.exec_with_returning(&*tx)
.await?;
Ok((channel, dev_server))
})
.await
}
}

View File

@@ -1,94 +0,0 @@
use super::*;
use time::Duration;
use time::OffsetDateTime;
impl Database {
pub async fn get_embeddings(
&self,
model: &str,
digests: &[Vec<u8>],
) -> Result<HashMap<Vec<u8>, Vec<f32>>> {
self.weak_transaction(|tx| async move {
let embeddings = {
let mut db_embeddings = embedding::Entity::find()
.filter(
embedding::Column::Model.eq(model).and(
embedding::Column::Digest
.is_in(digests.iter().map(|digest| digest.as_slice())),
),
)
.stream(&*tx)
.await?;
let mut embeddings = HashMap::default();
while let Some(db_embedding) = db_embeddings.next().await {
let db_embedding = db_embedding?;
embeddings.insert(db_embedding.digest, db_embedding.dimensions);
}
embeddings
};
if !embeddings.is_empty() {
let now = OffsetDateTime::now_utc();
let retrieved_at = PrimitiveDateTime::new(now.date(), now.time());
embedding::Entity::update_many()
.filter(
embedding::Column::Digest
.is_in(embeddings.keys().map(|digest| digest.as_slice())),
)
.col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at))
.exec(&*tx)
.await?;
}
Ok(embeddings)
})
.await
}
pub async fn save_embeddings(
&self,
model: &str,
embeddings: &HashMap<Vec<u8>, Vec<f32>>,
) -> Result<()> {
self.weak_transaction(|tx| async move {
embedding::Entity::insert_many(embeddings.iter().map(|(digest, dimensions)| {
let now_offset_datetime = OffsetDateTime::now_utc();
let retrieved_at =
PrimitiveDateTime::new(now_offset_datetime.date(), now_offset_datetime.time());
embedding::ActiveModel {
model: ActiveValue::set(model.to_string()),
digest: ActiveValue::set(digest.clone()),
dimensions: ActiveValue::set(dimensions.clone()),
retrieved_at: ActiveValue::set(retrieved_at),
}
}))
.on_conflict(
OnConflict::columns([embedding::Column::Model, embedding::Column::Digest])
.do_nothing()
.to_owned(),
)
.exec_without_returning(&*tx)
.await?;
Ok(())
})
.await
}
pub async fn purge_old_embeddings(&self) -> Result<()> {
self.weak_transaction(|tx| async move {
embedding::Entity::delete_many()
.filter(
embedding::Column::RetrievedAt
.lte(OffsetDateTime::now_utc() - Duration::days(60)),
)
.exec(&*tx)
.await?;
Ok(())
})
.await
}
}
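The embeddings queries above (dropped on one side of this compare) give collab a small cache keyed by `(model, digest)`: `get_embeddings` returns hits and refreshes their `retrieved_at`, `save_embeddings` upserts with a do-nothing conflict clause, and `purge_old_embeddings` drops rows untouched for 60 days. A rough sketch of how a caller might combine the first two (`texts`, `sha256`, and `embed_texts` are invented stand-ins, not real collab APIs):

    // Look up cached embeddings, compute only the misses, then store the new ones.
    let model = "example-embedding-model";                                  // made-up name
    let digests: Vec<Vec<u8>> = texts.iter().map(|t| sha256(t)).collect();  // assumed helper
    let mut embeddings = db.get_embeddings(model, &digests).await?;

    let missing: Vec<&Vec<u8>> = digests
        .iter()
        .filter(|digest| !embeddings.contains_key(*digest))
        .collect();
    if !missing.is_empty() {
        let fresh: HashMap<Vec<u8>, Vec<f32>> = embed_texts(&missing).await?; // assumed helper
        db.save_embeddings(model, &fresh).await?;
        embeddings.extend(fresh);
    }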

View File

@@ -1,8 +1,4 @@
use std::str::FromStr;
use chrono::Utc;
use sea_orm::sea_query::IntoCondition;
use util::ResultExt;
use super::*;
@@ -14,163 +10,53 @@ impl Database {
limit: usize,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
let mut condition = Condition::all()
.add(
extension::Column::LatestVersion
.into_expr()
.eq(extension_version::Column::Version.into_expr()),
)
.add(extension_version::Column::SchemaVersion.lte(max_schema_version));
let mut condition = Condition::all().add(
extension::Column::LatestVersion
.into_expr()
.eq(extension_version::Column::Version.into_expr()),
);
if let Some(filter) = filter {
let fuzzy_name_filter = Self::fuzzy_like_string(filter);
condition = condition.add(Expr::cust_with_expr("name ILIKE $1", fuzzy_name_filter));
}
self.get_extensions_where(condition, Some(limit as u64), &tx)
.await
})
.await
}
pub async fn get_extensions_by_ids(
&self,
ids: &[&str],
constraints: Option<&ExtensionVersionConstraints>,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
let extensions = extension::Entity::find()
.filter(extension::Column::ExternalId.is_in(ids.iter().copied()))
.inner_join(extension_version::Entity)
.select_also(extension_version::Entity)
.filter(condition)
.filter(extension_version::Column::SchemaVersion.lte(max_schema_version))
.order_by_desc(extension::Column::TotalDownloadCount)
.order_by_asc(extension::Column::Name)
.limit(Some(limit as u64))
.all(&*tx)
.await?;
let mut max_versions = self
.get_latest_versions_for_extensions(&extensions, constraints, &tx)
.await?;
Ok(extensions
.into_iter()
.filter_map(|extension| {
let (version, _) = max_versions.remove(&extension.id)?;
Some(metadata_from_extension_and_version(extension, version))
.filter_map(|(extension, version)| {
Some(metadata_from_extension_and_version(extension, version?))
})
.collect())
})
.await
}
async fn get_latest_versions_for_extensions(
&self,
extensions: &[extension::Model],
constraints: Option<&ExtensionVersionConstraints>,
tx: &DatabaseTransaction,
) -> Result<HashMap<ExtensionId, (extension_version::Model, SemanticVersion)>> {
let mut versions = extension_version::Entity::find()
.filter(
extension_version::Column::ExtensionId
.is_in(extensions.iter().map(|extension| extension.id)),
)
.stream(tx)
.await?;
let mut max_versions =
HashMap::<ExtensionId, (extension_version::Model, SemanticVersion)>::default();
while let Some(version) = versions.next().await {
let version = version?;
let Some(extension_version) = SemanticVersion::from_str(&version.version).log_err()
else {
continue;
};
if let Some((_, max_extension_version)) = &max_versions.get(&version.extension_id) {
if max_extension_version > &extension_version {
continue;
}
}
if let Some(constraints) = constraints {
if !constraints
.schema_versions
.contains(&version.schema_version)
{
continue;
}
if let Some(wasm_api_version) = version.wasm_api_version.as_ref() {
if let Some(version) = SemanticVersion::from_str(wasm_api_version).log_err() {
if !constraints.wasm_api_versions.contains(&version) {
continue;
}
} else {
continue;
}
}
}
max_versions.insert(version.extension_id, (version, extension_version));
}
Ok(max_versions)
}
/// Returns all of the versions for the extension with the given ID.
pub async fn get_extension_versions(
&self,
extension_id: &str,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
let condition = extension::Column::ExternalId
.eq(extension_id)
.into_condition();
self.get_extensions_where(condition, None, &tx).await
})
.await
}
async fn get_extensions_where(
&self,
condition: Condition,
limit: Option<u64>,
tx: &DatabaseTransaction,
) -> Result<Vec<ExtensionMetadata>> {
let extensions = extension::Entity::find()
.inner_join(extension_version::Entity)
.select_also(extension_version::Entity)
.filter(condition)
.order_by_desc(extension::Column::TotalDownloadCount)
.order_by_asc(extension::Column::Name)
.limit(limit)
.all(tx)
.await?;
Ok(extensions
.into_iter()
.filter_map(|(extension, version)| {
Some(metadata_from_extension_and_version(extension, version?))
})
.collect())
}
pub async fn get_extension(
&self,
extension_id: &str,
constraints: Option<&ExtensionVersionConstraints>,
) -> Result<Option<ExtensionMetadata>> {
pub async fn get_extension(&self, extension_id: &str) -> Result<Option<ExtensionMetadata>> {
self.transaction(|tx| async move {
let extension = extension::Entity::find()
.filter(extension::Column::ExternalId.eq(extension_id))
.filter(
extension::Column::LatestVersion
.into_expr()
.eq(extension_version::Column::Version.into_expr()),
)
.inner_join(extension_version::Entity)
.select_also(extension_version::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such extension: {extension_id}"))?;
let extensions = [extension];
let mut versions = self
.get_latest_versions_for_extensions(&extensions, constraints, &tx)
.await?;
let [extension] = extensions;
Ok(versions.remove(&extension.id).map(|(max_version, _)| {
metadata_from_extension_and_version(extension, max_version)
Ok(extension.and_then(|(extension, version)| {
Some(metadata_from_extension_and_version(extension, version?))
}))
})
.await

View File

@@ -1,8 +1,7 @@
use super::*;
use rpc::Notification;
use sea_orm::{SelectColumns, TryInsertResult};
use sea_orm::TryInsertResult;
use time::OffsetDateTime;
use util::ResultExt;
impl Database {
/// Inserts a record representing a user joining the chat for a given channel.
@@ -432,60 +431,53 @@ impl Database {
channel_ids: &[ChannelId],
tx: &DatabaseTransaction,
) -> Result<Vec<proto::ChannelMessageId>> {
// let mut values = String::new();
// for id in channel_ids {
// if !values.is_empty() {
// values.push_str(", ");
// }
// write!(&mut values, "({})", id).unwrap();
// }
let mut values = String::new();
for id in channel_ids {
if !values.is_empty() {
values.push_str(", ");
}
write!(&mut values, "({})", id).unwrap();
}
// if values.is_empty() {
// return Ok(Vec::default());
// }
if values.is_empty() {
return Ok(Vec::default());
}
// let sql = format!(
// r#"
// SELECT
// *
// FROM (
// SELECT
// *,
// row_number() OVER (
// PARTITION BY channel_id
// ORDER BY id DESC
// ) as row_number
// FROM channel_messages
// WHERE
// channel_id in ({values})
// ) AS messages
// WHERE
// row_number = 1
// "#,
// );
let sql = format!(
r#"
SELECT
*
FROM (
SELECT
*,
row_number() OVER (
PARTITION BY channel_id
ORDER BY id DESC
) as row_number
FROM channel_messages
WHERE
channel_id in ({values})
) AS messages
WHERE
row_number = 1
"#,
);
// let stmt = Statement::from_string(self.pool.get_database_backend(), sql);
// let mut last_messages = channel_message::Model::find_by_statement(stmt)
// .stream(tx)
// .await?;
let stmt = Statement::from_string(self.pool.get_database_backend(), sql);
let mut last_messages = channel_message::Model::find_by_statement(stmt)
.stream(tx)
.await?;
// let mut results = Vec::new();
// while let Some(result) = last_messages.next().await {
// let message = result?;
// results.push(proto::ChannelMessageId {
// channel_id: message.channel_id.to_proto(),
// message_id: message.id.to_proto(),
// });
// }
let mut results = Vec::new();
while let Some(result) = last_messages.next().await {
let message = result?;
results.push(proto::ChannelMessageId {
channel_id: message.channel_id.to_proto(),
message_id: message.id.to_proto(),
});
}
Ok(vec![])
}
fn get_notification_kind_id_by_name(&self, notification_kind: &str) -> Option<i32> {
self.notification_kinds_by_id
.iter()
.find(|(_, kind)| **kind == notification_kind)
.map(|kind| kind.0 .0)
Ok(results)
}
/// Removes the channel message with the given ID.
@@ -494,7 +486,7 @@ impl Database {
channel_id: ChannelId,
message_id: MessageId,
user_id: UserId,
) -> Result<(Vec<ConnectionId>, Vec<NotificationId>)> {
) -> Result<Vec<ConnectionId>> {
self.transaction(|tx| async move {
let mut rows = channel_chat_participant::Entity::find()
.filter(channel_chat_participant::Column::ChannelId.eq(channel_id))
@@ -539,29 +531,7 @@ impl Database {
}
}
let notification_kind_id =
self.get_notification_kind_id_by_name("ChannelMessageMention");
let existing_notifications = notification::Entity::find()
.filter(notification::Column::EntityId.eq(message_id))
.filter(notification::Column::Kind.eq(notification_kind_id))
.select_column(notification::Column::Id)
.all(&*tx)
.await?;
let existing_notification_ids = existing_notifications
.into_iter()
.map(|notification| notification.id)
.collect();
// remove all the mention notifications for this message
notification::Entity::delete_many()
.filter(notification::Column::EntityId.eq(message_id))
.filter(notification::Column::Kind.eq(notification_kind_id))
.exec(&*tx)
.await?;
Ok((participant_connection_ids, existing_notification_ids))
Ok(participant_connection_ids)
})
.await
}
@@ -659,44 +629,14 @@ impl Database {
.await?;
}
let mut update_mention_user_ids = HashSet::default();
let mut new_mention_user_ids =
mentions.iter().map(|m| m.user_id).collect::<HashSet<_>>();
let mut mentioned_user_ids = mentions.iter().map(|m| m.user_id).collect::<HashSet<_>>();
// Filter out users that were mentioned before
for mention in &old_mentions {
if new_mention_user_ids.contains(&mention.user_id.to_proto()) {
update_mention_user_ids.insert(mention.user_id.to_proto());
}
new_mention_user_ids.remove(&mention.user_id.to_proto());
}
let notification_kind_id =
self.get_notification_kind_id_by_name("ChannelMessageMention");
let existing_notifications = notification::Entity::find()
.filter(notification::Column::EntityId.eq(message_id))
.filter(notification::Column::Kind.eq(notification_kind_id))
.all(&*tx)
.await?;
// determine which notifications should be updated or deleted
let mut deleted_notification_ids = HashSet::default();
let mut updated_mention_notifications = Vec::new();
for notification in existing_notifications {
if update_mention_user_ids.contains(&notification.recipient_id.to_proto()) {
if let Some(notification) =
self::notifications::model_to_proto(self, notification).log_err()
{
updated_mention_notifications.push(notification);
}
} else {
deleted_notification_ids.insert(notification.id);
}
for mention in old_mentions {
mentioned_user_ids.remove(&mention.user_id.to_proto());
}
let mut notifications = Vec::new();
for mentioned_user in new_mention_user_ids {
for mentioned_user in mentioned_user_ids {
notifications.extend(
self.create_notification(
UserId::from_proto(mentioned_user),
@@ -718,10 +658,6 @@ impl Database {
notifications,
reply_to_message_id: channel_message.reply_to_message_id,
timestamp: channel_message.sent_at,
deleted_mention_notification_ids: deleted_notification_ids
.into_iter()
.collect::<Vec<_>>(),
updated_mention_notifications,
})
})
.await
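The edit path above, on the newer side of the hunk, splits mentions into three groups: users mentioned both before and after the edit keep their notification and have it refreshed, users mentioned only before have their notification deleted, and users mentioned only after get a new one. A minimal sketch of that bookkeeping with plain HashSets (the user ids are arbitrary):

    use std::collections::HashSet;

    let old_mentions: HashSet<u64> = [1, 2, 3].into_iter().collect(); // mentioned before the edit
    let new_mentions: HashSet<u64> = [2, 3, 4].into_iter().collect(); // mentioned after the edit

    // Still mentioned: their existing mention notifications are updated in place.
    let to_update: HashSet<u64> = old_mentions.intersection(&new_mentions).copied().collect();
    // No longer mentioned: their mention notifications are deleted.
    let to_delete: HashSet<u64> = old_mentions.difference(&new_mentions).copied().collect();
    // Newly mentioned: fresh notifications are created for them.
    let to_create: HashSet<u64> = new_mentions.difference(&old_mentions).copied().collect();

    // Here: to_update = {2, 3}, to_delete = {1}, to_create = {4}.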

View File

@@ -1,6 +1,5 @@
use super::*;
use rpc::Notification;
use util::ResultExt;
impl Database {
/// Initializes the different kinds of notifications by upserting records for them.
@@ -54,8 +53,11 @@ impl Database {
.await?;
while let Some(row) = rows.next().await {
let row = row?;
if let Some(proto) = model_to_proto(self, row).log_err() {
let kind = row.kind;
if let Some(proto) = model_to_proto(self, row) {
result.push(proto);
} else {
log::warn!("unknown notification kind {:?}", kind);
}
}
result.reverse();
@@ -198,9 +200,7 @@ impl Database {
})
.exec(tx)
.await?;
Ok(model_to_proto(self, row)
.map(|notification| (recipient_id, notification))
.ok())
Ok(model_to_proto(self, row).map(|notification| (recipient_id, notification)))
} else {
Ok(None)
}
@@ -241,12 +241,9 @@ impl Database {
}
}
pub fn model_to_proto(this: &Database, row: notification::Model) -> Result<proto::Notification> {
let kind = this
.notification_kinds_by_id
.get(&row.kind)
.ok_or_else(|| anyhow!("Unknown notification kind"))?;
Ok(proto::Notification {
fn model_to_proto(this: &Database, row: notification::Model) -> Option<proto::Notification> {
let kind = this.notification_kinds_by_id.get(&row.kind)?;
Some(proto::Notification {
id: row.id.to_proto(),
kind: kind.to_string(),
timestamp: row.created_at.assume_utc().unix_timestamp() as u64,

View File

@@ -1,5 +1,3 @@
use util::ResultExt;
use super::*;
impl Database {
@@ -30,7 +28,7 @@ impl Database {
room_id: RoomId,
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
) -> Result<RoomGuard<(ProjectId, proto::Room)>> {
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
.filter(
@@ -67,7 +65,6 @@ impl Database {
))),
id: ActiveValue::NotSet,
hosted_project_id: ActiveValue::Set(None),
remote_project_id: ActiveValue::Set(None),
}
.insert(&*tx)
.await?;
@@ -111,22 +108,20 @@ impl Database {
&self,
project_id: ProjectId,
connection: ConnectionId,
) -> Result<TransactionGuard<(Option<proto::Room>, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
) -> Result<RoomGuard<(proto::Room, Vec<ConnectionId>)>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("project not found"))?;
if project.host_connection()? == connection {
let room = if let Some(room_id) = project.room_id {
Some(self.get_room(room_id, &tx).await?)
} else {
None
};
project::Entity::delete(project.into_active_model())
.exec(&*tx)
.await?;
let room = self.get_room(room_id, &tx).await?;
Ok((room, guest_connection_ids))
} else {
Err(anyhow!("cannot unshare a project hosted by another user"))?
@@ -141,8 +136,9 @@ impl Database {
project_id: ProjectId,
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
) -> Result<TransactionGuard<(Option<proto::Room>, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
) -> Result<RoomGuard<(proto::Room, Vec<ConnectionId>)>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let project = project::Entity::find_by_id(project_id)
.filter(
Condition::all()
@@ -158,14 +154,12 @@ impl Database {
self.update_project_worktrees(project.id, worktrees, &tx)
.await?;
let room_id = project
.room_id
.ok_or_else(|| anyhow!("project not in a room"))?;
let guest_connection_ids = self.project_guest_connection_ids(project.id, &tx).await?;
let room = if let Some(room_id) = project.room_id {
Some(self.get_room(room_id, &tx).await?)
} else {
None
};
let room = self.get_room(room_id, &tx).await?;
Ok((room, guest_connection_ids))
})
.await
@@ -210,10 +204,11 @@ impl Database {
&self,
update: &proto::UpdateWorktree,
connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
) -> Result<RoomGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
let worktree_id = update.worktree_id as i64;
self.project_transaction(project_id, |tx| async move {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
// Ensure the update comes from the host.
let _project = project::Entity::find_by_id(project_id)
.filter(
@@ -365,10 +360,11 @@ impl Database {
&self,
update: &proto::UpdateDiagnosticSummary,
connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
) -> Result<RoomGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
let worktree_id = update.worktree_id as i64;
self.project_transaction(project_id, |tx| async move {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let summary = update
.summary
.as_ref()
@@ -419,9 +415,10 @@ impl Database {
&self,
update: &proto::StartLanguageServer,
connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
) -> Result<RoomGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
self.project_transaction(project_id, |tx| async move {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let server = update
.server
.as_ref()
@@ -464,9 +461,10 @@ impl Database {
&self,
update: &proto::UpdateWorktreeSettings,
connection: ConnectionId,
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
) -> Result<RoomGuard<Vec<ConnectionId>>> {
let project_id = ProjectId::from_proto(update.project_id);
self.project_transaction(project_id, |tx| async move {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
// Ensure the update comes from the host.
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
@@ -544,36 +542,46 @@ impl Database {
.await
}
pub async fn get_project(&self, id: ProjectId) -> Result<project::Model> {
self.transaction(|tx| async move {
Ok(project::Entity::find_by_id(id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?)
})
.await
}
/// Adds the given connection to the specified project
/// in the current room.
pub async fn join_project(
pub async fn join_project_in_room(
&self,
project_id: ProjectId,
connection: ConnectionId,
user_id: UserId,
) -> Result<TransactionGuard<(Project, ReplicaId)>> {
self.project_transaction(project_id, |tx| async move {
let (project, role) = self
.access_project(
project_id,
connection,
PrincipalId::UserId(user_id),
Capability::ReadOnly,
&tx,
) -> Result<RoomGuard<(Project, ReplicaId)>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
.filter(
Condition::all()
.add(
room_participant::Column::AnsweringConnectionId
.eq(connection.id as i32),
)
.add(
room_participant::Column::AnsweringConnectionServerId
.eq(connection.owner_id as i32),
),
)
.await?;
self.join_project_internal(project, user_id, connection, role, &tx)
.await
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("must join a room first"))?;
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
if project.room_id != Some(participant.room_id) {
return Err(anyhow!("no such project"))?;
}
self.join_project_internal(
project,
participant.user_id,
connection,
participant.role.unwrap_or(ChannelRole::Member),
&tx,
)
.await
})
.await
}
@@ -806,8 +814,9 @@ impl Database {
&self,
project_id: ProjectId,
connection: ConnectionId,
) -> Result<TransactionGuard<(Option<proto::Room>, LeftProject)>> {
self.project_transaction(project_id, |tx| async move {
) -> Result<RoomGuard<(proto::Room, LeftProject)>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let result = project_collaborator::Entity::delete_many()
.filter(
Condition::all()
@@ -862,12 +871,7 @@ impl Database {
.exec(&*tx)
.await?;
let room = if let Some(room_id) = project.room_id {
Some(self.get_room(room_id, &tx).await?)
} else {
None
};
let room = self.get_room(room_id, &tx).await?;
let left_project = LeftProject {
id: project_id,
host_user_id: project.host_user_id,
@@ -884,15 +888,17 @@ impl Database {
project_id: ProjectId,
connection_id: ConnectionId,
) -> Result<()> {
self.project_transaction(project_id, |tx| async move {
project::Entity::find()
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
project_collaborator::Entity::find()
.filter(
Condition::all()
.add(project::Column::Id.eq(project_id))
.add(project::Column::HostConnectionId.eq(Some(connection_id.id as i32)))
.add(project_collaborator::Column::ProjectId.eq(project_id))
.add(project_collaborator::Column::IsHost.eq(true))
.add(project_collaborator::Column::ConnectionId.eq(connection_id.id))
.add(
project::Column::HostConnectionServerId
.eq(Some(connection_id.owner_id as i32)),
project_collaborator::Column::ConnectionServerId
.eq(connection_id.owner_id),
),
)
.one(&*tx)
@@ -905,90 +911,39 @@ impl Database {
.map(|guard| guard.into_inner())
}
/// Returns the current project if the given user is authorized to access it with the specified capability.
pub async fn access_project(
&self,
project_id: ProjectId,
connection_id: ConnectionId,
principal_id: PrincipalId,
capability: Capability,
tx: &DatabaseTransaction,
) -> Result<(project::Model, ChannelRole)> {
let (project, remote_project) = project::Entity::find_by_id(project_id)
.find_also_related(remote_project::Entity)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
let user_id = match principal_id {
PrincipalId::DevServerId(_) => {
if project
.host_connection()
.is_ok_and(|connection| connection == connection_id)
{
return Ok((project, ChannelRole::Admin));
}
return Err(anyhow!("not the project host"))?;
}
PrincipalId::UserId(user_id) => user_id,
};
let role = if let Some(remote_project) = remote_project {
let channel = channel::Entity::find_by_id(remote_project.channel_id)
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such channel"))?;
self.check_user_is_channel_participant(&channel, user_id, &tx)
.await?
} else if let Some(room_id) = project.room_id {
// what's the user's role?
let current_participant = room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
.filter(room_participant::Column::AnsweringConnectionId.eq(connection_id.id))
.one(tx)
.await?
.ok_or_else(|| anyhow!("no such room"))?;
current_participant.role.unwrap_or(ChannelRole::Guest)
} else {
return Err(anyhow!("not authorized to read projects"))?;
};
match capability {
Capability::ReadWrite => {
if !role.can_edit_projects() {
return Err(anyhow!("not authorized to edit projects"))?;
}
}
Capability::ReadOnly => {
if !role.can_read_projects() {
return Err(anyhow!("not authorized to read projects"))?;
}
}
}
Ok((project, role))
}
/// Returns the host connection for a read-only request to join a shared project.
pub async fn host_for_read_only_project_request(
&self,
project_id: ProjectId,
connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
.access_project(
project_id,
connection_id,
PrincipalId::UserId(user_id),
Capability::ReadOnly,
&tx,
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let current_participant = room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
.filter(room_participant::Column::AnsweringConnectionId.eq(connection_id.id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such room"))?;
if !current_participant
.role
.map_or(false, |role| role.can_read_projects())
{
Err(anyhow!("not authorized to read projects"))?;
}
let host = project_collaborator::Entity::find()
.filter(
project_collaborator::Column::ProjectId
.eq(project_id)
.and(project_collaborator::Column::IsHost.eq(true)),
)
.await?;
project.host_connection()
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("failed to read project host"))?;
Ok(host.connection())
})
.await
.map(|guard| guard.into_inner())
@@ -999,56 +954,83 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
user_id: UserId,
) -> Result<ConnectionId> {
self.project_transaction(project_id, |tx| async move {
let (project, _) = self
.access_project(
project_id,
connection_id,
PrincipalId::UserId(user_id),
Capability::ReadWrite,
&tx,
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let current_participant = room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
.filter(room_participant::Column::AnsweringConnectionId.eq(connection_id.id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such room"))?;
if !current_participant
.role
.map_or(false, |role| role.can_edit_projects())
{
Err(anyhow!("not authorized to edit projects"))?;
}
let host = project_collaborator::Entity::find()
.filter(
project_collaborator::Column::ProjectId
.eq(project_id)
.and(project_collaborator::Column::IsHost.eq(true)),
)
.await?;
project.host_connection()
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("failed to read project host"))?;
Ok(host.connection())
})
.await
.map(|guard| guard.into_inner())
}
pub async fn connections_for_buffer_update(
pub async fn project_collaborators_for_buffer_update(
&self,
project_id: ProjectId,
principal_id: PrincipalId,
connection_id: ConnectionId,
capability: Capability,
) -> Result<TransactionGuard<(ConnectionId, Vec<ConnectionId>)>> {
self.project_transaction(project_id, |tx| async move {
// Authorize
let (project, _) = self
.access_project(project_id, connection_id, principal_id, capability, &tx)
.await?;
requires_write: bool,
) -> Result<RoomGuard<Vec<ProjectCollaborator>>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let current_participant = room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))
.filter(room_participant::Column::AnsweringConnectionId.eq(connection_id.id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such room"))?;
let host_connection_id = project.host_connection()?;
if requires_write
&& !current_participant
.role
.map_or(false, |role| role.can_edit_projects())
{
Err(anyhow!("not authorized to edit projects"))?;
}
let collaborators = project_collaborator::Entity::find()
.filter(project_collaborator::Column::ProjectId.eq(project_id))
.all(&*tx)
.await?;
let guest_connection_ids = collaborators
.await?
.into_iter()
.filter_map(|collaborator| {
if collaborator.is_host {
None
} else {
Some(collaborator.connection())
}
.map(|collaborator| ProjectCollaborator {
connection_id: collaborator.connection(),
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
})
.collect();
.collect::<Vec<_>>();
Ok((host_connection_id, guest_connection_ids))
if collaborators
.iter()
.any(|collaborator| collaborator.connection_id == connection_id)
{
Ok(collaborators)
} else {
Err(anyhow!("no such project"))?
}
})
.await
}
@@ -1061,39 +1043,24 @@ impl Database {
&self,
project_id: ProjectId,
connection_id: ConnectionId,
exclude_dev_server: bool,
) -> Result<TransactionGuard<HashSet<ConnectionId>>> {
self.project_transaction(project_id, |tx| async move {
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such project"))?;
) -> Result<RoomGuard<HashSet<ConnectionId>>> {
let room_id = self.room_id_for_project(project_id).await?;
self.room_transaction(room_id, |tx| async move {
let mut collaborators = project_collaborator::Entity::find()
.filter(project_collaborator::Column::ProjectId.eq(project_id))
.stream(&*tx)
.await?;
let mut connection_ids = HashSet::default();
if let Some(host_connection) = project.host_connection().log_err() {
if !exclude_dev_server {
connection_ids.insert(host_connection);
}
}
while let Some(collaborator) = collaborators.next().await {
let collaborator = collaborator?;
connection_ids.insert(collaborator.connection());
}
if connection_ids.contains(&connection_id)
|| Some(connection_id) == project.host_connection().ok()
{
if connection_ids.contains(&connection_id) {
Ok(connection_ids)
} else {
Err(anyhow!(
"can only send project updates to a project you're in"
))?
Err(anyhow!("no such project"))?
}
})
.await
@@ -1122,12 +1089,15 @@ impl Database {
}
/// Returns the [`RoomId`] for the given project.
pub async fn room_id_for_project(&self, project_id: ProjectId) -> Result<Option<RoomId>> {
pub async fn room_id_for_project(&self, project_id: ProjectId) -> Result<RoomId> {
self.transaction(|tx| async move {
Ok(project::Entity::find_by_id(project_id)
let project = project::Entity::find_by_id(project_id)
.one(&*tx)
.await?
.and_then(|project| project.room_id))
.ok_or_else(|| anyhow!("project {} not found", project_id))?;
Ok(project
.room_id
.ok_or_else(|| anyhow!("project not in room"))?)
})
.await
}
@@ -1172,7 +1142,7 @@ impl Database {
project_id: ProjectId,
leader_connection: ConnectionId,
follower_connection: ConnectionId,
) -> Result<TransactionGuard<proto::Room>> {
) -> Result<RoomGuard<proto::Room>> {
self.room_transaction(room_id, |tx| async move {
follower::ActiveModel {
room_id: ActiveValue::set(room_id),
@@ -1203,7 +1173,7 @@ impl Database {
project_id: ProjectId,
leader_connection: ConnectionId,
follower_connection: ConnectionId,
) -> Result<TransactionGuard<proto::Room>> {
) -> Result<RoomGuard<proto::Room>> {
self.room_transaction(room_id, |tx| async move {
follower::Entity::delete_many()
.filter(

View File

@@ -1,261 +0,0 @@
use anyhow::anyhow;
use rpc::{proto, ConnectionId};
use sea_orm::{
ActiveModelTrait, ActiveValue, ColumnTrait, Condition, DatabaseTransaction, EntityTrait,
ModelTrait, QueryFilter,
};
use crate::db::ProjectId;
use super::{
channel, project, project_collaborator, remote_project, worktree, ChannelId, Database,
DevServerId, RejoinedProject, RemoteProjectId, ResharedProject, ServerId, UserId,
};
impl Database {
pub async fn get_remote_project(
&self,
remote_project_id: RemoteProjectId,
) -> crate::Result<remote_project::Model> {
self.transaction(|tx| async move {
Ok(remote_project::Entity::find_by_id(remote_project_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no remote project with id {}", remote_project_id))?)
})
.await
}
pub async fn get_remote_projects(
&self,
channel_ids: &Vec<ChannelId>,
tx: &DatabaseTransaction,
) -> crate::Result<Vec<proto::RemoteProject>> {
let servers = remote_project::Entity::find()
.filter(remote_project::Column::ChannelId.is_in(channel_ids.iter().map(|id| id.0)))
.find_also_related(project::Entity)
.all(tx)
.await?;
Ok(servers
.into_iter()
.map(|(remote_project, project)| proto::RemoteProject {
id: remote_project.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
channel_id: remote_project.channel_id.to_proto(),
name: remote_project.name,
dev_server_id: remote_project.dev_server_id.to_proto(),
path: remote_project.path,
})
.collect())
}
pub async fn get_remote_projects_for_dev_server(
&self,
dev_server_id: DevServerId,
) -> crate::Result<Vec<proto::RemoteProject>> {
self.transaction(|tx| async move {
let servers = remote_project::Entity::find()
.filter(remote_project::Column::DevServerId.eq(dev_server_id))
.find_also_related(project::Entity)
.all(&*tx)
.await?;
Ok(servers
.into_iter()
.map(|(remote_project, project)| proto::RemoteProject {
id: remote_project.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
channel_id: remote_project.channel_id.to_proto(),
name: remote_project.name,
dev_server_id: remote_project.dev_server_id.to_proto(),
path: remote_project.path,
})
.collect())
})
.await
}
pub async fn get_stale_dev_server_projects(
&self,
connection: ConnectionId,
) -> crate::Result<Vec<ProjectId>> {
self.transaction(|tx| async move {
let projects = project::Entity::find()
.filter(
Condition::all()
.add(project::Column::HostConnectionId.eq(connection.id))
.add(project::Column::HostConnectionServerId.eq(connection.owner_id)),
)
.all(&*tx)
.await?;
Ok(projects.into_iter().map(|p| p.id).collect())
})
.await
}
pub async fn create_remote_project(
&self,
channel_id: ChannelId,
dev_server_id: DevServerId,
name: &str,
path: &str,
user_id: UserId,
) -> crate::Result<(channel::Model, remote_project::Model)> {
self.transaction(|tx| async move {
let channel = self.get_channel_internal(channel_id, &tx).await?;
self.check_user_is_channel_admin(&channel, user_id, &tx)
.await?;
let project = remote_project::Entity::insert(remote_project::ActiveModel {
name: ActiveValue::Set(name.to_string()),
id: ActiveValue::NotSet,
channel_id: ActiveValue::Set(channel_id),
dev_server_id: ActiveValue::Set(dev_server_id),
path: ActiveValue::Set(path.to_string()),
})
.exec_with_returning(&*tx)
.await?;
Ok((channel, project))
})
.await
}
pub async fn share_remote_project(
&self,
remote_project_id: RemoteProjectId,
dev_server_id: DevServerId,
connection: ConnectionId,
worktrees: &[proto::WorktreeMetadata],
) -> crate::Result<proto::RemoteProject> {
self.transaction(|tx| async move {
let remote_project = remote_project::Entity::find_by_id(remote_project_id)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no remote project with id {}", remote_project_id))?;
if remote_project.dev_server_id != dev_server_id {
return Err(anyhow!("remote project shared from wrong server"))?;
}
let project = project::ActiveModel {
room_id: ActiveValue::Set(None),
host_user_id: ActiveValue::Set(None),
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
host_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
id: ActiveValue::NotSet,
hosted_project_id: ActiveValue::Set(None),
remote_project_id: ActiveValue::Set(Some(remote_project_id)),
}
.insert(&*tx)
.await?;
if !worktrees.is_empty() {
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
worktree::ActiveModel {
id: ActiveValue::set(worktree.id as i64),
project_id: ActiveValue::set(project.id),
abs_path: ActiveValue::set(worktree.abs_path.clone()),
root_name: ActiveValue::set(worktree.root_name.clone()),
visible: ActiveValue::set(worktree.visible),
scan_id: ActiveValue::set(0),
completed_scan_id: ActiveValue::set(0),
}
}))
.exec(&*tx)
.await?;
}
Ok(remote_project.to_proto(Some(project)))
})
.await
}
pub async fn reshare_remote_projects(
&self,
reshared_projects: &Vec<proto::UpdateProject>,
dev_server_id: DevServerId,
connection: ConnectionId,
) -> crate::Result<Vec<ResharedProject>> {
// todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?)
self.transaction(|tx| async move {
let mut ret = Vec::new();
for reshared_project in reshared_projects {
let project_id = ProjectId::from_proto(reshared_project.project_id);
let (project, remote_project) = project::Entity::find_by_id(project_id)
.find_also_related(remote_project::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("project does not exist"))?;
if remote_project.map(|rp| rp.dev_server_id) != Some(dev_server_id) {
return Err(anyhow!("remote project reshared from wrong server"))?;
}
let Ok(old_connection_id) = project.host_connection() else {
return Err(anyhow!("remote project was not shared"))?;
};
project::Entity::update(project::ActiveModel {
id: ActiveValue::set(project_id),
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
host_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
..Default::default()
})
.exec(&*tx)
.await?;
let collaborators = project
.find_related(project_collaborator::Entity)
.all(&*tx)
.await?;
self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
.await?;
ret.push(super::ResharedProject {
id: project_id,
old_connection_id,
collaborators: collaborators
.iter()
.map(|collaborator| super::ProjectCollaborator {
connection_id: collaborator.connection(),
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
})
.collect(),
worktrees: reshared_project.worktrees.clone(),
});
}
Ok(ret)
})
.await
}
pub async fn rejoin_remote_projects(
&self,
rejoined_projects: &Vec<proto::RejoinProject>,
user_id: UserId,
connection_id: ConnectionId,
) -> crate::Result<Vec<RejoinedProject>> {
// todo!() project_transaction? (maybe we can make the lock per-dev-server instead of per-project?)
self.transaction(|tx| async move {
let mut ret = Vec::new();
for rejoined_project in rejoined_projects {
if let Some(project) = self
.rejoin_project_internal(&tx, rejoined_project, user_id, connection_id)
.await?
{
ret.push(project);
}
}
Ok(ret)
})
.await
}
}

View File

@@ -6,7 +6,7 @@ impl Database {
&self,
room_id: RoomId,
new_server_id: ServerId,
) -> Result<TransactionGuard<RefreshedRoom>> {
) -> Result<RoomGuard<RefreshedRoom>> {
self.room_transaction(room_id, |tx| async move {
let stale_participant_filter = Condition::all()
.add(room_participant::Column::RoomId.eq(room_id))
@@ -149,7 +149,7 @@ impl Database {
calling_connection: ConnectionId,
called_user_id: UserId,
initial_project_id: Option<ProjectId>,
) -> Result<TransactionGuard<(proto::Room, proto::IncomingCall)>> {
) -> Result<RoomGuard<(proto::Room, proto::IncomingCall)>> {
self.room_transaction(room_id, |tx| async move {
let caller = room_participant::Entity::find()
.filter(
@@ -201,7 +201,7 @@ impl Database {
&self,
room_id: RoomId,
called_user_id: UserId,
) -> Result<TransactionGuard<proto::Room>> {
) -> Result<RoomGuard<proto::Room>> {
self.room_transaction(room_id, |tx| async move {
room_participant::Entity::delete_many()
.filter(
@@ -221,7 +221,7 @@ impl Database {
&self,
expected_room_id: Option<RoomId>,
user_id: UserId,
) -> Result<Option<TransactionGuard<proto::Room>>> {
) -> Result<Option<RoomGuard<proto::Room>>> {
self.optional_room_transaction(|tx| async move {
let mut filter = Condition::all()
.add(room_participant::Column::UserId.eq(user_id))
@@ -258,7 +258,7 @@ impl Database {
room_id: RoomId,
calling_connection: ConnectionId,
called_user_id: UserId,
) -> Result<TransactionGuard<proto::Room>> {
) -> Result<RoomGuard<proto::Room>> {
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
.filter(
@@ -294,7 +294,7 @@ impl Database {
room_id: RoomId,
user_id: UserId,
connection: ConnectionId,
) -> Result<TransactionGuard<JoinRoom>> {
) -> Result<RoomGuard<JoinRoom>> {
self.room_transaction(room_id, |tx| async move {
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryChannelId {
@@ -349,17 +349,6 @@ impl Database {
.await
}
pub async fn stale_room_connection(&self, user_id: UserId) -> Result<Option<ConnectionId>> {
self.transaction(|tx| async move {
let participant = room_participant::Entity::find()
.filter(room_participant::Column::UserId.eq(user_id))
.one(&*tx)
.await?;
Ok(participant.and_then(|p| p.answering_connection()))
})
.await
}
async fn get_next_participant_index_internal(
&self,
room_id: RoomId,
@@ -414,50 +403,39 @@ impl Database {
.get_next_participant_index_internal(room_id, tx)
.await?;
// If someone has been invited into the room, accept the invite instead of inserting
let result = room_participant::Entity::update_many()
.filter(
Condition::all()
.add(room_participant::Column::RoomId.eq(room_id))
.add(room_participant::Column::UserId.eq(user_id))
.add(room_participant::Column::AnsweringConnectionId.is_null()),
)
.set(room_participant::ActiveModel {
participant_index: ActiveValue::Set(Some(participant_index)),
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
answering_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
answering_connection_lost: ActiveValue::set(false),
..Default::default()
})
.exec(tx)
.await?;
if result.rows_affected == 0 {
room_participant::Entity::insert(room_participant::ActiveModel {
room_id: ActiveValue::set(room_id),
user_id: ActiveValue::set(user_id),
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
answering_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
answering_connection_lost: ActiveValue::set(false),
calling_user_id: ActiveValue::set(user_id),
calling_connection_id: ActiveValue::set(connection.id as i32),
calling_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
participant_index: ActiveValue::Set(Some(participant_index)),
role: ActiveValue::set(Some(role)),
id: ActiveValue::NotSet,
location_kind: ActiveValue::NotSet,
location_project_id: ActiveValue::NotSet,
initial_project_id: ActiveValue::NotSet,
})
.exec(tx)
.await?;
}
room_participant::Entity::insert_many([room_participant::ActiveModel {
room_id: ActiveValue::set(room_id),
user_id: ActiveValue::set(user_id),
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
answering_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
answering_connection_lost: ActiveValue::set(false),
calling_user_id: ActiveValue::set(user_id),
calling_connection_id: ActiveValue::set(connection.id as i32),
calling_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
participant_index: ActiveValue::Set(Some(participant_index)),
role: ActiveValue::set(Some(role)),
id: ActiveValue::NotSet,
location_kind: ActiveValue::NotSet,
location_project_id: ActiveValue::NotSet,
initial_project_id: ActiveValue::NotSet,
}])
.on_conflict(
OnConflict::columns([room_participant::Column::UserId])
.update_columns([
room_participant::Column::AnsweringConnectionId,
room_participant::Column::AnsweringConnectionServerId,
room_participant::Column::AnsweringConnectionLost,
room_participant::Column::ParticipantIndex,
room_participant::Column::Role,
])
.to_owned(),
)
.exec(tx)
.await?;
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
let channel = channel.ok_or_else(|| anyhow!("no channel for room"))?;
@@ -472,7 +450,7 @@ impl Database {
rejoin_room: proto::RejoinRoom,
user_id: UserId,
connection: ConnectionId,
) -> Result<TransactionGuard<RejoinedRoom>> {
) -> Result<RoomGuard<RejoinedRoom>> {
let room_id = RoomId::from_proto(rejoin_room.id);
self.room_transaction(room_id, |tx| async {
let tx = tx;
@@ -572,12 +550,180 @@ impl Database {
let mut rejoined_projects = Vec::new();
for rejoined_project in &rejoin_room.rejoined_projects {
if let Some(rejoined_project) = self
.rejoin_project_internal(&tx, rejoined_project, user_id, connection)
.await?
{
rejoined_projects.push(rejoined_project);
let project_id = ProjectId::from_proto(rejoined_project.id);
let Some(project) = project::Entity::find_by_id(project_id).one(&*tx).await? else {
continue;
};
let mut worktrees = Vec::new();
let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
for db_worktree in db_worktrees {
let mut worktree = RejoinedWorktree {
id: db_worktree.id as u64,
abs_path: db_worktree.abs_path,
root_name: db_worktree.root_name,
visible: db_worktree.visible,
updated_entries: Default::default(),
removed_entries: Default::default(),
updated_repositories: Default::default(),
removed_repositories: Default::default(),
diagnostic_summaries: Default::default(),
settings_files: Default::default(),
scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64,
};
let rejoined_worktree = rejoined_project
.worktrees
.iter()
.find(|worktree| worktree.id == db_worktree.id as u64);
// File entries
{
let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id)
} else {
worktree_entry::Column::IsDeleted.eq(false)
};
let mut db_entries = worktree_entry::Entity::find()
.filter(
Condition::all()
.add(worktree_entry::Column::ProjectId.eq(project.id))
.add(worktree_entry::Column::WorktreeId.eq(worktree.id))
.add(entry_filter),
)
.stream(&*tx)
.await?;
while let Some(db_entry) = db_entries.next().await {
let db_entry = db_entry?;
if db_entry.is_deleted {
worktree.removed_entries.push(db_entry.id as u64);
} else {
worktree.updated_entries.push(proto::Entry {
id: db_entry.id as u64,
is_dir: db_entry.is_dir,
path: db_entry.path,
inode: db_entry.inode as u64,
mtime: Some(proto::Timestamp {
seconds: db_entry.mtime_seconds as u64,
nanos: db_entry.mtime_nanos as u32,
}),
is_symlink: db_entry.is_symlink,
is_ignored: db_entry.is_ignored,
is_external: db_entry.is_external,
git_status: db_entry.git_status.map(|status| status as i32),
});
}
}
}
// Repository Entries
{
let repository_entry_filter =
if let Some(rejoined_worktree) = rejoined_worktree {
worktree_repository::Column::ScanId.gt(rejoined_worktree.scan_id)
} else {
worktree_repository::Column::IsDeleted.eq(false)
};
let mut db_repositories = worktree_repository::Entity::find()
.filter(
Condition::all()
.add(worktree_repository::Column::ProjectId.eq(project.id))
.add(worktree_repository::Column::WorktreeId.eq(worktree.id))
.add(repository_entry_filter),
)
.stream(&*tx)
.await?;
while let Some(db_repository) = db_repositories.next().await {
let db_repository = db_repository?;
if db_repository.is_deleted {
worktree
.removed_repositories
.push(db_repository.work_directory_id as u64);
} else {
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
});
}
}
}
worktrees.push(worktree);
}
let language_servers = project
.find_related(language_server::Entity)
.all(&*tx)
.await?
.into_iter()
.map(|language_server| proto::LanguageServer {
id: language_server.id as u64,
name: language_server.name,
})
.collect::<Vec<_>>();
{
let mut db_settings_files = worktree_settings_file::Entity::find()
.filter(worktree_settings_file::Column::ProjectId.eq(project_id))
.stream(&*tx)
.await?;
while let Some(db_settings_file) = db_settings_files.next().await {
let db_settings_file = db_settings_file?;
if let Some(worktree) = worktrees
.iter_mut()
.find(|w| w.id == db_settings_file.worktree_id as u64)
{
worktree.settings_files.push(WorktreeSettingsFile {
path: db_settings_file.path,
content: db_settings_file.content,
});
}
}
}
let mut collaborators = project
.find_related(project_collaborator::Entity)
.all(&*tx)
.await?;
let self_collaborator = if let Some(self_collaborator_ix) = collaborators
.iter()
.position(|collaborator| collaborator.user_id == user_id)
{
collaborators.swap_remove(self_collaborator_ix)
} else {
continue;
};
let old_connection_id = self_collaborator.connection();
project_collaborator::Entity::update(project_collaborator::ActiveModel {
connection_id: ActiveValue::set(connection.id as i32),
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
..self_collaborator.into_active_model()
})
.exec(&*tx)
.await?;
let collaborators = collaborators
.into_iter()
.map(|collaborator| ProjectCollaborator {
connection_id: collaborator.connection(),
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
})
.collect::<Vec<_>>();
rejoined_projects.push(RejoinedProject {
id: project_id,
old_connection_id,
collaborators,
worktrees,
language_servers,
});
}
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
@@ -592,192 +738,10 @@ impl Database {
.await
}
pub async fn rejoin_project_internal(
&self,
tx: &DatabaseTransaction,
rejoined_project: &proto::RejoinProject,
user_id: UserId,
connection: ConnectionId,
) -> Result<Option<RejoinedProject>> {
let project_id = ProjectId::from_proto(rejoined_project.id);
let Some(project) = project::Entity::find_by_id(project_id).one(tx).await? else {
return Ok(None);
};
let mut worktrees = Vec::new();
let db_worktrees = project.find_related(worktree::Entity).all(tx).await?;
for db_worktree in db_worktrees {
let mut worktree = RejoinedWorktree {
id: db_worktree.id as u64,
abs_path: db_worktree.abs_path,
root_name: db_worktree.root_name,
visible: db_worktree.visible,
updated_entries: Default::default(),
removed_entries: Default::default(),
updated_repositories: Default::default(),
removed_repositories: Default::default(),
diagnostic_summaries: Default::default(),
settings_files: Default::default(),
scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64,
};
let rejoined_worktree = rejoined_project
.worktrees
.iter()
.find(|worktree| worktree.id == db_worktree.id as u64);
// File entries
{
let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id)
} else {
worktree_entry::Column::IsDeleted.eq(false)
};
let mut db_entries = worktree_entry::Entity::find()
.filter(
Condition::all()
.add(worktree_entry::Column::ProjectId.eq(project.id))
.add(worktree_entry::Column::WorktreeId.eq(worktree.id))
.add(entry_filter),
)
.stream(tx)
.await?;
while let Some(db_entry) = db_entries.next().await {
let db_entry = db_entry?;
if db_entry.is_deleted {
worktree.removed_entries.push(db_entry.id as u64);
} else {
worktree.updated_entries.push(proto::Entry {
id: db_entry.id as u64,
is_dir: db_entry.is_dir,
path: db_entry.path,
inode: db_entry.inode as u64,
mtime: Some(proto::Timestamp {
seconds: db_entry.mtime_seconds as u64,
nanos: db_entry.mtime_nanos as u32,
}),
is_symlink: db_entry.is_symlink,
is_ignored: db_entry.is_ignored,
is_external: db_entry.is_external,
git_status: db_entry.git_status.map(|status| status as i32),
});
}
}
}
// Repository Entries
{
let repository_entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
worktree_repository::Column::ScanId.gt(rejoined_worktree.scan_id)
} else {
worktree_repository::Column::IsDeleted.eq(false)
};
let mut db_repositories = worktree_repository::Entity::find()
.filter(
Condition::all()
.add(worktree_repository::Column::ProjectId.eq(project.id))
.add(worktree_repository::Column::WorktreeId.eq(worktree.id))
.add(repository_entry_filter),
)
.stream(tx)
.await?;
while let Some(db_repository) = db_repositories.next().await {
let db_repository = db_repository?;
if db_repository.is_deleted {
worktree
.removed_repositories
.push(db_repository.work_directory_id as u64);
} else {
worktree.updated_repositories.push(proto::RepositoryEntry {
work_directory_id: db_repository.work_directory_id as u64,
branch: db_repository.branch,
});
}
}
}
worktrees.push(worktree);
}
let language_servers = project
.find_related(language_server::Entity)
.all(tx)
.await?
.into_iter()
.map(|language_server| proto::LanguageServer {
id: language_server.id as u64,
name: language_server.name,
})
.collect::<Vec<_>>();
{
let mut db_settings_files = worktree_settings_file::Entity::find()
.filter(worktree_settings_file::Column::ProjectId.eq(project_id))
.stream(tx)
.await?;
while let Some(db_settings_file) = db_settings_files.next().await {
let db_settings_file = db_settings_file?;
if let Some(worktree) = worktrees
.iter_mut()
.find(|w| w.id == db_settings_file.worktree_id as u64)
{
worktree.settings_files.push(WorktreeSettingsFile {
path: db_settings_file.path,
content: db_settings_file.content,
});
}
}
}
let mut collaborators = project
.find_related(project_collaborator::Entity)
.all(tx)
.await?;
let self_collaborator = if let Some(self_collaborator_ix) = collaborators
.iter()
.position(|collaborator| collaborator.user_id == user_id)
{
collaborators.swap_remove(self_collaborator_ix)
} else {
return Ok(None);
};
let old_connection_id = self_collaborator.connection();
project_collaborator::Entity::update(project_collaborator::ActiveModel {
connection_id: ActiveValue::set(connection.id as i32),
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
..self_collaborator.into_active_model()
})
.exec(tx)
.await?;
let collaborators = collaborators
.into_iter()
.map(|collaborator| ProjectCollaborator {
connection_id: collaborator.connection(),
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
})
.collect::<Vec<_>>();
return Ok(Some(RejoinedProject {
id: project_id,
old_connection_id,
collaborators,
worktrees,
language_servers,
}));
}
pub async fn leave_room(
&self,
connection: ConnectionId,
) -> Result<Option<TransactionGuard<LeftRoom>>> {
) -> Result<Option<RoomGuard<LeftRoom>>> {
self.optional_room_transaction(|tx| async move {
let leaving_participant = room_participant::Entity::find()
.filter(
@@ -949,7 +913,7 @@ impl Database {
room_id: RoomId,
connection: ConnectionId,
location: proto::ParticipantLocation,
) -> Result<TransactionGuard<proto::Room>> {
) -> Result<RoomGuard<proto::Room>> {
self.room_transaction(room_id, |tx| async {
let tx = tx;
let location_kind;
@@ -1011,7 +975,7 @@ impl Database {
room_id: RoomId,
user_id: UserId,
role: ChannelRole,
) -> Result<TransactionGuard<proto::Room>> {
) -> Result<RoomGuard<proto::Room>> {
self.room_transaction(room_id, |tx| async move {
room_participant::Entity::find()
.filter(
@@ -1164,7 +1128,7 @@ impl Database {
&self,
room_id: RoomId,
connection_id: ConnectionId,
) -> Result<TransactionGuard<HashSet<ConnectionId>>> {
) -> Result<RoomGuard<HashSet<ConnectionId>>> {
self.room_transaction(room_id, |tx| async move {
let mut participants = room_participant::Entity::find()
.filter(room_participant::Column::RoomId.eq(room_id))

View File

@@ -11,7 +11,6 @@ pub mod channel_message_mention;
pub mod contact;
pub mod contributor;
pub mod dev_server;
pub mod embedding;
pub mod extension;
pub mod extension_version;
pub mod feature_flag;
@@ -25,7 +24,6 @@ pub mod observed_channel_messages;
pub mod project;
pub mod project_collaborator;
pub mod rate_buckets;
pub mod remote_project;
pub mod room;
pub mod room_participant;
pub mod server;

View File

@@ -1,5 +1,4 @@
use crate::db::{ChannelId, DevServerId};
use rpc::proto;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
@@ -16,14 +15,3 @@ impl ActiveModelBehavior for ActiveModel {}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl Model {
pub fn to_proto(&self, status: proto::DevServerStatus) -> proto::DevServer {
proto::DevServer {
dev_server_id: self.id.to_proto(),
channel_id: self.channel_id.to_proto(),
name: self.name.clone(),
status: status as i32,
}
}
}

View File

@@ -1,18 +0,0 @@
use sea_orm::entity::prelude::*;
use time::PrimitiveDateTime;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "embeddings")]
pub struct Model {
#[sea_orm(primary_key)]
pub model: String,
#[sea_orm(primary_key)]
pub digest: Vec<u8>,
pub dimensions: Vec<f32>,
pub retrieved_at: PrimitiveDateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -1,4 +1,4 @@
use crate::db::{HostedProjectId, ProjectId, RemoteProjectId, Result, RoomId, ServerId, UserId};
use crate::db::{HostedProjectId, ProjectId, Result, RoomId, ServerId, UserId};
use anyhow::anyhow;
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
@@ -13,7 +13,6 @@ pub struct Model {
pub host_connection_id: Option<i32>,
pub host_connection_server_id: Option<ServerId>,
pub hosted_project_id: Option<HostedProjectId>,
pub remote_project_id: Option<RemoteProjectId>,
}
impl Model {
@@ -57,12 +56,6 @@ pub enum Relation {
to = "super::hosted_project::Column::Id"
)]
HostedProject,
#[sea_orm(
belongs_to = "super::remote_project::Entity",
from = "Column::RemoteProjectId",
to = "super::remote_project::Column::Id"
)]
RemoteProject,
}
impl Related<super::user::Entity> for Entity {
@@ -101,10 +94,4 @@ impl Related<super::hosted_project::Entity> for Entity {
}
}
impl Related<super::remote_project::Entity> for Entity {
fn to() -> RelationDef {
Relation::RemoteProject.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -1,42 +0,0 @@
use super::project;
use crate::db::{ChannelId, DevServerId, RemoteProjectId};
use rpc::proto;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "remote_projects")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: RemoteProjectId,
pub channel_id: ChannelId,
pub dev_server_id: DevServerId,
pub name: String,
pub path: String,
}
impl ActiveModelBehavior for ActiveModel {}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_one = "super::project::Entity")]
Project,
}
impl Related<super::project::Entity> for Entity {
fn to() -> RelationDef {
Relation::Project.def()
}
}
impl Model {
pub fn to_proto(&self, project: Option<project::Model>) -> proto::RemoteProject {
proto::RemoteProject {
id: self.id.to_proto(),
project_id: project.map(|p| p.id.to_proto()),
channel_id: self.channel_id.to_proto(),
dev_server_id: self.dev_server_id.to_proto(),
name: self.name.clone(),
path: self.path.clone(),
}
}
}

View File

@@ -2,7 +2,6 @@ mod buffer_tests;
mod channel_tests;
mod contributor_tests;
mod db_tests;
mod embedding_tests;
mod extension_tests;
mod feature_flag_tests;
mod message_tests;

View File

@@ -1,84 +0,0 @@
use super::TestDb;
use crate::db::embedding;
use collections::HashMap;
use sea_orm::{sea_query::Expr, ColumnTrait, EntityTrait, QueryFilter};
use std::ops::Sub;
use time::{Duration, OffsetDateTime, PrimitiveDateTime};
// SQLite does not support array arguments, so we only test this against a real postgres instance
#[gpui::test]
async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) {
let test_db = TestDb::postgres(cx.executor().clone());
let db = test_db.db();
let provider = "test_model";
let digest1 = vec![1, 2, 3];
let digest2 = vec![4, 5, 6];
let embeddings = HashMap::from_iter([
(digest1.clone(), vec![0.1, 0.2, 0.3]),
(digest2.clone(), vec![0.4, 0.5, 0.6]),
]);
// Save embeddings
db.save_embeddings(provider, &embeddings).await.unwrap();
// Retrieve embeddings
let retrieved_embeddings = db
.get_embeddings(provider, &[digest1.clone(), digest2.clone()])
.await
.unwrap();
assert_eq!(retrieved_embeddings.len(), 2);
assert!(retrieved_embeddings.contains_key(&digest1));
assert!(retrieved_embeddings.contains_key(&digest2));
// Check if the retrieved embeddings are correct
assert_eq!(retrieved_embeddings[&digest1], vec![0.1, 0.2, 0.3]);
assert_eq!(retrieved_embeddings[&digest2], vec![0.4, 0.5, 0.6]);
}
#[gpui::test]
async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) {
let test_db = TestDb::postgres(cx.executor().clone());
let db = test_db.db();
let model = "test_model";
let digest = vec![7, 8, 9];
let embeddings = HashMap::from_iter([(digest.clone(), vec![0.7, 0.8, 0.9])]);
// Save old embeddings
db.save_embeddings(model, &embeddings).await.unwrap();
// Reach into the DB and change the retrieved-at timestamp to be > 60 days old
db.weak_transaction(|tx| {
let digest = digest.clone();
async move {
let sixty_days_ago = OffsetDateTime::now_utc().sub(Duration::days(61));
let retrieved_at = PrimitiveDateTime::new(sixty_days_ago.date(), sixty_days_ago.time());
embedding::Entity::update_many()
.filter(
embedding::Column::Model
.eq(model)
.and(embedding::Column::Digest.eq(digest)),
)
.col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at))
.exec(&*tx)
.await
.unwrap();
Ok(())
}
})
.await
.unwrap();
// Purge old embeddings
db.purge_old_embeddings().await.unwrap();
// Try to retrieve the purged embeddings
let retrieved_embeddings = db.get_embeddings(model, &[digest.clone()]).await.unwrap();
assert!(
retrieved_embeddings.is_empty(),
"Old embeddings should have been purged"
);
}

View File

@@ -1,5 +1,4 @@
use super::Database;
use crate::db::ExtensionVersionConstraints;
use crate::{
db::{queries::extensions::convert_time_to_chrono, ExtensionMetadata, NewExtensionVersion},
test_both_dbs,
@@ -279,108 +278,3 @@ async fn test_extensions(db: &Arc<Database>) {
]
);
}
test_both_dbs!(
test_extensions_by_id,
test_extensions_by_id_postgres,
test_extensions_by_id_sqlite
);
async fn test_extensions_by_id(db: &Arc<Database>) {
let versions = db.get_known_extension_versions().await.unwrap();
assert!(versions.is_empty());
let extensions = db.get_extensions(None, 1, 5).await.unwrap();
assert!(extensions.is_empty());
let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
let t0 = time::PrimitiveDateTime::new(t0.date(), t0.time());
let t0_chrono = convert_time_to_chrono(t0);
db.insert_extension_versions(
&[
(
"ext1",
vec![
NewExtensionVersion {
name: "Extension 1".into(),
version: semver::Version::parse("0.0.1").unwrap(),
description: "an extension".into(),
authors: vec!["max".into()],
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.4".into()),
published_at: t0,
},
NewExtensionVersion {
name: "Extension 1".into(),
version: semver::Version::parse("0.0.2").unwrap(),
description: "a good extension".into(),
authors: vec!["max".into()],
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.4".into()),
published_at: t0,
},
NewExtensionVersion {
name: "Extension 1".into(),
version: semver::Version::parse("0.0.3").unwrap(),
description: "a real good extension".into(),
authors: vec!["max".into(), "marshall".into()],
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.5".into()),
published_at: t0,
},
],
),
(
"ext2",
vec![NewExtensionVersion {
name: "Extension 2".into(),
version: semver::Version::parse("0.2.0").unwrap(),
description: "a great extension".into(),
authors: vec!["marshall".into()],
repository: "ext2/repo".into(),
schema_version: 0,
wasm_api_version: None,
published_at: t0,
}],
),
]
.into_iter()
.collect(),
)
.await
.unwrap();
let extensions = db
.get_extensions_by_ids(
&["ext1"],
Some(&ExtensionVersionConstraints {
schema_versions: 1..=1,
wasm_api_versions: "0.0.1".parse().unwrap()..="0.0.4".parse().unwrap(),
}),
)
.await
.unwrap();
assert_eq!(
extensions,
&[ExtensionMetadata {
id: "ext1".into(),
manifest: rpc::ExtensionApiManifest {
name: "Extension 1".into(),
version: "0.0.2".into(),
authors: vec!["max".into()],
description: Some("a good extension".into()),
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: Some("0.0.4".into()),
},
published_at: t0_chrono,
download_count: 0,
}]
);
}

View File

@@ -134,7 +134,6 @@ pub struct Config {
pub zed_environment: Arc<str>,
pub openai_api_key: Option<Arc<str>>,
pub google_ai_api_key: Option<Arc<str>>,
pub anthropic_api_key: Option<Arc<str>>,
pub zed_client_checksum_seed: Option<String>,
pub slack_panics_webhook: Option<String>,
pub auto_join_channel_id: Option<ChannelId>,

View File

@@ -6,8 +6,8 @@ use axum::{
Extension, Router,
};
use collab::{
api::fetch_extensions_from_blob_store_periodically, db, env, executor::Executor,
rpc::ResultExt, AppState, Config, RateLimiter, Result,
api::fetch_extensions_from_blob_store_periodically, db, env, executor::Executor, AppState,
Config, RateLimiter, Result,
};
use db::Database;
use std::{
@@ -23,7 +23,7 @@ use tower_http::trace::TraceLayer;
use tracing_subscriber::{
filter::EnvFilter, fmt::format::JsonFields, util::SubscriberInitExt, Layer,
};
use util::ResultExt as _;
use util::ResultExt;
const VERSION: &str = env!("CARGO_PKG_VERSION");
const REVISION: Option<&'static str> = option_env!("GITHUB_SHA");
@@ -90,7 +90,6 @@ async fn main() -> Result<()> {
};
if is_collab {
state.db.purge_old_embeddings().await.trace_err();
RateLimiter::save_periodically(state.rate_limiter.clone(), state.executor.clone());
}
@@ -138,38 +137,18 @@ async fn main() -> Result<()> {
);
#[cfg(unix)]
let signal = async move {
let mut sigterm = tokio::signal::unix::signal(SignalKind::terminate())
.expect("failed to listen for interrupt signal");
let mut sigint = tokio::signal::unix::signal(SignalKind::interrupt())
.expect("failed to listen for interrupt signal");
let sigterm = sigterm.recv();
let sigint = sigint.recv();
futures::pin_mut!(sigterm, sigint);
futures::future::select(sigterm, sigint).await;
};
#[cfg(windows)]
let signal = async move {
// todo(windows):
// `ctrl_close` does not work well, because tokio's signal handler always returns soon,
// but the system terminates the application soon after the CTRL+CLOSE handler returns.
// So we should implement a blocking handler to handle the CTRL+CLOSE signal.
let mut ctrl_break = tokio::signal::windows::ctrl_break()
.expect("failed to listen for interrupt signal");
let mut ctrl_c = tokio::signal::windows::ctrl_c()
.expect("failed to listen for interrupt signal");
let ctrl_break = ctrl_break.recv();
let ctrl_c = ctrl_c.recv();
futures::pin_mut!(ctrl_break, ctrl_c);
futures::future::select(ctrl_break, ctrl_c).await;
};
axum::Server::from_tcp(listener)
.map_err(|e| anyhow!(e))?
.serve(app.into_make_service_with_connect_info::<SocketAddr>())
.with_graceful_shutdown(async move {
signal.await;
let mut sigterm = tokio::signal::unix::signal(SignalKind::terminate())
.expect("failed to listen for interrupt signal");
let mut sigint = tokio::signal::unix::signal(SignalKind::interrupt())
.expect("failed to listen for interrupt signal");
let sigterm = sigterm.recv();
let sigint = sigint.recv();
futures::pin_mut!(sigterm, sigint);
futures::future::select(sigterm, sigint).await;
tracing::info!("Received interrupt signal");
if let Some(rpc_server) = rpc_server {
@@ -178,6 +157,10 @@ async fn main() -> Result<()> {
})
.await
.map_err(|e| anyhow!(e))?;
// todo("windows")
#[cfg(windows)]
unimplemented!();
}
_ => {
Err(anyhow!(

File diff suppressed because it is too large

View File

@@ -1,27 +1,26 @@
use crate::db::{ChannelId, ChannelRole, DevServerId, PrincipalId, UserId};
use crate::db::{ChannelId, ChannelRole, UserId};
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashMap, HashSet};
use rpc::{proto, ConnectionId};
use semantic_version::SemanticVersion;
use rpc::ConnectionId;
use serde::Serialize;
use std::fmt;
use tracing::instrument;
use util::{semver, SemanticVersion};
#[derive(Default, Serialize)]
pub struct ConnectionPool {
connections: BTreeMap<ConnectionId, Connection>,
connected_users: BTreeMap<UserId, ConnectedPrincipal>,
connected_dev_servers: BTreeMap<DevServerId, ConnectionId>,
connected_users: BTreeMap<UserId, ConnectedUser>,
channels: ChannelPool,
}
#[derive(Default, Serialize)]
struct ConnectedPrincipal {
struct ConnectedUser {
connection_ids: HashSet<ConnectionId>,
}
#[derive(Debug, Serialize)]
pub struct ZedVersion(pub SemanticVersion);
use std::fmt;
impl fmt::Display for ZedVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -31,13 +30,13 @@ impl fmt::Display for ZedVersion {
impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
self.0 >= SemanticVersion::new(0, 129, 2)
self.0 >= semver(0, 127, 3)
}
}
#[derive(Serialize)]
pub struct Connection {
pub principal_id: PrincipalId,
pub user_id: UserId,
pub admin: bool,
pub zed_version: ZedVersion,
}
@@ -60,7 +59,7 @@ impl ConnectionPool {
self.connections.insert(
connection_id,
Connection {
principal_id: PrincipalId::UserId(user_id),
user_id,
admin,
zed_version,
},
@@ -69,25 +68,6 @@ impl ConnectionPool {
connected_user.connection_ids.insert(connection_id);
}
pub fn add_dev_server(
&mut self,
connection_id: ConnectionId,
dev_server_id: DevServerId,
zed_version: ZedVersion,
) {
self.connections.insert(
connection_id,
Connection {
principal_id: PrincipalId::DevServerId(dev_server_id),
admin: false,
zed_version,
},
);
self.connected_dev_servers
.insert(dev_server_id, connection_id);
}
#[instrument(skip(self))]
pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
let connection = self
@@ -95,18 +75,12 @@ impl ConnectionPool {
.get_mut(&connection_id)
.ok_or_else(|| anyhow!("no such connection"))?;
match connection.principal_id {
PrincipalId::UserId(user_id) => {
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
self.channels.remove_user(&user_id);
}
}
PrincipalId::DevServerId(dev_server_id) => {
self.connected_dev_servers.remove(&dev_server_id);
}
let user_id = connection.user_id;
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
self.channels.remove_user(&user_id);
}
self.connections.remove(&connection_id).unwrap();
Ok(())
@@ -136,18 +110,6 @@ impl ConnectionPool {
.copied()
}
pub fn dev_server_status(&self, dev_server_id: DevServerId) -> proto::DevServerStatus {
if self.dev_server_connection_id(dev_server_id).is_some() {
proto::DevServerStatus::Online
} else {
proto::DevServerStatus::Offline
}
}
pub fn dev_server_connection_id(&self, dev_server_id: DevServerId) -> Option<ConnectionId> {
self.connected_dev_servers.get(&dev_server_id).copied()
}
pub fn channel_user_ids(
&self,
channel_id: ChannelId,
@@ -192,39 +154,22 @@ impl ConnectionPool {
#[cfg(test)]
pub fn check_invariants(&self) {
for (connection_id, connection) in &self.connections {
match &connection.principal_id {
PrincipalId::UserId(user_id) => {
assert!(self
.connected_users
.get(user_id)
.unwrap()
.connection_ids
.contains(connection_id));
}
PrincipalId::DevServerId(dev_server_id) => {
assert_eq!(
self.connected_dev_servers.get(&dev_server_id).unwrap(),
connection_id
);
}
}
assert!(self
.connected_users
.get(&connection.user_id)
.unwrap()
.connection_ids
.contains(connection_id));
}
for (user_id, state) in &self.connected_users {
for connection_id in &state.connection_ids {
assert_eq!(
self.connections.get(connection_id).unwrap().principal_id,
PrincipalId::UserId(*user_id)
self.connections.get(connection_id).unwrap().user_id,
*user_id
);
}
}
for (dev_server_id, connection_id) in &self.connected_dev_servers {
assert_eq!(
self.connections.get(connection_id).unwrap().principal_id,
PrincipalId::DevServerId(*dev_server_id)
);
}
}
}

View File

@@ -8,7 +8,6 @@ mod channel_buffer_tests;
mod channel_guest_tests;
mod channel_message_tests;
mod channel_tests;
mod dev_server_tests;
mod editor_tests;
mod following_tests;
mod integration_tests;

View File

@@ -222,18 +222,8 @@ async fn test_remove_channel_message(
.update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap())
.await
.unwrap();
let msg_id_2 = channel_chat_a
.update(cx_a, |c, cx| {
c.send_message(
MessageParams {
text: "two @user_b".to_string(),
mentions: vec![(4..12, client_b.id())],
reply_to_message_id: None,
},
cx,
)
.unwrap()
})
channel_chat_a
.update(cx_a, |c, cx| c.send_message("two".into(), cx).unwrap())
.await
.unwrap();
channel_chat_a
@@ -243,24 +233,10 @@ async fn test_remove_channel_message(
// Clients A and B see all of the messages.
executor.run_until_parked();
let expected_messages = &["one", "two @user_b", "three"];
let expected_messages = &["one", "two", "three"];
assert_messages(&channel_chat_a, expected_messages, cx_a);
assert_messages(&channel_chat_b, expected_messages, cx_b);
// Ensure that client B received a notification for the mention.
client_b.notification_store().read_with(cx_b, |store, _| {
assert_eq!(store.notification_count(), 2);
let entry = store.notification_at(0).unwrap();
assert_eq!(
entry.notification,
Notification::ChannelMessageMention {
message_id: msg_id_2,
sender_id: client_a.id(),
channel_id: channel_id.0,
}
);
});
// Client A deletes one of their messages.
channel_chat_a
.update(cx_a, |c, cx| {
@@ -285,13 +261,6 @@ async fn test_remove_channel_message(
.await
.unwrap();
assert_messages(&channel_chat_c, expected_messages, cx_c);
// Ensure we remove the notifications when the message is removed
client_b.notification_store().read_with(cx_b, |store, _| {
// First notification is the channel invitation, second would be the mention
// notification, which should now be removed.
assert_eq!(store.notification_count(), 1);
});
}
#[track_caller]
@@ -629,97 +598,4 @@ async fn test_chat_editing(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
}
);
});
// Test updating the message while keeping the mention, and check that the body is updated correctly
channel_chat_a
.update(cx_a, |c, cx| {
c.update_message(
msg_id,
MessageParams {
text: "Updated body v2 including a mention for @user_b".into(),
reply_to_message_id: None,
mentions: vec![(37..45, client_b.id())],
},
cx,
)
.unwrap()
})
.await
.unwrap();
cx_a.run_until_parked();
cx_b.run_until_parked();
channel_chat_a.update(cx_a, |channel_chat, _| {
assert_eq!(
channel_chat.find_loaded_message(msg_id).unwrap().body,
"Updated body v2 including a mention for @user_b",
)
});
channel_chat_b.update(cx_b, |channel_chat, _| {
assert_eq!(
channel_chat.find_loaded_message(msg_id).unwrap().body,
"Updated body v2 including a mention for @user_b",
)
});
client_b.notification_store().read_with(cx_b, |store, _| {
let message = store.channel_message_for_id(msg_id);
assert!(message.is_some());
assert_eq!(
message.unwrap().body,
"Updated body v2 including a mention for @user_b"
);
assert_eq!(store.notification_count(), 2);
let entry = store.notification_at(0).unwrap();
assert_eq!(
entry.notification,
Notification::ChannelMessageMention {
message_id: msg_id,
sender_id: client_a.id(),
channel_id: channel_id.0,
}
);
});
// If we remove a mention from a message the corresponding mention notification
// should also be removed.
channel_chat_a
.update(cx_a, |c, cx| {
c.update_message(
msg_id,
MessageParams {
text: "Updated body without a mention".into(),
reply_to_message_id: None,
mentions: vec![],
},
cx,
)
.unwrap()
})
.await
.unwrap();
cx_a.run_until_parked();
cx_b.run_until_parked();
channel_chat_a.update(cx_a, |channel_chat, _| {
assert_eq!(
channel_chat.find_loaded_message(msg_id).unwrap().body,
"Updated body without a mention",
)
});
channel_chat_b.update(cx_b, |channel_chat, _| {
assert_eq!(
channel_chat.find_loaded_message(msg_id).unwrap().body,
"Updated body without a mention",
)
});
client_b.notification_store().read_with(cx_b, |store, _| {
// First notification is the channel invitation, second would be the mention
// notification, which should now be removed.
assert_eq!(store.notification_count(), 1);
});
}

View File

@@ -1,110 +0,0 @@
use std::path::Path;
use editor::Editor;
use fs::Fs;
use gpui::VisualTestContext;
use rpc::proto::DevServerStatus;
use serde_json::json;
use crate::tests::TestServer;
#[gpui::test]
async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
let (server, client) = TestServer::start1(cx).await;
let channel_id = server
.make_channel("test", None, (&client, cx), &mut [])
.await;
let resp = client
.channel_store()
.update(cx, |store, cx| {
store.create_dev_server(channel_id, "server-1".to_string(), cx)
})
.await
.unwrap();
client.channel_store().update(cx, |store, _| {
assert_eq!(store.dev_servers_for_id(channel_id).len(), 1);
assert_eq!(store.dev_servers_for_id(channel_id)[0].name, "server-1");
assert_eq!(
store.dev_servers_for_id(channel_id)[0].status,
DevServerStatus::Offline
);
});
let dev_server = server.create_dev_server(resp.access_token, cx2).await;
cx.executor().run_until_parked();
client.channel_store().update(cx, |store, _| {
assert_eq!(
store.dev_servers_for_id(channel_id)[0].status,
DevServerStatus::Online
);
});
dev_server
.fs()
.insert_tree(
"/remote",
json!({
"1.txt": "remote\nremote\nremote",
"2.js": "function two() { return 2; }",
"3.rs": "mod test",
}),
)
.await;
client
.channel_store()
.update(cx, |store, cx| {
store.create_remote_project(
channel_id,
client::DevServerId(resp.dev_server_id),
"project-1".to_string(),
"/remote".to_string(),
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let remote_workspace = client
.channel_store()
.update(cx, |store, cx| {
let projects = store.remote_projects_for_id(channel_id);
assert_eq!(projects.len(), 1);
assert_eq!(projects[0].name, "project-1");
workspace::join_remote_project(
projects[0].project_id.unwrap(),
client.app_state.clone(),
cx,
)
})
.await
.unwrap();
cx.executor().run_until_parked();
let cx2 = VisualTestContext::from_window(remote_workspace.into(), cx).as_mut();
cx2.simulate_keystrokes("cmd-p 1 enter");
let editor = remote_workspace
.update(cx2, |ws, cx| {
ws.active_item_as::<Editor>(cx).unwrap().clone()
})
.unwrap();
editor.update(cx2, |ed, cx| {
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
});
cx2.simulate_input("wow!");
cx2.simulate_keystrokes("cmd-s");
let content = dev_server
.fs()
.load(&Path::new("/remote/1.txt"))
.await
.unwrap();
assert_eq!(content, "wow!remote\nremote\nremote\n");
}

View File

@@ -23,7 +23,6 @@ use rpc::RECEIVE_TIMEOUT;
use serde_json::json;
use settings::SettingsStore;
use std::{
ops::Range,
path::Path,
sync::{
atomic::{self, AtomicBool, AtomicUsize},
@@ -1987,185 +1986,6 @@ struct Row10;"#};
struct Row1220;"#});
}
#[gpui::test(iterations = 10)]
async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let mut server = TestServer::start(cx_a.executor()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
cx_a.update(editor::init);
cx_b.update(editor::init);
client_a
.fs()
.insert_tree(
"/my-repo",
json!({
".git": {},
"file.txt": "line1\nline2\nline3\nline\n",
}),
)
.await;
let blame = git::blame::Blame {
entries: vec![
blame_entry("1b1b1b", 0..1),
blame_entry("0d0d0d", 1..2),
blame_entry("3a3a3a", 2..3),
blame_entry("4c4c4c", 3..4),
],
permalinks: [
("1b1b1b", "http://example.com/codehost/idx-0"),
("0d0d0d", "http://example.com/codehost/idx-1"),
("3a3a3a", "http://example.com/codehost/idx-2"),
("4c4c4c", "http://example.com/codehost/idx-3"),
]
.into_iter()
.map(|(sha, url)| (sha.parse().unwrap(), url.parse().unwrap()))
.collect(),
messages: [
("1b1b1b", "message for idx-0"),
("0d0d0d", "message for idx-1"),
("3a3a3a", "message for idx-2"),
("4c4c4c", "message for idx-3"),
]
.into_iter()
.map(|(sha, message)| (sha.parse().unwrap(), message.into()))
.collect(),
};
client_a.fs().set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(Path::new("file.txt"), blame)],
);
let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
// Create editor_a
let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
let editor_a = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, "file.txt"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
// Join the project as client B.
let project_b = client_b.build_remote_project(project_id, cx_b).await;
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
let editor_b = workspace_b
.update(cx_b, |workspace, cx| {
workspace.open_path((worktree_id, "file.txt"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
// client_b now requests git blame for the open buffer
editor_b.update(cx_b, |editor_b, cx| {
assert!(editor_b.blame().is_none());
editor_b.toggle_git_blame(&editor::actions::ToggleGitBlame {}, cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_b.update(cx_b, |editor_b, cx| {
let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| {
blame
.blame_for_rows((0..4).map(Some), cx)
.collect::<Vec<_>>()
});
assert_eq!(
entries,
vec![
Some(blame_entry("1b1b1b", 0..1)),
Some(blame_entry("0d0d0d", 1..2)),
Some(blame_entry("3a3a3a", 2..3)),
Some(blame_entry("4c4c4c", 3..4)),
]
);
blame.update(cx, |blame, _| {
for (idx, entry) in entries.iter().flatten().enumerate() {
let details = blame.details_for_entry(entry).unwrap();
assert_eq!(details.message, format!("message for idx-{}", idx));
assert_eq!(
details.permalink.unwrap().to_string(),
format!("http://example.com/codehost/idx-{}", idx)
);
}
});
});
// editor_b updates the file, which gets sent to client_a, which updates git blame,
// which gets back to client_b.
editor_b.update(cx_b, |editor_b, cx| {
editor_b.edit([(Point::new(0, 3)..Point::new(0, 3), "FOO")], cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_b.update(cx_b, |editor_b, cx| {
let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| {
blame
.blame_for_rows((0..4).map(Some), cx)
.collect::<Vec<_>>()
});
assert_eq!(
entries,
vec![
None,
Some(blame_entry("0d0d0d", 1..2)),
Some(blame_entry("3a3a3a", 2..3)),
Some(blame_entry("4c4c4c", 3..4)),
]
);
});
// Now editor_a also updates the file
editor_a.update(cx_a, |editor_a, cx| {
editor_a.edit([(Point::new(1, 3)..Point::new(1, 3), "FOO")], cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_b.update(cx_b, |editor_b, cx| {
let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| {
blame
.blame_for_rows((0..4).map(Some), cx)
.collect::<Vec<_>>()
});
assert_eq!(
entries,
vec![
None,
None,
Some(blame_entry("3a3a3a", 2..3)),
Some(blame_entry("4c4c4c", 3..4)),
]
);
});
}
fn extract_hint_labels(editor: &Editor) -> Vec<String> {
let mut labels = Vec::new();
for hint in editor.inlay_hint_cache().hints() {
@@ -2176,11 +1996,3 @@ fn extract_hint_labels(editor: &Editor) -> Vec<String> {
}
labels
}
fn blame_entry(sha: &str, range: Range<u32>) -> git::blame::BlameEntry {
git::blame::BlameEntry {
sha: sha.parse().unwrap(),
range,
..Default::default()
}
}

View File

@@ -2007,7 +2007,7 @@ async fn test_following_to_channel_notes_without_a_shared_project(
});
}
pub(crate) async fn join_channel(
async fn join_channel(
channel_id: ChannelId,
client: &TestClient,
cx: &mut TestAppContext,

View File

@@ -1,16 +1,12 @@
use crate::{
rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
tests::{
channel_id, following_tests::join_channel, room_participants, rust_lang, RoomParticipants,
TestClient, TestServer,
},
tests::{channel_id, room_participants, rust_lang, RoomParticipants, TestClient, TestServer},
};
use anyhow::{anyhow, Result};
use call::{room, ActiveCall, ParticipantLocation, Room};
use client::{User, RECEIVE_TIMEOUT};
use collections::{HashMap, HashSet};
use fs::{repository::GitFileStatus, FakeFs, Fs as _, RemoveOptions};
use futures::{channel::mpsc, StreamExt as _};
use futures::StreamExt as _;
use gpui::{
px, size, AppContext, BackgroundExecutor, BorrowAppContext, Model, Modifiers, MouseButton,
MouseDownEvent, TestAppContext,
@@ -22,7 +18,6 @@ use language::{
};
use live_kit_client::MacOSDisplay;
use lsp::LanguageServerId;
use parking_lot::Mutex;
use project::{
search::SearchQuery, DiagnosticSummary, FormatTrigger, HoverBlockKind, Project, ProjectPath,
SearchResult,
@@ -42,7 +37,6 @@ use std::{
time::Duration,
};
use unindent::Unindent as _;
use workspace::Pane;
#[ctor::ctor]
fn init_logger() {
@@ -1869,24 +1863,6 @@ async fn test_active_call_events(
executor.run_until_parked();
assert_eq!(mem::take(&mut *events_a.borrow_mut()), vec![]);
assert_eq!(mem::take(&mut *events_b.borrow_mut()), vec![]);
// Unsharing a project should dispatch the RemoteProjectUnshared event.
active_call_a
.update(cx_a, |call, cx| call.hang_up(cx))
.await
.unwrap();
executor.run_until_parked();
assert_eq!(
mem::take(&mut *events_a.borrow_mut()),
vec![room::Event::RoomLeft { channel_id: None }]
);
assert_eq!(
mem::take(&mut *events_b.borrow_mut()),
vec![room::Event::RemoteProjectUnshared {
project_id: project_a_id,
}]
);
}
fn active_call_events(cx: &mut TestAppContext) -> Rc<RefCell<Vec<room::Event>>> {
@@ -3760,7 +3736,7 @@ async fn test_leaving_project(
// Client B can't join the project, unless they re-join the room.
cx_b.spawn(|cx| {
Project::in_room(
Project::remote(
project_id,
client_b.app_state.client.clone(),
client_b.user_store().clone(),
@@ -4662,17 +4638,9 @@ async fn test_references(
let active_call_a = cx_a.read(ActiveCall::global);
client_a.language_registry().add(rust_lang());
let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter(
"Rust",
FakeLspAdapter {
name: "my-fake-lsp-adapter",
capabilities: lsp::ServerCapabilities {
references_provider: Some(lsp::OneOf::Left(true)),
..Default::default()
},
..Default::default()
},
);
let mut fake_language_servers = client_a
.language_registry()
.register_fake_lsp_adapter("Rust", Default::default());
client_a
.fs()
@@ -4702,40 +4670,12 @@ async fn test_references(
// Request references to a symbol as the guest.
let fake_language_server = fake_language_servers.next().await.unwrap();
let (lsp_response_tx, rx) = mpsc::unbounded::<Result<Option<Vec<lsp::Location>>>>();
fake_language_server.handle_request::<lsp::request::References, _, _>({
let rx = Arc::new(Mutex::new(Some(rx)));
move |params, _| {
assert_eq!(
params.text_document_position.text_document.uri.as_str(),
"file:///root/dir-1/one.rs"
);
let rx = rx.clone();
async move {
let mut response_rx = rx.lock().take().unwrap();
let result = response_rx.next().await.unwrap();
*rx.lock() = Some(response_rx);
result
}
}
});
let references = project_b.update(cx_b, |p, cx| p.references(&buffer_b, 7, cx));
// User is informed that a request is pending.
executor.run_until_parked();
project_b.read_with(cx_b, |project, _| {
let status = project.language_server_statuses().next().cloned().unwrap();
assert_eq!(status.name, "my-fake-lsp-adapter");
fake_language_server.handle_request::<lsp::request::References, _, _>(|params, _| async move {
assert_eq!(
status.pending_work.values().next().unwrap().message,
Some("Finding references...".into())
params.text_document_position.text_document.uri.as_str(),
"file:///root/dir-1/one.rs"
);
});
// Cause the language server to respond.
lsp_response_tx
.unbounded_send(Ok(Some(vec![
Ok(Some(vec![
lsp::Location {
uri: lsp::Url::from_file_path("/root/dir-1/two.rs").unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 24), lsp::Position::new(0, 27)),
@@ -4748,18 +4688,16 @@ async fn test_references(
uri: lsp::Url::from_file_path("/root/dir-2/three.rs").unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 37), lsp::Position::new(0, 40)),
},
])))
]))
});
let references = project_b
.update(cx_b, |p, cx| p.references(&buffer_b, 7, cx))
.await
.unwrap();
let references = references.await.unwrap();
executor.run_until_parked();
project_b.read_with(cx_b, |project, cx| {
// User is informed that a request is no longer pending.
let status = project.language_server_statuses().next().unwrap();
assert!(status.pending_work.is_empty());
cx_b.read(|cx| {
assert_eq!(references.len(), 3);
assert_eq!(project.worktrees().count(), 2);
assert_eq!(project_b.read(cx).worktrees().count(), 2);
let two_buffer = references[0].buffer.read(cx);
let three_buffer = references[2].buffer.read(cx);
@@ -4777,32 +4715,6 @@ async fn test_references(
assert_eq!(references[1].range.to_offset(two_buffer), 35..38);
assert_eq!(references[2].range.to_offset(three_buffer), 37..40);
});
let references = project_b.update(cx_b, |p, cx| p.references(&buffer_b, 7, cx));
// User is informed that a request is pending.
executor.run_until_parked();
project_b.read_with(cx_b, |project, _| {
let status = project.language_server_statuses().next().cloned().unwrap();
assert_eq!(status.name, "my-fake-lsp-adapter");
assert_eq!(
status.pending_work.values().next().unwrap().message,
Some("Finding references...".into())
);
});
// Cause the LSP request to fail.
lsp_response_tx
.unbounded_send(Err(anyhow!("can't find references")))
.unwrap();
references.await.unwrap_err();
// User is informed that the request is no longer pending.
executor.run_until_parked();
project_b.read_with(cx_b, |project, _| {
let status = project.language_server_statuses().next().unwrap();
assert!(status.pending_work.is_empty());
});
}
#[gpui::test(iterations = 10)]
@@ -5019,35 +4931,9 @@ async fn test_lsp_hover(
.await;
client_a.language_registry().add(rust_lang());
let language_server_names = ["rust-analyzer", "CrabLang-ls"];
let mut fake_language_servers = client_a
.language_registry()
.register_specific_fake_lsp_adapter(
"Rust",
true,
FakeLspAdapter {
name: "rust-analyzer",
capabilities: lsp::ServerCapabilities {
hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
..lsp::ServerCapabilities::default()
},
..FakeLspAdapter::default()
},
);
let _other_server = client_a
.language_registry()
.register_specific_fake_lsp_adapter(
"Rust",
false,
FakeLspAdapter {
name: "CrabLang-ls",
capabilities: lsp::ServerCapabilities {
hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
..lsp::ServerCapabilities::default()
},
..FakeLspAdapter::default()
},
);
.register_fake_lsp_adapter("Rust", Default::default());
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
let project_id = active_call_a
@@ -5060,133 +4946,62 @@ async fn test_lsp_hover(
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
let mut servers_with_hover_requests = HashMap::default();
for i in 0..language_server_names.len() {
let new_server = fake_language_servers.next().await.unwrap_or_else(|| {
panic!(
"Failed to get language server #{i} with name {}",
&language_server_names[i]
)
});
let new_server_name = new_server.server.name();
assert!(
!servers_with_hover_requests.contains_key(new_server_name),
"Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
);
let new_server_name = new_server_name.to_string();
match new_server_name.as_str() {
"CrabLang-ls" => {
servers_with_hover_requests.insert(
new_server_name.clone(),
new_server.handle_request::<lsp::request::HoverRequest, _, _>(
move |params, _| {
assert_eq!(
params
.text_document_position_params
.text_document
.uri
.as_str(),
"file:///root-1/main.rs"
);
let name = new_server_name.clone();
async move {
Ok(Some(lsp::Hover {
contents: lsp::HoverContents::Scalar(
lsp::MarkedString::String(format!("{name} hover")),
),
range: None,
}))
}
},
),
);
}
"rust-analyzer" => {
servers_with_hover_requests.insert(
new_server_name.clone(),
new_server.handle_request::<lsp::request::HoverRequest, _, _>(
|params, _| async move {
assert_eq!(
params
.text_document_position_params
.text_document
.uri
.as_str(),
"file:///root-1/main.rs"
);
assert_eq!(
params.text_document_position_params.position,
lsp::Position::new(0, 22)
);
Ok(Some(lsp::Hover {
contents: lsp::HoverContents::Array(vec![
lsp::MarkedString::String("Test hover content.".to_string()),
lsp::MarkedString::LanguageString(lsp::LanguageString {
language: "Rust".to_string(),
value: "let foo = 42;".to_string(),
}),
]),
range: Some(lsp::Range::new(
lsp::Position::new(0, 22),
lsp::Position::new(0, 29),
)),
}))
},
),
);
}
unexpected => panic!("Unexpected server name: {unexpected}"),
}
}
// Request hover information as the guest.
let mut hovers = project_b
.update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx))
.await;
assert_eq!(
hovers.len(),
2,
"Expected two hovers from both language servers, but got: {hovers:?}"
);
let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
|mut hover_request| async move {
hover_request
.next()
.await
.expect("All hover requests should have been triggered")
let fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.handle_request::<lsp::request::HoverRequest, _, _>(
|params, _| async move {
assert_eq!(
params
.text_document_position_params
.text_document
.uri
.as_str(),
"file:///root-1/main.rs"
);
assert_eq!(
params.text_document_position_params.position,
lsp::Position::new(0, 22)
);
Ok(Some(lsp::Hover {
contents: lsp::HoverContents::Array(vec![
lsp::MarkedString::String("Test hover content.".to_string()),
lsp::MarkedString::LanguageString(lsp::LanguageString {
language: "Rust".to_string(),
value: "let foo = 42;".to_string(),
}),
]),
range: Some(lsp::Range::new(
lsp::Position::new(0, 22),
lsp::Position::new(0, 29),
)),
}))
},
))
.await;
);
let hover_info = project_b
.update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx))
.await
.unwrap()
.unwrap();
hovers.sort_by_key(|hover| hover.contents.len());
let first_hover = hovers.first().cloned().unwrap();
assert_eq!(
first_hover.contents,
vec![project::HoverBlock {
text: "CrabLang-ls hover".to_string(),
kind: HoverBlockKind::Markdown,
},]
);
let second_hover = hovers.last().cloned().unwrap();
assert_eq!(
second_hover.contents,
vec![
project::HoverBlock {
text: "Test hover content.".to_string(),
kind: HoverBlockKind::Markdown,
},
project::HoverBlock {
text: "let foo = 42;".to_string(),
kind: HoverBlockKind::Code {
language: "Rust".to_string()
},
}
]
);
buffer_b.read_with(cx_b, |buffer, _| {
let snapshot = buffer.snapshot();
assert_eq!(second_hover.range.unwrap().to_offset(&snapshot), 22..29);
assert_eq!(hover_info.range.unwrap().to_offset(&snapshot), 22..29);
assert_eq!(
hover_info.contents,
vec![
project::HoverBlock {
text: "Test hover content.".to_string(),
kind: HoverBlockKind::Markdown,
},
project::HoverBlock {
text: "let foo = 42;".to_string(),
kind: HoverBlockKind::Code {
language: "Rust".to_string()
},
}
]
);
});
}
@@ -6095,7 +5910,7 @@ async fn test_right_click_menu_behind_collab_panel(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_cmd_k_left(cx: &mut TestAppContext) {
let (_, client) = TestServer::start1(cx).await;
let client = TestServer::start1(cx).await;
let (workspace, cx) = client.build_test_workspace(cx).await;
cx.simulate_keystrokes("cmd-n");
@@ -6115,282 +5930,3 @@ async fn test_cmd_k_left(cx: &mut TestAppContext) {
assert!(workspace.items(cx).collect::<Vec<_>>().len() == 2);
});
}
#[gpui::test]
async fn test_join_after_restart(cx1: &mut TestAppContext, cx2: &mut TestAppContext) {
let (mut server, client) = TestServer::start1(cx1).await;
let channel1 = server.make_public_channel("channel1", &client, cx1).await;
let channel2 = server.make_public_channel("channel2", &client, cx1).await;
join_channel(channel1, &client, cx1).await.unwrap();
drop(client);
let client2 = server.create_client(cx2, "user_a").await;
join_channel(channel2, &client2, cx2).await.unwrap();
}
#[gpui::test]
async fn test_preview_tabs(cx: &mut TestAppContext) {
let (_server, client) = TestServer::start1(cx).await;
let (workspace, cx) = client.build_test_workspace(cx).await;
let project = workspace.update(cx, |workspace, _| workspace.project().clone());
let worktree_id = project.update(cx, |project, cx| {
project.worktrees().next().unwrap().read(cx).id()
});
let path_1 = ProjectPath {
worktree_id,
path: Path::new("1.txt").into(),
};
let path_2 = ProjectPath {
worktree_id,
path: Path::new("2.js").into(),
};
let path_3 = ProjectPath {
worktree_id,
path: Path::new("3.rs").into(),
};
let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
let get_path = |pane: &Pane, idx: usize, cx: &AppContext| {
pane.item_for_index(idx).unwrap().project_path(cx).unwrap()
};
// Opening item 3 as a "permanent" tab
workspace
.update(cx, |workspace, cx| {
workspace.open_path(path_3.clone(), None, false, cx)
})
.await
.unwrap();
pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 1);
assert_eq!(get_path(pane, 0, cx), path_3.clone());
assert_eq!(pane.preview_item_id(), None);
assert!(!pane.can_navigate_backward());
assert!(!pane.can_navigate_forward());
});
// Open item 1 as preview
workspace
.update(cx, |workspace, cx| {
workspace.open_path_preview(path_1.clone(), None, true, true, cx)
})
.await
.unwrap();
pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 2);
assert_eq!(get_path(pane, 0, cx), path_3.clone());
assert_eq!(get_path(pane, 1, cx), path_1.clone());
assert_eq!(
pane.preview_item_id(),
Some(pane.items().nth(1).unwrap().item_id())
);
assert!(pane.can_navigate_backward());
assert!(!pane.can_navigate_forward());
});
// Open item 2 as preview
workspace
.update(cx, |workspace, cx| {
workspace.open_path_preview(path_2.clone(), None, true, true, cx)
})
.await
.unwrap();
pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 2);
assert_eq!(get_path(pane, 0, cx), path_3.clone());
assert_eq!(get_path(pane, 1, cx), path_2.clone());
assert_eq!(
pane.preview_item_id(),
Some(pane.items().nth(1).unwrap().item_id())
);
assert!(pane.can_navigate_backward());
assert!(!pane.can_navigate_forward());
});
// Going back should show item 1 as preview
workspace
.update(cx, |workspace, cx| workspace.go_back(pane.downgrade(), cx))
.await
.unwrap();
pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 2);
assert_eq!(get_path(pane, 0, cx), path_3.clone());
assert_eq!(get_path(pane, 1, cx), path_1.clone());
assert_eq!(
pane.preview_item_id(),
Some(pane.items().nth(1).unwrap().item_id())
);
assert!(pane.can_navigate_backward());
assert!(pane.can_navigate_forward());
});
// Closing item 1
pane.update(cx, |pane, cx| {
pane.close_item_by_id(
pane.active_item().unwrap().item_id(),
workspace::SaveIntent::Skip,
cx,
)
})
.await
.unwrap();
pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 1);
assert_eq!(get_path(pane, 0, cx), path_3.clone());
assert_eq!(pane.preview_item_id(), None);
assert!(pane.can_navigate_backward());
assert!(!pane.can_navigate_forward());
});
// Going back should show item 1 as preview
workspace
.update(cx, |workspace, cx| workspace.go_back(pane.downgrade(), cx))
.await
.unwrap();
pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 2);
assert_eq!(get_path(pane, 0, cx), path_3.clone());
assert_eq!(get_path(pane, 1, cx), path_1.clone());
assert_eq!(
pane.preview_item_id(),
Some(pane.items().nth(1).unwrap().item_id())
);
assert!(pane.can_navigate_backward());
assert!(pane.can_navigate_forward());
});
// Close permanent tab
pane.update(cx, |pane, cx| {
let id = pane.items().nth(0).unwrap().item_id();
pane.close_item_by_id(id, workspace::SaveIntent::Skip, cx)
})
.await
.unwrap();
pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 1);
assert_eq!(get_path(pane, 0, cx), path_1.clone());
assert_eq!(
pane.preview_item_id(),
Some(pane.items().nth(0).unwrap().item_id())
);
assert!(pane.can_navigate_backward());
assert!(pane.can_navigate_forward());
});
// Split pane to the right
pane.update(cx, |pane, cx| {
pane.split(workspace::SplitDirection::Right, cx);
});
let right_pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 1);
assert_eq!(get_path(pane, 0, cx), path_1.clone());
assert_eq!(
pane.preview_item_id(),
Some(pane.items().nth(0).unwrap().item_id())
);
assert!(pane.can_navigate_backward());
assert!(pane.can_navigate_forward());
});
right_pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 1);
assert_eq!(get_path(pane, 0, cx), path_1.clone());
assert_eq!(pane.preview_item_id(), None);
assert!(!pane.can_navigate_backward());
assert!(!pane.can_navigate_forward());
});
// Open item 2 as preview in right pane
workspace
.update(cx, |workspace, cx| {
workspace.open_path_preview(path_2.clone(), None, true, true, cx)
})
.await
.unwrap();
pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 1);
assert_eq!(get_path(pane, 0, cx), path_1.clone());
assert_eq!(
pane.preview_item_id(),
Some(pane.items().nth(0).unwrap().item_id())
);
assert!(pane.can_navigate_backward());
assert!(pane.can_navigate_forward());
});
right_pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 2);
assert_eq!(get_path(pane, 0, cx), path_1.clone());
assert_eq!(get_path(pane, 1, cx), path_2.clone());
assert_eq!(
pane.preview_item_id(),
Some(pane.items().nth(1).unwrap().item_id())
);
assert!(pane.can_navigate_backward());
assert!(!pane.can_navigate_forward());
});
// Focus left pane
workspace.update(cx, |workspace, cx| {
workspace.activate_pane_in_direction(workspace::SplitDirection::Left, cx)
});
// Open item 2 as preview in left pane
workspace
.update(cx, |workspace, cx| {
workspace.open_path_preview(path_2.clone(), None, true, true, cx)
})
.await
.unwrap();
pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 1);
assert_eq!(get_path(pane, 0, cx), path_2.clone());
assert_eq!(
pane.preview_item_id(),
Some(pane.items().nth(0).unwrap().item_id())
);
assert!(pane.can_navigate_backward());
assert!(!pane.can_navigate_forward());
});
right_pane.update(cx, |pane, cx| {
assert_eq!(pane.items_len(), 2);
assert_eq!(get_path(pane, 0, cx), path_1.clone());
assert_eq!(get_path(pane, 1, cx), path_2.clone());
assert_eq!(
pane.preview_item_id(),
Some(pane.items().nth(1).unwrap().item_id())
);
assert!(pane.can_navigate_backward());
assert!(!pane.can_navigate_forward());
});
}

View File

@@ -832,7 +832,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.boxed(),
LspRequestKind::CodeAction => project
.code_actions(&buffer, offset..offset, cx)
.map(|_| Ok(()))
.map_ok(|_| ())
.boxed(),
LspRequestKind::Definition => project
.definition(&buffer, offset, cx)
@@ -1347,11 +1347,13 @@ impl RandomizedTest for ProjectCollaborationTest {
client.username
);
let host_is_dirty = host_buffer.read_with(host_cx, |b, _| b.is_dirty());
let guest_is_dirty = guest_buffer.read_with(client_cx, |b, _| b.is_dirty());
let host_saved_version_fingerprint =
host_buffer.read_with(host_cx, |b, _| b.saved_version_fingerprint());
let guest_saved_version_fingerprint =
guest_buffer.read_with(client_cx, |b, _| b.saved_version_fingerprint());
assert_eq!(
guest_is_dirty, host_is_dirty,
"guest {} dirty state does not match host's for path {path:?} in project {project_id}",
guest_saved_version_fingerprint, host_saved_version_fingerprint,
"guest {} saved fingerprint does not match host's for path {path:?} in project {project_id}",
client.username
);

Some files were not shown because too many files have changed in this diff.