Compare commits

Comparing `deeper-doc`...`crdb2` — 132 commits. (Only the commit SHAs were captured; per-commit author and date columns are not available in this extract.)

| SHA1 |
|---|
| e316f657bc |
| 492040dec4 |
| 47aa761ca9 |
| 98699a65c1 |
| f024fcff3d |
| 2f05f5bc5c |
| 22a9293cba |
| cceebee397 |
| 38fb841d1f |
| 48763d0663 |
| 089cc85d4a |
| 995b082c64 |
| 64755a7aea |
| 3348c3ab4c |
| dceb0827e8 |
| c1e18059f8 |
| 351a3c0815 |
| 28c5e33e0c |
| 5c7a8f779a |
| b7cb2381f2 |
| 7db68547fa |
| eb845ee201 |
| 8ea2bd4c7e |
| 7460381285 |
| eab98eb9c9 |
| 6eda9c9745 |
| 8dd7c2cddf |
| 3bbe574341 |
| 51ee60b421 |
| 193be271a8 |
| ce48555f8d |
| ecd9422d11 |
| 0eb26d29ee |
| 3a43adba00 |
| 3419f5fc42 |
| e7214a429d |
| c9ac7b8e35 |
| e243856559 |
| c516b8f038 |
| 03447b9e18 |
| 464a4439f7 |
| 0e60730742 |
| 25ad3185e0 |
| bac6e2fee7 |
| 065ab93ca7 |
| 83592306c5 |
| e650c0166d |
| f1859e3645 |
| b1a0188467 |
| 218629cdd4 |
| 0761383752 |
| b616f9c27f |
| 5e465f2029 |
| 7d767ff0a3 |
| 3cabd4bf64 |
| 2972bdc0e2 |
| a295b90597 |
| 891f195f7b |
| 6e1b99b039 |
| 00d1561156 |
| d5fbf75ccf |
| 61bbb3539a |
| c560a24e7d |
| da03610555 |
| 363ac6bc96 |
| 97159bd88d |
| 0b57df5deb |
| 7652a8ae23 |
| 1d193585b0 |
| af5efcea1f |
| 228202a469 |
| e1fbef0dfd |
| 7d7fd7d25d |
| 6a1b257d39 |
| a695322f83 |
| cb2d05b78f |
| 45d4de75b3 |
| 20c1f8245a |
| b16075d00c |
| da22e0dd0b |
| fb3ef0d140 |
| e71b642f44 |
| 6cedfa0ce7 |
| 209b1d1931 |
| 6986ac4c27 |
| d50d1611b9 |
| 1260c616ba |
| 89951f7e66 |
| cd81dad2fa |
| 3a08d7ab43 |
| 49dc63812a |
| c0a3642f77 |
| 4d5441c09d |
| 2dc840132b |
| 5d766f61fa |
| 18b4573064 |
| d044dc8485 |
| f00bea5d0f |
| b43df6048b |
| eb914682b3 |
| 5b7e31c075 |
| 922fcaf5a6 |
| 9f88460870 |
| e5d1cf84cf |
| 41d2c52638 |
| d1a55d64a8 |
| db06244972 |
| 597469bbbd |
| e0c192d831 |
| b2a0a7fa3c |
| 0b1a589183 |
| 7e694d1bcf |
| 890443241d |
| b014f9f017 |
| f40d2313fb |
| 2dee4f87fd |
| 54afa6f69f |
| 55511d1591 |
| 6c0cb9eaa3 |
| 24e7b69f8f |
| a4cdca5141 |
| 86cd87e993 |
| 88000eb7e2 |
| ab5a462e0c |
| 79430fc7d2 |
| f96e4ba84f |
| 7be1ffb9ec |
| 93a5d0ca29 |
| 328d98dddc |
| 76ab9e4d66 |
| c477c12956 |
| 1ffd87b87e |
.github/workflows/ci.yml (4 changes, vendored)

@@ -254,7 +254,7 @@ jobs:
target/aarch64-apple-darwin/release/Zed-aarch64.dmg
target/x86_64-apple-darwin/release/Zed-x86_64.dmg
target/release/Zed.dmg
body_file: target/release-notes.md
body_path: target/release-notes.md
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -402,7 +402,7 @@ jobs:

- name: Upload app bundle to release
uses: softprops/action-gh-release@v1
if: ${{ env.RELEASE_CHANNEL == 'preview' }}
if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
.mailmap (10 changes)

@@ -7,10 +7,12 @@
# Reference: https://git-scm.com/docs/gitmailmap

# Keep these entries sorted alphabetically.
# In Zed: `editor: sort lines case sensitive`
# In Zed: `editor: sort lines case insensitive`

Alex Viscreanu <alexviscreanu@gmail.com>
Alex Viscreanu <alexviscreanu@gmail.com> <alexandru.viscreanu@kiwi.com>
amtoaer <amtoaer@gmail.com>
amtoaer <amtoaer@gmail.com> <amtoaer@outlook.com>
Antonio Scandurra <me@as-cii.com>
Antonio Scandurra <me@as-cii.com> <antonio@zed.dev>
Bennet Bo Fenner <bennet@zed.dev>

@@ -20,6 +22,8 @@ Christian Bergschneider <christian.bergschneider@gmx.de>
Christian Bergschneider <christian.bergschneider@gmx.de> <magiclake@gmx.de>
Conrad Irwin <conrad@zed.dev>
Conrad Irwin <conrad@zed.dev> <conrad.irwin@gmail.com>
Danilo Leal <danilo@zed.dev>
Danilo Leal <danilo@zed.dev> <67129314+danilo-leal@users.noreply.github.com>
Evren Sen <146845123+evrsen@users.noreply.github.com>
Fernando Tagawa <tagawafernando@gmail.com>
Fernando Tagawa <tagawafernando@gmail.com> <fernando.tagawa.gamail.com@gmail.com>

@@ -54,12 +58,16 @@ Nate Butler <iamnbutler@gmail.com> <nate@zed.dev>
Nathan Sobo <nathan@zed.dev>
Nathan Sobo <nathan@zed.dev> <nathan@warp.dev>
Nathan Sobo <nathan@zed.dev> <nathansobo@gmail.com>
Nigel Jose <nigelmjose@gmail.com>
Nigel Jose <nigelmjose@gmail.com> <nigel.jose@student.manchester.ac.uk>
Peter Tripp <peter@zed.dev>
Peter Tripp <peter@zed.dev> <petertripp@gmail.com>
Petros Amoiridis <petros@hey.com>
Petros Amoiridis <petros@hey.com> <petros@zed.dev>
Piotr Osiewicz <piotr@zed.dev>
Piotr Osiewicz <piotr@zed.dev> <24362066+osiewicz@users.noreply.github.com>
Pocæus <github@pocaeus.com>
Pocæus <github@pocaeus.com> <pseudomata@proton.me>
Rashid Almheiri <r.muhairi@pm.me>
Rashid Almheiri <r.muhairi@pm.me> <69181766+huwaireb@users.noreply.github.com>
Richard Feldman <oss@rtfeldman.com>
@@ -19,6 +19,13 @@
"JavaScript": {
"tab_size": 2,
"formatter": "prettier"
},
"Rust": {
"tasks": {
"variables": {
"RUST_DEFAULT_PACKAGE_RUN": "zed"
}
}
}
},
"formatter": "auto",
@@ -41,7 +41,7 @@ We plan to set aside time each week to pair program with contributors on promising

Zed is made up of several smaller crates - let's go over those you're most likely to interact with:

- [`gpui`](/crates/gpui) is a GPU-accelerated UI framework which provides all of the building blocks for Zed. **We recommend familiarizing yourself with the root level GPUI documentation**
- [`gpui`](/crates/gpui) is a GPU-accelerated UI framework which provides all of the building blocks for Zed. **We recommend familiarizing yourself with the root level GPUI documentation.**
- [`editor`](/crates/editor) contains the core `Editor` type that drives both the code editor and all various input fields within Zed. It also handles a display layer for LSP features such as Inlay Hints or code completions.
- [`project`](/crates/project) manages files and navigation within the filetree. It is also Zed's side of communication with LSP.
- [`workspace`](/crates/workspace) handles local state serialization and groups projects together.
Cargo.lock (656 changes, generated)
Cargo.toml (54 changes)

@@ -20,7 +20,9 @@ members = [
"crates/command_palette",
"crates/command_palette_hooks",
"crates/copilot",
"crates/crdb",
"crates/db",
"crates/dev_server_projects",
"crates/diagnostics",
"crates/editor",
"crates/extension",

@@ -44,6 +46,7 @@ members = [
"crates/html_to_markdown",
"crates/http",
"crates/image_viewer",
"crates/indexed_docs",
"crates/inline_completion_button",
"crates/install_cli",
"crates/journal",

@@ -77,14 +80,10 @@ members = [
"crates/refineable",
"crates/refineable/derive_refineable",
"crates/release_channel",
"crates/dev_server_projects",
"crates/repl",
"crates/rich_text",
"crates/rope",
"crates/rpc",
"crates/rustdoc",
"crates/task",
"crates/tasks_ui",
"crates/search",
"crates/semantic_index",
"crates/semantic_version",

@@ -95,17 +94,20 @@ members = [
"crates/story",
"crates/storybook",
"crates/sum_tree",
"crates/tab_switcher",
"crates/supermaven",
"crates/supermaven_api",
"crates/tab_switcher",
"crates/task",
"crates/tasks_ui",
"crates/telemetry_events",
"crates/terminal",
"crates/terminal_view",
"crates/text",
"crates/theme",
"crates/theme_importer",
"crates/theme_selector",
"crates/telemetry_events",
"crates/time_format",
"crates/title_bar",
"crates/ui",
"crates/ui_text_field",
"crates/util",

@@ -139,6 +141,7 @@ members = [
"extensions/snippets",
"extensions/svelte",
"extensions/terraform",
"extensions/test-extension",
"extensions/toml",
"extensions/uiua",
"extensions/vue",

@@ -157,10 +160,8 @@ assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
assistant_slash_command = { path = "crates/assistant_slash_command" }
assistant_tooling = { path = "crates/assistant_tooling" }
async-watch = "0.3.1"
audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" }
base64 = "0.13"
breadcrumbs = { path = "crates/breadcrumbs" }
call = { path = "crates/call" }
channel = { path = "crates/channel" }

@@ -173,8 +174,8 @@ collections = { path = "crates/collections" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
copilot = { path = "crates/copilot" }
dashmap = "5.5.3"
db = { path = "crates/db" }
dev_server_projects = { path = "crates/dev_server_projects" }
diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" }
extension = { path = "crates/extension" }

@@ -195,9 +196,10 @@ gpui_macros = { path = "crates/gpui_macros" }
headless = { path = "crates/headless" }
html_to_markdown = { path = "crates/html_to_markdown" }
http = { path = "crates/http" }
install_cli = { path = "crates/install_cli" }
image_viewer = { path = "crates/image_viewer" }
indexed_docs = { path = "crates/indexed_docs" }
inline_completion_button = { path = "crates/inline_completion_button" }
install_cli = { path = "crates/install_cli" }
journal = { path = "crates/journal" }
language = { path = "crates/language" }
language_selector = { path = "crates/language_selector" }

@@ -223,21 +225,16 @@ plugin = { path = "crates/plugin" }
plugin_macros = { path = "crates/plugin_macros" }
prettier = { path = "crates/prettier" }
project = { path = "crates/project" }
proto = { path = "crates/proto" }
worktree = { path = "crates/worktree" }
project_panel = { path = "crates/project_panel" }
project_symbols = { path = "crates/project_symbols" }
proto = { path = "crates/proto" }
quick_action_bar = { path = "crates/quick_action_bar" }
recent_projects = { path = "crates/recent_projects" }
release_channel = { path = "crates/release_channel" }
dev_server_projects = { path = "crates/dev_server_projects" }
repl = { path = "crates/repl" }
rich_text = { path = "crates/rich_text" }
rope = { path = "crates/rope" }
rpc = { path = "crates/rpc" }
rustdoc = { path = "crates/rustdoc" }
task = { path = "crates/task" }
tasks_ui = { path = "crates/tasks_ui" }
search = { path = "crates/search" }
semantic_index = { path = "crates/semantic_index" }
semantic_version = { path = "crates/semantic_version" }

@@ -245,20 +242,23 @@ settings = { path = "crates/settings" }
snippet = { path = "crates/snippet" }
sqlez = { path = "crates/sqlez" }
sqlez_macros = { path = "crates/sqlez_macros" }
supermaven = { path = "crates/supermaven" }
supermaven_api = { path = "crates/supermaven_api" }
story = { path = "crates/story" }
storybook = { path = "crates/storybook" }
sum_tree = { path = "crates/sum_tree" }
supermaven = { path = "crates/supermaven" }
supermaven_api = { path = "crates/supermaven_api" }
tab_switcher = { path = "crates/tab_switcher" }
task = { path = "crates/task" }
tasks_ui = { path = "crates/tasks_ui" }
telemetry_events = { path = "crates/telemetry_events" }
terminal = { path = "crates/terminal" }
terminal_view = { path = "crates/terminal_view" }
text = { path = "crates/text" }
theme = { path = "crates/theme" }
theme_importer = { path = "crates/theme_importer" }
theme_selector = { path = "crates/theme_selector" }
telemetry_events = { path = "crates/telemetry_events" }
time_format = { path = "crates/time_format" }
title_bar = { path = "crates/title_bar" }
ui = { path = "crates/ui" }
ui_text_field = { path = "crates/ui_text_field" }
util = { path = "crates/util" }

@@ -266,12 +266,13 @@ vcs_menu = { path = "crates/vcs_menu" }
vim = { path = "crates/vim" }
welcome = { path = "crates/welcome" }
workspace = { path = "crates/workspace" }
worktree = { path = "crates/worktree" }
zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }

alacritty_terminal = "0.23"
anyhow = "1.0.57"
any_vec = "0.13"
anyhow = "1.0.57"
ashpd = "0.8.0"
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-dispatcher = { version = "0.1"}

@@ -279,20 +280,23 @@ async-fs = "1.6"
async-recursion = "1.0.0"
async-tar = "0.4.2"
async-trait = "0.1"
async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
base64 = "0.13"
bitflags = "2.4.2"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
blade-util = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
blade-util = { git = "https://github.com/kvark/blade", rev = "21a56f780e21e4cb42c70a1dcf4b59842d1ad7f7" }
cap-std = "3.0"
cargo_toml = "0.20"
chrono = { version = "0.4", features = ["serde"] }
clap = { version = "4.4", features = ["derive"] }
clickhouse = { version = "0.11.6" }
cocoa = "0.25"
ctor = "0.2.6"
core-foundation = { version = "0.9.3" }
core-foundation-sys = "0.8.6"
ctor = "0.2.6"
dashmap = "5.5.3"
derive_more = "0.99.17"
dirs = "4.0"
emojis = "0.6.1"

@@ -308,7 +312,7 @@ heed = { version = "0.20.1", features = ["read-txn-no-tls"] }
hex = "0.4.3"
html5ever = "0.27.0"
ignore = "0.4.22"
image = "0.23"
image = "0.25.1"
indexmap = { version = "1.6.2", features = ["serde"] }
indoc = "1"
# We explicitly disable http2 support in isahc.

@@ -356,7 +360,7 @@ shellexpand = "2.1.0"
shlex = "1.3.0"
signal-hook = "0.3.17"
similar = "1.3"
smallvec = { version = "1.6", features = ["union"] }
smallvec = { version = "1.6", features = ["union", "serde"] }
smol = "1.2"
strum = { version = "0.25.0", features = ["derive"] }
subtle = "2.5.0"
assets/fonts/plex-mono/ZedPlexMono-Bold.ttf (binary, new file)
assets/fonts/plex-mono/ZedPlexMono-BoldItalic.ttf (binary, new file)
assets/fonts/plex-mono/ZedPlexMono-Italic.ttf (binary, new file)
assets/fonts/plex-mono/ZedPlexMono-Regular.ttf (binary, new file)

assets/fonts/plex-mono/license.txt (new file)

@@ -0,0 +1,92 @@
Copyright © 2017 IBM Corp. with Reserved Font Name "Plex"

This Font Software is licensed under the SIL Open Font License, Version 1.1.
This license is copied below, and is also available with a FAQ at:
http://scripts.sil.org/OFL

-----------------------------------------------------------
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
-----------------------------------------------------------

PREAMBLE
The goals of the Open Font License (OFL) are to stimulate worldwide
development of collaborative font projects, to support the font creation
efforts of academic and linguistic communities, and to provide a free and
open framework in which fonts may be shared and improved in partnership
with others.

The OFL allows the licensed fonts to be used, studied, modified and
redistributed freely as long as they are not sold by themselves. The
fonts, including any derivative works, can be bundled, embedded,
redistributed and/or sold with any software provided that any reserved
names are not used by derivative works. The fonts and derivatives,
however, cannot be released under any other type of license. The
requirement for fonts to remain under this license does not apply
to any document created using the fonts or their derivatives.

DEFINITIONS
"Font Software" refers to the set of files released by the Copyright
Holder(s) under this license and clearly marked as such. This may
include source files, build scripts and documentation.

"Reserved Font Name" refers to any names specified as such after the
copyright statement(s).

"Original Version" refers to the collection of Font Software components as
distributed by the Copyright Holder(s).

"Modified Version" refers to any derivative made by adding to, deleting,
or substituting -- in part or in whole -- any of the components of the
Original Version, by changing formats or by porting the Font Software to a
new environment.

"Author" refers to any designer, engineer, programmer, technical
writer or other person who contributed to the Font Software.

PERMISSION & CONDITIONS
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Font Software, to use, study, copy, merge, embed, modify,
redistribute, and sell modified and unmodified copies of the Font
Software, subject to the following conditions:

1) Neither the Font Software nor any of its individual components,
in Original or Modified Versions, may be sold by itself.

2) Original or Modified Versions of the Font Software may be bundled,
redistributed and/or sold with any software, provided that each copy
contains the above copyright notice and this license. These can be
included either as stand-alone text files, human-readable headers or
in the appropriate machine-readable metadata fields within text or
binary files as long as those fields can be easily viewed by the user.

3) No Modified Version of the Font Software may use the Reserved Font
Name(s) unless explicit written permission is granted by the corresponding
Copyright Holder. This restriction only applies to the primary font name as
presented to the users.

4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
Software shall not be used to promote, endorse or advertise any
Modified Version, except to acknowledge the contribution(s) of the
Copyright Holder(s) and the Author(s) or with their explicit written
permission.

5) The Font Software, modified or unmodified, in part or in whole,
must be distributed entirely under this license, and must not be
distributed under any other license. The requirement for fonts to
remain under this license does not apply to any document created
using the Font Software.

TERMINATION
This license becomes null and void if any of the above conditions are
not met.

DISCLAIMER
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.
assets/fonts/plex-sans/ZedPlexSans-Bold.ttf (binary, new file)
assets/fonts/plex-sans/ZedPlexSans-BoldItalic.ttf (binary, new file)
assets/fonts/plex-sans/ZedPlexSans-Italic.ttf (binary, new file)
assets/fonts/plex-sans/ZedPlexSans-Regular.ttf (binary, new file)

assets/fonts/plex-sans/license.txt (new file)
@@ -0,0 +1,92 @@
Copyright © 2017 IBM Corp. with Reserved Font Name "Plex"
(The remaining 91 lines of this new file are the same SIL Open Font License, Version 1.1 text added above in assets/fonts/plex-mono/license.txt.)
assets/icons/book.svg (new file, 289 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-book"><path d="M4 19.5v-15A2.5 2.5 0 0 1 6.5 2H20v20H6.5a2.5 2.5 0 0 1 0-5H20"/></svg>

assets/icons/book_copy.svg (new file, 351 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-book-copy"><path d="M2 16V4a2 2 0 0 1 2-2h11"/><path d="M5 14H4a2 2 0 1 0 0 4h1"/><path d="M22 18H11a2 2 0 1 0 0 4h11V6H11a2 2 0 0 0-2 2v12"/></svg>

assets/icons/book_plus.svg (new file, 332 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-book-plus"><path d="M4 19.5v-15A2.5 2.5 0 0 1 6.5 2H20v20H6.5a2.5 2.5 0 0 1 0-5H20"/><path d="M9 10h6"/><path d="M12 7v6"/></svg>

assets/icons/chevron_up_down.svg (new file, 276 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-chevrons-up-down"><path d="m7 15 5 5 5-5"/><path d="m7 9 5-5 5 5"/></svg>

@@ -94,6 +94,7 @@
"lua": "lua",
"m4a": "audio",
"m4v": "video",
"markdown": "document",
"md": "document",
"mdb": "storage",
"mdf": "storage",

assets/icons/font.svg (new file, 329 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-type"><polyline points="4 7 4 4 20 4 20 7"/><line x1="9" x2="15" y1="20" y2="20"/><line x1="12" x2="12" y1="4" y2="20"/></svg>

assets/icons/font_size.svg (new file, 339 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-a-large-small"><path d="M21 14h-5"/><path d="M16 16v-3.5a2.5 2.5 0 0 1 5 0V16"/><path d="M4.5 13h6"/><path d="m3 16 4.5-9 4.5 9"/></svg>

assets/icons/font_weight.svg (new file, 296 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-bold"><path d="M6 12h9a4 4 0 0 1 0 8H7a1 1 0 0 1-1-1V5a1 1 0 0 1 1-1h7a4 4 0 0 1 0 8"/></svg>

assets/icons/generic_close.svg (new file, 291 B)
@@ -0,0 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M11.5 4.5L4.5 11.5" stroke="black" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M4.5 4.5L11.5 11.5" stroke="black" stroke-linecap="square" stroke-linejoin="round"/>
</svg>

assets/icons/generic_maximize.svg (new file, 161 B)
@@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M11.5 4.5H4.5V11.5H11.5V4.5Z" stroke="#FBF1C7"/>
</svg>

assets/icons/generic_minimize.svg (new file, 138 B)
@@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4 8H12" stroke="black"/>
</svg>

assets/icons/generic_restore.svg (new file, 228 B)
@@ -0,0 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9.5 6.5H3.5V12.5H9.5V6.5Z" stroke="#FBF1C7"/>
<path d="M10 8.5L12.5 8.5L12.5 3.5L7.5 3.5L7.5 6" stroke="#FBF1C7"/>
</svg>

assets/icons/line_height.svg (new file, 539 B)
@@ -0,0 +1,6 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4 13.6667H12" stroke="#B3B3B3" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M4 2.33333H12" stroke="#B3B3B3" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5 11L8 5L11 11" stroke="#B3B3B3" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M6 9H10" stroke="#B3B3B3" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

assets/icons/visible.svg (new file, 301 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-eye"><path d="M2 12s3-7 10-7 10 7 10 7-3 7-10 7-10-7-10-7Z"/><circle cx="12" cy="12" r="3"/></svg>
@@ -70,6 +70,11 @@
{
"context": "ProjectPanel",
"bindings": {
"f2": "project_panel::Rename",
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
"cmd-x": "project_panel::Cut",
"cmd-c": "project_panel::Copy",
"cmd-v": "project_panel::Paste",
"ctrl-[": "project_panel::CollapseSelectedEntry",
"ctrl-b": "project_panel::CollapseSelectedEntry",
"alt-b": "project_panel::CollapseSelectedEntry",

@@ -77,5 +82,13 @@
"ctrl-f": "project_panel::ExpandSelectedEntry",
"ctrl-shift-c": "project_panel::CopyPath"
}
},
{
"context": "ProjectPanel && not_editing",
"bindings": {
"a": "project_panel::NewFile",
"shift-a": "project_panel::NewDirectory",
"shift-d": "project_panel::Duplicate"
}
}
]
@@ -55,11 +55,13 @@
"up": "editor::MoveUp",
"ctrl-up": "editor::LineUp",
"ctrl-down": "editor::LineDown",
"pageup": "editor::PageUp",
"pageup": "editor::MovePageUp",
"alt-pageup": "editor::PageUp",
"shift-pageup": "editor::SelectPageUp",
"home": "editor::MoveToBeginningOfLine",
"down": "editor::MoveDown",
"pagedown": "editor::PageDown",
"pagedown": "editor::MovePageDown",
"alt-pagedown": "editor::PageDown",
"shift-pagedown": "editor::SelectPageDown",
"end": "editor::MoveToEndOfLine",
"left": "editor::MoveLeft",

@@ -152,7 +154,9 @@
// "focus": false
// }
// ],
"ctrl->": "assistant::QuoteSelection"
"ctrl->": "assistant::QuoteSelection",
"ctrl-<": "assistant::InsertIntoEditor",
"ctrl-alt-e": "editor::SelectEnclosingSymbol"
}
},
{

@@ -545,6 +549,7 @@
"ctrl-enter": "assistant::Assist",
"ctrl-s": "workspace::Save",
"ctrl->": "assistant::QuoteSelection",
"ctrl-<": "assistant::InsertIntoEditor",
"shift-enter": "assistant::Split",
"ctrl-r": "assistant::CycleMessageRole",
"enter": "assistant::ConfirmCommand",

@@ -586,8 +591,9 @@
"alt-ctrl-shift-c": "project_panel::CopyRelativePath",
"f2": "project_panel::Rename",
"enter": "project_panel::Rename",
"backspace": "project_panel::Trash",
"delete": "project_panel::Trash",
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
"shift-delete": ["project_panel::Delete", { "skip_prompt": false }],
"delete": ["project_panel::Trash", { "skip_prompt": false }],
"ctrl-backspace": ["project_panel::Delete", { "skip_prompt": false }],
"ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }],
"alt-ctrl-r": "project_panel::RevealInFinder",

@@ -649,13 +655,20 @@
"ctrl-insert": "terminal::Copy",
"shift-ctrl-v": "terminal::Paste",
"shift-insert": "terminal::Paste",
"ctrl-enter": "assistant::InlineAssist",
"up": ["terminal::SendKeystroke", "up"],
"pageup": ["terminal::SendKeystroke", "pageup"],
"down": ["terminal::SendKeystroke", "down"],
"pagedown": ["terminal::SendKeystroke", "pagedown"],
"escape": ["terminal::SendKeystroke", "escape"],
"enter": ["terminal::SendKeystroke", "enter"],
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"]
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
"shift-pageup": "terminal::ScrollPageUp",
"shift-pagedown": "terminal::ScrollPageDown",
"shift-up": "terminal::ScrollLineUp",
"shift-down": "terminal::ScrollLineDown",
"shift-home": "terminal::ScrollToTop",
"shift-end": "terminal::ScrollToBottom"
}
}
]
@@ -61,13 +61,17 @@
"cmd-shift-z": "editor::Redo",
"up": "editor::MoveUp",
"ctrl-up": "editor::MoveToStartOfParagraph",
"pageup": "editor::PageUp",
"shift-pageup": "editor::MovePageUp",
"pageup": "editor::MovePageUp",
"shift-pageup": "editor::SelectPageUp",
"cmd-pageup": "editor::PageUp",
"ctrl-pageup": "editor::LineUp",
"home": "editor::MoveToBeginningOfLine",
"down": "editor::MoveDown",
"ctrl-down": "editor::MoveToEndOfParagraph",
"pagedown": "editor::PageDown",
"shift-pagedown": "editor::MovePageDown",
"pagedown": "editor::MovePageDown",
"shift-pagedown": "editor::SelectPageDown",
"cmd-pagedown": "editor::PageDown",
"ctrl-pagedown": "editor::LineDown",
"end": "editor::MoveToEndOfLine",
"left": "editor::MoveLeft",
"right": "editor::MoveRight",

@@ -188,7 +192,9 @@
"focus": false
}
],
"cmd->": "assistant::QuoteSelection"
"cmd->": "assistant::QuoteSelection",
"cmd-<": "assistant::InsertIntoEditor",
"cmd-alt-e": "editor::SelectEnclosingSymbol"
}
},
{

@@ -233,6 +239,7 @@
"cmd-enter": "assistant::Assist",
"cmd-s": "workspace::Save",
"cmd->": "assistant::QuoteSelection",
"cmd-<": "assistant::InsertIntoEditor",
"shift-enter": "assistant::Split",
"ctrl-r": "assistant::CycleMessageRole",
"enter": "assistant::ConfirmCommand",

@@ -604,6 +611,7 @@
"left": "project_panel::CollapseSelectedEntry",
"right": "project_panel::ExpandSelectedEntry",
"cmd-n": "project_panel::NewFile",
"cmd-d": "project_panel::Duplicate",
"alt-cmd-n": "project_panel::NewDirectory",
"cmd-x": "project_panel::Cut",
"cmd-c": "project_panel::Copy",

@@ -613,8 +621,9 @@
"enter": "project_panel::Rename",
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
"delete": ["project_panel::Trash", { "skip_prompt": false }],
"cmd-backspace": ["project_panel::Delete", { "skip_prompt": false }],
"cmd-backspace": ["project_panel::Trash", { "skip_prompt": true }],
"cmd-delete": ["project_panel::Delete", { "skip_prompt": false }],
"cmd-alt-backspace": ["project_panel::Delete", { "skip_prompt": false }],
"alt-cmd-r": "project_panel::RevealInFinder",
"alt-shift-f": "project_panel::NewSearchInDirectory",
"shift-down": "menu::SelectNext",

@@ -681,6 +690,7 @@
"cmd-c": "terminal::Copy",
"cmd-v": "terminal::Paste",
"cmd-k": "terminal::Clear",
"ctrl-enter": "assistant::InlineAssist",
// Some nice conveniences
"cmd-backspace": ["terminal::SendText", "\u0015"],
"cmd-right": ["terminal::SendText", "\u0005"],

@@ -696,7 +706,13 @@
"pagedown": ["terminal::SendKeystroke", "pagedown"],
"escape": ["terminal::SendKeystroke", "escape"],
"enter": ["terminal::SendKeystroke", "enter"],
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"]
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
"shift-pageup": "terminal::ScrollPageUp",
"shift-pagedown": "terminal::ScrollPageDown",
"shift-up": "terminal::ScrollLineUp",
"shift-down": "terminal::ScrollLineDown",
"shift-home": "terminal::ScrollToTop",
"shift-end": "terminal::ScrollToBottom"
}
}
]
@@ -78,6 +78,7 @@
"bindings": {
"cmd-shift-o": "file_finder::Toggle",
"cmd-shift-a": "command_palette::Toggle",
"shift shift": "command_palette::Toggle",
"cmd-alt-o": "project_symbols::Toggle",
"cmd-1": "workspace::ToggleLeftDock",
"cmd-6": "diagnostics::Deploy"

@@ -94,6 +95,10 @@
"context": "ProjectPanel",
"bindings": {
"enter": "project_panel::Open",
"cmd-backspace": ["project_panel::Trash", { "skip_prompt": false }],
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
"delete": ["project_panel::Trash", { "skip_prompt": false }],
"shift-delete": ["project_panel::Delete", { "skip_prompt": false }],
"shift-f6": "project_panel::Rename"
}
}
@@ -19,6 +19,7 @@
"ctrl-shift-m": "editor::SelectLargerSyntaxNode",
"cmd-shift-l": "editor::SplitSelectionIntoLines",
"cmd-shift-a": "editor::SelectLargerSyntaxNode",
"cmd-shift-d": "editor::DuplicateLineDown",
"shift-f12": "editor::FindAllReferences",
"alt-cmd-down": "editor::GoToDefinition",
"ctrl-alt-cmd-down": "editor::GoToDefinitionSplit",
@@ -87,7 +87,15 @@
},
{
"context": "ProjectPanel",
"bindings": {}
"bindings": {
"cmd-backspace": ["project_panel::Trash", { "skip_prompt": true }],
"cmd-d": "project_panel::Duplicate",
"cmd-n": "project_panel::NewFolder",
"return": "project_panel::Rename",
"cmd-c": "project_panel::Copy",
"cmd-v": "project_panel::Paste",
"cmd-alt-c": "project_panel::CopyPath"
}
},
{
"context": "Dock",
@@ -47,19 +47,16 @@
"{": "vim::StartOfParagraph",
"}": "vim::EndOfParagraph",
"|": "vim::GoToColumn",

// Word motions
"w": "vim::NextWordStart",
"e": "vim::NextWordEnd",
"b": "vim::PreviousWordStart",
"g e": "vim::PreviousWordEnd",

// Subword motions
// "w": "vim::NextSubwordStart",
// "b": "vim::PreviousSubwordStart",
// "e": "vim::NextSubwordEnd",
// "g e": "vim::PreviousSubwordEnd",

"shift-w": [
"vim::NextWordStart",
{

@@ -78,8 +75,12 @@
"ignorePunctuation": true
}
],
"g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }],

"g shift-e": [
"vim::PreviousWordEnd",
{
"ignorePunctuation": true
}
],
"/": "vim::Search",
"g /": "pane::DeploySearch",
"?": [

@@ -126,8 +127,22 @@
}
],
"m": ["vim::PushOperator", "Mark"],
"'": ["vim::PushOperator", { "Jump": { "line": true } }],
"`": ["vim::PushOperator", { "Jump": { "line": false } }],
"'": [
"vim::PushOperator",
{
"Jump": {
"line": true
}
}
],
"`": [
"vim::PushOperator",
{
"Jump": {
"line": false
}
}
],
";": "vim::RepeatFind",
",": "vim::RepeatFindReversed",
"ctrl-o": "pane::GoBack",

@@ -331,7 +346,6 @@
"ctrl-w ctrl-o": "workspace::CloseInactiveTabsAndPanes",
"ctrl-w n": ["workspace::NewFileInDirection", "Up"],
"ctrl-w ctrl-n": ["workspace::NewFileInDirection", "Up"],

"ctrl-w d": "editor::GoToDefinitionSplit",
"ctrl-w g d": "editor::GoToDefinitionSplit",
"ctrl-w shift-d": "editor::GoToTypeDefinitionSplit",

@@ -389,6 +403,9 @@
"g shift-u": ["vim::PushOperator", "Uppercase"],
"g ~": ["vim::PushOperator", "OppositeCase"],
"\"": ["vim::PushOperator", "Register"],
"q": "vim::ToggleRecord",
"shift-q": "vim::ReplayLastRecording",
"@": ["vim::PushOperator", "ReplayRegister"],
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-pageup": "pane::ActivatePrevItem",
// tree-sitter related commands

@@ -676,7 +693,8 @@
{
"context": "EmptyPane || SharedScreen",
"bindings": {
":": "command_palette::Toggle"
":": "command_palette::Toggle",
"g /": "pane::DeploySearch"
}
},
{

@@ -704,5 +722,14 @@
"g g": "menu::SelectFirst",
"-": "project_panel::SelectParent"
}
},
{
"context": "OutlinePanel",
"bindings": {
"j": "menu::SelectNext",
"k": "menu::SelectPrev",
"shift-g": "menu::SelectLast",
"g g": "menu::SelectFirst"
}
}
]
@@ -1,19 +1,15 @@
{
// The name of the Zed theme to use for the UI.
//
// The theme can also be set to follow system preferences:
//
// "theme": {
//   "mode": "system",
//   "light": "One Light",
//   "dark": "One Dark"
// }
//
// Where `mode` is one of:
// `mode` is one of:
// - "system": Use the theme that corresponds to the system's appearance
// - "light": Use the theme indicated by the "light" field
// - "dark": Use the theme indicated by the "dark" field
"theme": "One Dark",
"theme": {
"mode": "system",
"light": "One Light",
"dark": "One Dark"
},
// The name of a base set of key bindings to use.
// This setting can take four values, each named after another
// text editor:

@@ -29,7 +25,7 @@
"inline_completion_provider": "copilot"
},
// The name of a font to use for rendering text in the editor
"buffer_font_family": "Zed Mono",
"buffer_font_family": "Zed Plex Mono",
// The OpenType features to enable for text in the editor.
"buffer_font_features": {
// Disable ligatures:

@@ -42,16 +38,17 @@
// Set the buffer's line height.
// May take 3 values:
// 1. Use a line height that's comfortable for reading (1.618)
// "line_height": "comfortable"
// "buffer_line_height": "comfortable"
// 2. Use a standard line height, (1.3)
// "line_height": "standard",
// "buffer_line_height": "standard",
// 3. Use a custom line height
// "line_height": {
// "buffer_line_height": {
// "custom": 2
// },
"buffer_line_height": "comfortable",
// The name of a font to use for rendering text in the UI
"ui_font_family": ".SystemUIFont",
// (On macOS) You can set this to ".SysmtemUIFont" to use the system font
"ui_font_family": "Zed Plex Sans",
// The OpenType features to enable for text in the UI
"ui_font_features": {
// Disable ligatures:

@@ -165,11 +162,11 @@
// 1. Draw tabs and spaces only for the selected text (default):
// "selection"
// 2. Do not draw any tabs or spaces:
// "none"
// "none"
// 3. Draw all invisible symbols:
// "all"
// "all"
// 4. Draw whitespaces at boundaries only:
// "boundaries"
// "boundary"
// For a whitespace to be on a boundary, any of the following conditions need to be met:
// - It is a tab
// - It is adjacent to an edge (start or end)

@@ -310,8 +307,8 @@
// when a corresponding project entry becomes active.
// Gitignored entries are never auto revealed.
"auto_reveal_entries": true,
/// Whether to fold directories automatically
/// when a directory has only one directory inside.
// Whether to fold directories automatically and show compact folders
// (e.g. "a/b/c" ) when a directory has only one subdirectory inside.
"auto_fold_dirs": false,
/// Scrollbar-related settings
"scrollbar": {

@@ -669,13 +666,17 @@
// "font_size": 15,
// Set the terminal's font family. If this option is not included,
// the terminal will default to matching the buffer's font family.
// "font_family": "Zed Mono",
// "font_family": "Zed Plex Mono",
// Sets the maximum number of lines in the terminal's scrollback buffer.
// Default: 10_000, maximum: 100_000 (all bigger values set will be treated as 100_000), 0 disables the scrolling.
// Existing terminals will not pick up this change until they are recreated.
// "max_scroll_history_lines": 10000,
},
"code_actions_on_format": {},
/// Settings related to running tasks.
"tasks": {
"variables": {}
},
// An object whose keys are language names, and whose values
// are arrays of filenames or extensions of files that should
// use those languages.

@@ -688,7 +689,9 @@
// "TOML": ["Embargo.lock"]
// }
//
"file_types": {},
"file_types": {
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json"]
},
// The extensions that Zed should automatically install on startup.
//
// If you don't want any of these extensions, add this field to your settings

@@ -758,6 +761,11 @@
"allowed": true
}
},
"JSONC": {
"prettier": {
"allowed": true
}
},
"Markdown": {
"format_on_save": "off",
"prettier": {
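The scrollback comment in the terminal block above describes a simple clamping rule: default 10_000 lines, anything above 100_000 treated as 100_000, and 0 disables scrolling. A minimal Rust sketch of that rule, using hypothetical names rather than Zed's actual settings code:

// Sketch only: illustrates the clamping described in the comment above,
// not Zed's real implementation or type names.
fn effective_scroll_history_lines(configured: Option<usize>) -> usize {
    const DEFAULT: usize = 10_000;
    const MAX: usize = 100_000;
    // A configured value of 0 simply yields 0, which disables scrolling.
    configured.unwrap_or(DEFAULT).min(MAX)
}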
@@ -8,5 +8,10 @@
// from the command palette or from `Zed` application menu.
{
"ui_font_size": 16,
"buffer_font_size": 16
"buffer_font_size": 16,
"theme": {
"mode": "system",
"light": "One Light",
"dark": "One Dark"
}
}
@@ -1,5 +1,3 @@
version: "3.7"

services:
postgres:
image: postgres:15
@@ -52,4 +52,13 @@ impl Assets {

cx.text_system().add_fonts(embedded_fonts)
}

pub fn load_test_fonts(&self, cx: &AppContext) {
cx.text_system()
.add_fonts(vec![self
.load("fonts/plex-mono/ZedPlexMono-Regular.ttf")
.unwrap()
.unwrap()])
.unwrap()
}
}
@@ -13,8 +13,8 @@ path = "src/assistant.rs"
doctest = false

[dependencies]
anyhow.workspace = true
anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
assistant_slash_command.workspace = true
async-watch.workspace = true
cargo_toml.workspace = true

@@ -23,6 +23,7 @@ client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
editor.workspace = true
feature_flags.workspace = true
file_icons.workspace = true
fs.workspace = true
futures.workspace = true

@@ -31,6 +32,7 @@ gpui.workspace = true
heed.workspace = true
html_to_markdown.workspace = true
http.workspace = true
indexed_docs.workspace = true
indoc.workspace = true
language.workspace = true
log.workspace = true

@@ -44,7 +46,6 @@ paths.workspace = true
project.workspace = true
regex.workspace = true
rope.workspace = true
rustdoc.workspace = true
schemars.workspace = true
search.workspace = true
semantic_index.workspace = true

@@ -56,6 +57,7 @@ smol.workspace = true
strsim = "0.11"
strum.workspace = true
telemetry_events.workspace = true
terminal.workspace = true
terminal_view.workspace = true
theme.workspace = true
tiktoken-rs.workspace = true
@@ -9,19 +9,20 @@ mod prompts;
mod search;
mod slash_command;
mod streaming_diff;
mod terminal_inline_assistant;

pub use assistant_panel::AssistantPanel;

pub use assistant_panel::{AssistantPanel, AssistantPanelEvent};
use assistant_settings::{AnthropicModel, AssistantSettings, CloudModel, OllamaModel, OpenAiModel};
use assistant_slash_command::SlashCommandRegistry;
use client::{proto, Client};
use command_palette_hooks::CommandPaletteFilter;
pub(crate) use completion_provider::*;
pub(crate) use context_store::*;
use fs::Fs;
use gpui::{actions, AppContext, Global, SharedString, UpdateGlobal};
use indexed_docs::IndexedDocsRegistry;
pub(crate) use inline_assistant::*;
pub(crate) use model_selector::*;
use rustdoc::RustdocStore;
use semantic_index::{CloudEmbeddingProvider, SemanticIndex};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore};

@@ -42,6 +43,7 @@ actions!(
Split,
CycleMessageRole,
QuoteSelection,
InsertIntoEditor,
ToggleFocus,
ResetKey,
InlineAssist,

@@ -264,7 +266,7 @@ impl Assistant {
}
}

pub fn init(client: Arc<Client>, cx: &mut AppContext) {
pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
cx.set_global(Assistant::default());
AssistantSettings::register(cx);

@@ -288,8 +290,9 @@ pub fn init(client: Arc<Client>, cx: &mut AppContext) {
assistant_slash_command::init(cx);
register_slash_commands(cx);
assistant_panel::init(cx);
inline_assistant::init(client.telemetry().clone(), cx);
RustdocStore::init_global(cx);
inline_assistant::init(fs.clone(), client.telemetry().clone(), cx);
terminal_inline_assistant::init(fs.clone(), client.telemetry().clone(), cx);
IndexedDocsRegistry::init_global(cx);

CommandPaletteFilter::update_global(cx, |filter, _cx| {
filter.hide_namespace(Assistant::NAMESPACE);

@@ -324,6 +327,24 @@ fn register_slash_commands(cx: &mut AppContext) {
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
}

pub fn humanize_token_count(count: usize) -> String {
    match count {
        0..=999 => count.to_string(),
        1000..=9999 => {
            let thousands = count / 1000;
            let hundreds = (count % 1000 + 50) / 100;
            if hundreds == 0 {
                format!("{}k", thousands)
            } else if hundreds == 10 {
                format!("{}k", thousands + 1)
            } else {
                format!("{}.{}k", thousands, hundreds)
            }
        }
        _ => format!("{}k", (count + 500) / 1000),
    }
}

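The new `humanize_token_count` helper rounds token counts for display. A minimal usage sketch (not part of this diff; it just assumes the function above is in scope) showing the rounding its arithmetic produces:

#[test]
fn humanize_token_count_rounds_as_described() {
    // Below 1k the count is printed verbatim.
    assert_eq!(humanize_token_count(999), "999");
    // Between 1k and 9.9k one decimal is kept: hundreds = (234 + 50) / 100 = 2.
    assert_eq!(humanize_token_count(1_234), "1.2k");
    // When the hundreds round up to 10, the whole next thousand is shown.
    assert_eq!(humanize_token_count(1_960), "2k");
    // At 10k and above only whole thousands are shown, rounded to nearest.
    assert_eq!(humanize_token_count(10_499), "10k");
    assert_eq!(humanize_token_count(10_500), "11k");
}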
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
@@ -1,15 +1,18 @@
use crate::{
    assistant_settings::{AssistantDockPosition, AssistantSettings},
    humanize_token_count,
    prompt_library::open_prompt_library,
    search::*,
    slash_command::{
        default_command::DefaultSlashCommand, SlashCommandCompletionProvider, SlashCommandLine,
        SlashCommandRegistry,
    },
    terminal_inline_assistant::TerminalInlineAssistant,
    ApplyEdit, Assist, CompletionProvider, ConfirmCommand, ContextStore, CycleMessageRole,
    InlineAssist, InlineAssistant, LanguageModelRequest, LanguageModelRequestMessage, MessageId,
    MessageMetadata, MessageStatus, ModelSelector, QuoteSelection, ResetKey, Role, SavedContext,
    SavedContextMetadata, SavedMessage, Split, ToggleFocus, ToggleHistory, ToggleModelSelector,
    InlineAssist, InlineAssistant, InsertIntoEditor, LanguageModelRequest,
    LanguageModelRequestMessage, MessageId, MessageMetadata, MessageStatus, ModelSelector,
    QuoteSelection, ResetKey, Role, SavedContext, SavedContextMetadata, SavedMessage, Split,
    ToggleFocus, ToggleHistory, ToggleModelSelector,
};
use anyhow::{anyhow, Result};
use assistant_slash_command::{SlashCommand, SlashCommandOutput, SlashCommandOutputSection};

@@ -36,6 +39,7 @@ use gpui::{
    Subscription, Task, Transformation, UpdateGlobal, View, ViewContext, VisualContext, WeakView,
    WindowContext,
};
use indexed_docs::{IndexedDocsStore, PackageName, ProviderId};
use language::{
    language_settings::SoftWrap, AnchorRangeExt as _, AutoindentMode, Buffer, LanguageRegistry,
    LspAdapterDelegate, OffsetRangeExt as _, Point, ToOffset as _,

@@ -44,7 +48,6 @@ use multi_buffer::MultiBufferRow;
use paths::contexts_dir;
use picker::{Picker, PickerDelegate};
use project::{Project, ProjectLspAdapterDelegate, ProjectTransaction};
use rustdoc::{CrateName, RustdocStore};
use search::{buffer_search::DivRegistrar, BufferSearchBar};
use settings::Settings;
use std::{

@@ -57,6 +60,7 @@ use std::{
    time::{Duration, Instant},
};
use telemetry_events::AssistantKind;
use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
use ui::{
    prelude::*, ButtonLike, ContextMenu, Disclosure, ElevationIndex, KeyBinding, ListItem,
    ListItemSpacing, PopoverMenu, PopoverMenuHandle, Tab, TabBar, Tooltip,

@@ -83,12 +87,17 @@ pub fn init(cx: &mut AppContext) {
    workspace.toggle_panel_focus::<AssistantPanel>(cx);
})
.register_action(AssistantPanel::inline_assist)
.register_action(ContextEditor::quote_selection);
.register_action(ContextEditor::quote_selection)
.register_action(ContextEditor::insert_selection);
},
)
.detach();
}

pub enum AssistantPanelEvent {
    ContextEdited,
}

pub struct AssistantPanel {
    workspace: WeakView<Workspace>,
    width: Option<Pixels>,

@@ -119,6 +128,11 @@ enum SavedContextPickerEvent {
    Confirmed { path: PathBuf },
}

enum InlineAssistTarget {
    Editor(View<Editor>, bool),
    Terminal(View<TerminalView>),
}

impl EventEmitter<SavedContextPickerEvent> for Picker<SavedContextPickerDelegate> {}

impl SavedContextPickerDelegate {

@@ -360,11 +374,113 @@ impl AssistantPanel {
    return;
}

let Some(assistant) = workspace.panel::<AssistantPanel>(cx) else {
let Some(assistant_panel) = workspace.panel::<AssistantPanel>(cx) else {
    return;
};

let context_editor = assistant
let Some(inline_assist_target) =
    Self::resolve_inline_assist_target(workspace, &assistant_panel, cx)
else {
    return;
};

if assistant_panel.update(cx, |assistant, cx| assistant.is_authenticated(cx)) {
    match inline_assist_target {
        InlineAssistTarget::Editor(active_editor, include_context) => {
            InlineAssistant::update_global(cx, |assistant, cx| {
                assistant.assist(
                    &active_editor,
                    Some(cx.view().downgrade()),
                    include_context.then_some(&assistant_panel),
                    cx,
                )
            })
        }
        InlineAssistTarget::Terminal(active_terminal) => {
            TerminalInlineAssistant::update_global(cx, |assistant, cx| {
                assistant.assist(
                    &active_terminal,
                    Some(cx.view().downgrade()),
                    Some(&assistant_panel),
                    cx,
                )
            })
        }
    }
} else {
    let assistant_panel = assistant_panel.downgrade();
    cx.spawn(|workspace, mut cx| async move {
        assistant_panel
            .update(&mut cx, |assistant, cx| assistant.authenticate(cx))?
            .await?;
        if assistant_panel.update(&mut cx, |panel, cx| panel.is_authenticated(cx))? {
            cx.update(|cx| match inline_assist_target {
                InlineAssistTarget::Editor(active_editor, include_context) => {
                    let assistant_panel = if include_context {
                        assistant_panel.upgrade()
                    } else {
                        None
                    };
                    InlineAssistant::update_global(cx, |assistant, cx| {
                        assistant.assist(
                            &active_editor,
                            Some(workspace),
                            assistant_panel.as_ref(),
                            cx,
                        )
                    })
                }
                InlineAssistTarget::Terminal(active_terminal) => {
                    TerminalInlineAssistant::update_global(cx, |assistant, cx| {
                        assistant.assist(
                            &active_terminal,
                            Some(workspace),
                            assistant_panel.upgrade().as_ref(),
                            cx,
                        )
                    })
                }
            })?
        } else {
            workspace.update(&mut cx, |workspace, cx| {
                workspace.focus_panel::<AssistantPanel>(cx)
            })?;
        }

        anyhow::Ok(())
    })
    .detach_and_log_err(cx)
}
}

fn resolve_inline_assist_target(
    workspace: &mut Workspace,
    assistant_panel: &View<AssistantPanel>,
    cx: &mut WindowContext,
) -> Option<InlineAssistTarget> {
    if let Some(terminal_panel) = workspace.panel::<TerminalPanel>(cx) {
        if terminal_panel
            .read(cx)
            .focus_handle(cx)
            .contains_focused(cx)
        {
            use feature_flags::FeatureFlagAppExt;
            if !cx.has_flag::<feature_flags::TerminalInlineAssist>() {
                return None;
            }

            if let Some(terminal_view) = terminal_panel
                .read(cx)
                .pane()
                .read(cx)
                .active_item()
|
||||
.and_then(|t| t.downcast::<TerminalView>())
|
||||
{
|
||||
return Some(InlineAssistTarget::Terminal(terminal_view));
|
||||
}
|
||||
}
|
||||
}
|
||||
let context_editor = assistant_panel
|
||||
.read(cx)
|
||||
.active_context_editor()
|
||||
.and_then(|editor| {
|
||||
@@ -376,51 +492,15 @@ impl AssistantPanel {
|
||||
}
|
||||
});
|
||||
|
||||
let include_context;
|
||||
let active_editor;
|
||||
if let Some(context_editor) = context_editor {
|
||||
active_editor = context_editor;
|
||||
include_context = false;
|
||||
Some(InlineAssistTarget::Editor(context_editor, false))
|
||||
} else if let Some(workspace_editor) = workspace
|
||||
.active_item(cx)
|
||||
.and_then(|item| item.act_as::<Editor>(cx))
|
||||
{
|
||||
active_editor = workspace_editor;
|
||||
include_context = true;
|
||||
Some(InlineAssistTarget::Editor(workspace_editor, true))
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
if assistant.update(cx, |assistant, cx| assistant.is_authenticated(cx)) {
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
assistant.assist(
|
||||
&active_editor,
|
||||
Some(cx.view().downgrade()),
|
||||
include_context,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
} else {
|
||||
let assistant = assistant.downgrade();
|
||||
cx.spawn(|workspace, mut cx| async move {
|
||||
assistant
|
||||
.update(&mut cx, |assistant, cx| assistant.authenticate(cx))?
|
||||
.await?;
|
||||
if assistant.update(&mut cx, |assistant, cx| assistant.is_authenticated(cx))? {
|
||||
cx.update(|cx| {
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
assistant.assist(&active_editor, Some(workspace), include_context, cx)
|
||||
})
|
||||
})?
|
||||
} else {
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
workspace.focus_panel::<AssistantPanel>(cx)
|
||||
})?;
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx)
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
@@ -460,7 +540,7 @@ impl AssistantPanel {
|
||||
_subscriptions: subscriptions,
|
||||
});
|
||||
self.show_saved_contexts = false;
|
||||
|
||||
cx.emit(AssistantPanelEvent::ContextEdited);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -472,6 +552,7 @@ impl AssistantPanel {
|
||||
) {
|
||||
match event {
|
||||
ContextEditorEvent::TabContentChanged => cx.notify(),
|
||||
ContextEditorEvent::Edited => cx.emit(AssistantPanelEvent::ContextEdited),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -863,18 +944,33 @@ impl AssistantPanel {
        context: &Model<Context>,
        cx: &mut ViewContext<Self>,
    ) -> Option<impl IntoElement> {
        let remaining_tokens = context.read(cx).remaining_tokens(cx)?;
        let remaining_tokens_color = if remaining_tokens <= 0 {
        let model = CompletionProvider::global(cx).model();
        let token_count = context.read(cx).token_count()?;
        let max_token_count = model.max_token_count();

        let remaining_tokens = max_token_count as isize - token_count as isize;
        let token_count_color = if remaining_tokens <= 0 {
            Color::Error
        } else if remaining_tokens <= 500 {
        } else if token_count as f32 / max_token_count as f32 >= 0.8 {
            Color::Warning
        } else {
            Color::Muted
        };

        Some(
            Label::new(remaining_tokens.to_string())
                .size(LabelSize::Small)
                .color(remaining_tokens_color),
            h_flex()
                .gap_0p5()
                .child(
                    Label::new(humanize_token_count(token_count))
                        .size(LabelSize::Small)
                        .color(token_count_color),
                )
                .child(Label::new("/").size(LabelSize::Small).color(Color::Muted))
                .child(
                    Label::new(humanize_token_count(max_token_count))
                        .size(LabelSize::Small)
                        .color(Color::Muted),
                ),
        )
    }
}
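The color thresholds used above, pulled out as a standalone sketch for readability (a hypothetical helper, not part of the diff; it uses the same `Color` values as the code above):

// Error once the context exceeds the model's window, Warning once 80% of the
// window is used, Muted otherwise.
fn token_count_color(token_count: usize, max_token_count: usize) -> Color {
    let remaining_tokens = max_token_count as isize - token_count as isize;
    if remaining_tokens <= 0 {
        Color::Error
    } else if token_count as f32 / max_token_count as f32 >= 0.8 {
        Color::Warning
    } else {
        Color::Muted
    }
}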
@@ -978,6 +1074,7 @@ impl Panel for AssistantPanel {
|
||||
}
|
||||
|
||||
impl EventEmitter<PanelEvent> for AssistantPanel {}
|
||||
impl EventEmitter<AssistantPanelEvent> for AssistantPanel {}
|
||||
|
||||
impl FocusableView for AssistantPanel {
|
||||
fn focus_handle(&self, _cx: &AppContext) -> FocusHandle {
|
||||
@@ -1538,11 +1635,6 @@ impl Context {
|
||||
}
|
||||
}
|
||||
|
||||
fn remaining_tokens(&self, cx: &AppContext) -> Option<isize> {
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
Some(model.max_token_count() as isize - self.token_count? as isize)
|
||||
}
|
||||
|
||||
fn completion_provider_changed(&mut self, cx: &mut ModelContext<Self>) {
|
||||
self.count_remaining_tokens(cx);
|
||||
}
|
||||
@@ -2183,6 +2275,7 @@ struct PendingCompletion {
|
||||
}
|
||||
|
||||
enum ContextEditorEvent {
|
||||
Edited,
|
||||
TabContentChanged,
|
||||
}
|
||||
|
||||
@@ -2775,6 +2868,7 @@ impl ContextEditor {
|
||||
EditorEvent::SelectionsChanged { .. } => {
|
||||
self.scroll_position = self.cursor_scroll_position(cx);
|
||||
}
|
||||
EditorEvent::BufferEdited => cx.emit(ContextEditorEvent::Edited),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@@ -2883,6 +2977,42 @@ impl ContextEditor {
|
||||
});
|
||||
}
|
||||
|
||||
fn insert_selection(
|
||||
workspace: &mut Workspace,
|
||||
_: &InsertIntoEditor,
|
||||
cx: &mut ViewContext<Workspace>,
|
||||
) {
|
||||
let Some(panel) = workspace.panel::<AssistantPanel>(cx) else {
|
||||
return;
|
||||
};
|
||||
let Some(context_editor_view) = panel.read(cx).active_context_editor().cloned() else {
|
||||
return;
|
||||
};
|
||||
let Some(active_editor_view) = workspace
|
||||
.active_item(cx)
|
||||
.and_then(|item| item.act_as::<Editor>(cx))
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let context_editor = context_editor_view.read(cx).editor.read(cx);
|
||||
let anchor = context_editor.selections.newest_anchor();
|
||||
let text = context_editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.read(cx)
|
||||
.text_for_range(anchor.range())
|
||||
.collect::<String>();
|
||||
|
||||
// If nothing is selected, don't delete the current selection; instead, be a no-op.
|
||||
if !text.is_empty() {
|
||||
active_editor_view.update(cx, |editor, cx| {
|
||||
editor.insert(&text, cx);
|
||||
editor.focus(cx);
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn quote_selection(
|
||||
workspace: &mut Workspace,
|
||||
_: &QuoteSelection,
|
||||
@@ -3280,7 +3410,9 @@ fn render_rustdoc_slash_command_trailer(
|
||||
command: PendingSlashCommand,
|
||||
cx: &mut WindowContext,
|
||||
) -> AnyElement {
|
||||
let rustdoc_store = RustdocStore::global(cx);
|
||||
let Some(rustdoc_store) = IndexedDocsStore::try_global(ProviderId::rustdoc(), cx).ok() else {
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let Some((crate_name, _)) = command
|
||||
.argument
|
||||
@@ -3290,7 +3422,7 @@ fn render_rustdoc_slash_command_trailer(
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let crate_name = CrateName::from(crate_name);
|
||||
let crate_name = PackageName::from(crate_name);
|
||||
if !rustdoc_store.is_indexing(&crate_name) {
|
||||
return Empty.into_any();
|
||||
}
|
||||
|
||||
@@ -169,6 +169,7 @@ pub enum AssistantProvider {
|
||||
model: OpenAiModel,
|
||||
api_url: String,
|
||||
low_speed_timeout_in_seconds: Option<u64>,
|
||||
available_models: Vec<OpenAiModel>,
|
||||
},
|
||||
Anthropic {
|
||||
model: AnthropicModel,
|
||||
@@ -188,6 +189,7 @@ impl Default for AssistantProvider {
|
||||
model: OpenAiModel::default(),
|
||||
api_url: open_ai::OPEN_AI_API_URL.into(),
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -202,6 +204,7 @@ pub enum AssistantProviderContent {
|
||||
default_model: Option<OpenAiModel>,
|
||||
api_url: Option<String>,
|
||||
low_speed_timeout_in_seconds: Option<u64>,
|
||||
available_models: Option<Vec<OpenAiModel>>,
|
||||
},
|
||||
#[serde(rename = "anthropic")]
|
||||
Anthropic {
|
||||
@@ -272,6 +275,7 @@ impl AssistantSettingsContent {
|
||||
default_model: settings.default_open_ai_model.clone(),
|
||||
api_url: Some(open_ai_api_url.clone()),
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Some(Default::default()),
|
||||
})
|
||||
} else {
|
||||
settings.default_open_ai_model.clone().map(|open_ai_model| {
|
||||
@@ -279,6 +283,7 @@ impl AssistantSettingsContent {
|
||||
default_model: Some(open_ai_model),
|
||||
api_url: None,
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Some(Default::default()),
|
||||
}
|
||||
})
|
||||
},
|
||||
@@ -326,6 +331,14 @@ impl AssistantSettingsContent {
|
||||
*model = Some(new_model);
|
||||
}
|
||||
}
|
||||
Some(AssistantProviderContent::Ollama {
|
||||
default_model: model,
|
||||
..
|
||||
}) => {
|
||||
if let LanguageModel::Ollama(new_model) = new_model {
|
||||
*model = Some(new_model);
|
||||
}
|
||||
}
|
||||
provider => match new_model {
|
||||
LanguageModel::Cloud(model) => {
|
||||
*provider = Some(AssistantProviderContent::ZedDotDev {
|
||||
@@ -337,6 +350,7 @@ impl AssistantSettingsContent {
|
||||
default_model: Some(model),
|
||||
api_url: None,
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Some(Default::default()),
|
||||
})
|
||||
}
|
||||
LanguageModel::Anthropic(model) => {
|
||||
@@ -481,15 +495,18 @@ impl Settings for AssistantSettings {
|
||||
model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
},
|
||||
AssistantProviderContent::OpenAi {
|
||||
default_model: model_override,
|
||||
api_url: api_url_override,
|
||||
low_speed_timeout_in_seconds: low_speed_timeout_in_seconds_override,
|
||||
available_models: available_models_override,
|
||||
},
|
||||
            ) => {
                merge(model, model_override);
                merge(api_url, api_url_override);
                merge(available_models, available_models_override);
                if let Some(low_speed_timeout_in_seconds_override) =
                    low_speed_timeout_in_seconds_override
                {
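The `merge` helper called above is not shown in this excerpt; a minimal sketch of the behavior it presumably has (an override replaces the base value only when one is present) would look like this:

fn merge<T>(target: &mut T, value: Option<T>) {
    // Keep the existing value unless the settings file provides an override.
    if let Some(value) = value {
        *target = value;
    }
}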
@@ -550,10 +567,12 @@ impl Settings for AssistantSettings {
|
||||
default_model: model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
} => AssistantProvider::OpenAi {
|
||||
model: model.unwrap_or_default(),
|
||||
api_url: api_url.unwrap_or_else(|| open_ai::OPEN_AI_API_URL.into()),
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models: available_models.unwrap_or_default(),
|
||||
},
|
||||
AssistantProviderContent::Anthropic {
|
||||
default_model: model,
|
||||
@@ -610,6 +629,7 @@ mod tests {
|
||||
model: OpenAiModel::FourOmni,
|
||||
api_url: open_ai::OPEN_AI_API_URL.into(),
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Default::default(),
|
||||
}
|
||||
);
|
||||
|
||||
@@ -632,6 +652,7 @@ mod tests {
|
||||
model: OpenAiModel::FourOmni,
|
||||
api_url: "test-url".into(),
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Default::default(),
|
||||
}
|
||||
);
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
@@ -652,6 +673,7 @@ mod tests {
|
||||
model: OpenAiModel::Four,
|
||||
api_url: open_ai::OPEN_AI_API_URL.into(),
|
||||
low_speed_timeout_in_seconds: None,
|
||||
available_models: Default::default(),
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
@@ -24,6 +24,20 @@ use settings::{Settings, SettingsStore};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
/// Choose which model to use for the OpenAI provider.
/// If the model is not available, try to use the first available model, or fall back to the original model.
fn choose_openai_model(
    model: &::open_ai::Model,
    available_models: &[::open_ai::Model],
) -> ::open_ai::Model {
    available_models
        .iter()
        .find(|&m| m == model)
        .or_else(|| available_models.first())
        .unwrap_or_else(|| model)
        .clone()
}

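For example, the fallback order works out as follows (a sketch only; the `Four` and `FourOmni` variants are assumed from their use elsewhere in this diff):

fn choose_openai_model_examples() {
    use ::open_ai::Model;
    // The configured model is kept when it appears in the available list.
    assert!(choose_openai_model(&Model::Four, &[Model::FourOmni, Model::Four]) == Model::Four);
    // Otherwise the first available model wins.
    assert!(choose_openai_model(&Model::Four, &[Model::FourOmni]) == Model::FourOmni);
    // With no available models configured, the original model is returned unchanged.
    assert!(choose_openai_model(&Model::Four, &[]) == Model::Four);
}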
|
||||
pub fn init(client: Arc<Client>, cx: &mut AppContext) {
|
||||
let mut settings_version = 0;
|
||||
let provider = match &AssistantSettings::get_global(cx).provider {
|
||||
@@ -34,8 +48,9 @@ pub fn init(client: Arc<Client>, cx: &mut AppContext) {
|
||||
model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
} => CompletionProvider::OpenAi(OpenAiCompletionProvider::new(
|
||||
model.clone(),
|
||||
choose_openai_model(model, available_models),
|
||||
api_url.clone(),
|
||||
client.http_client(),
|
||||
low_speed_timeout_in_seconds.map(Duration::from_secs),
|
||||
@@ -77,10 +92,11 @@ pub fn init(client: Arc<Client>, cx: &mut AppContext) {
|
||||
model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
},
|
||||
) => {
|
||||
provider.update(
|
||||
model.clone(),
|
||||
choose_openai_model(model, available_models),
|
||||
api_url.clone(),
|
||||
low_speed_timeout_in_seconds.map(Duration::from_secs),
|
||||
settings_version,
|
||||
@@ -136,10 +152,11 @@ pub fn init(client: Arc<Client>, cx: &mut AppContext) {
|
||||
model,
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
},
|
||||
) => {
|
||||
*provider = CompletionProvider::OpenAi(OpenAiCompletionProvider::new(
|
||||
model.clone(),
|
||||
choose_openai_model(model, available_models),
|
||||
api_url.clone(),
|
||||
client.http_client(),
|
||||
low_speed_timeout_in_seconds.map(Duration::from_secs),
|
||||
@@ -201,10 +218,10 @@ impl CompletionProvider {
|
||||
cx.global::<Self>()
|
||||
}
|
||||
|
||||
pub fn available_models(&self) -> Vec<LanguageModel> {
|
||||
pub fn available_models(&self, cx: &AppContext) -> Vec<LanguageModel> {
|
||||
match self {
|
||||
CompletionProvider::OpenAi(provider) => provider
|
||||
.available_models()
|
||||
.available_models(cx)
|
||||
.map(LanguageModel::OpenAi)
|
||||
.collect(),
|
||||
CompletionProvider::Anthropic(provider) => provider
|
||||
|
||||
@@ -236,7 +236,7 @@ pub fn preprocess_anthropic_request(request: &mut LanguageModelRequest) {
|
||||
}
|
||||
|
||||
if !system_message.is_empty() {
|
||||
request.messages.insert(
|
||||
new_messages.insert(
|
||||
0,
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::System,
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::assistant_settings::CloudModel;
|
||||
use crate::assistant_settings::{AssistantProvider, AssistantSettings};
|
||||
use crate::{
|
||||
assistant_settings::OpenAiModel, CompletionProvider, LanguageModel, LanguageModelRequest, Role,
|
||||
};
|
||||
@@ -56,8 +57,26 @@ impl OpenAiCompletionProvider {
|
||||
self.settings_version = settings_version;
|
||||
}
|
||||
|
||||
pub fn available_models(&self) -> impl Iterator<Item = OpenAiModel> {
|
||||
OpenAiModel::iter()
|
||||
pub fn available_models(&self, cx: &AppContext) -> impl Iterator<Item = OpenAiModel> {
|
||||
if let AssistantProvider::OpenAi {
|
||||
available_models, ..
|
||||
} = &AssistantSettings::get_global(cx).provider
|
||||
{
|
||||
if !available_models.is_empty() {
|
||||
// available_models is set, just return it
|
||||
return available_models.clone().into_iter();
|
||||
}
|
||||
}
|
||||
let available_models = if matches!(self.model, OpenAiModel::Custom { .. }) {
|
||||
// available_models is not set but the default model is set to custom, only show custom
|
||||
vec![self.model.clone()]
|
||||
} else {
|
||||
// default case, use all models except custom
|
||||
OpenAiModel::iter()
|
||||
.filter(|model| !matches!(model, OpenAiModel::Custom { .. }))
|
||||
.collect()
|
||||
};
|
||||
available_models.into_iter()
|
||||
}
|
||||
|
||||
pub fn settings_version(&self) -> usize {
|
||||
@@ -213,7 +232,8 @@ pub fn count_open_ai_tokens(
|
||||
| LanguageModel::Cloud(CloudModel::Claude3_5Sonnet)
|
||||
| LanguageModel::Cloud(CloudModel::Claude3Opus)
|
||||
| LanguageModel::Cloud(CloudModel::Claude3Sonnet)
|
||||
| LanguageModel::Cloud(CloudModel::Claude3Haiku) => {
|
||||
| LanguageModel::Cloud(CloudModel::Claude3Haiku)
|
||||
| LanguageModel::OpenAi(OpenAiModel::Custom { .. }) => {
|
||||
// Tiktoken doesn't yet support these models, so we manually use the
|
||||
// same tokenizer as GPT-4.
|
||||
tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
use crate::{
|
||||
prompts::generate_content_prompt, AssistantPanel, CompletionProvider, Hunk,
|
||||
LanguageModelRequest, LanguageModelRequestMessage, Role, StreamingDiff,
|
||||
assistant_settings::AssistantSettings, humanize_token_count, prompts::generate_content_prompt,
|
||||
AssistantPanel, AssistantPanelEvent, CompletionProvider, Hunk, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, Role, StreamingDiff,
|
||||
};
|
||||
use anyhow::{Context as _, Result};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use client::telemetry::Telemetry;
|
||||
use collections::{hash_map, HashMap, HashSet, VecDeque};
|
||||
use editor::{
|
||||
@@ -14,17 +15,18 @@ use editor::{
|
||||
Anchor, AnchorRangeExt, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle,
|
||||
ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
|
||||
};
|
||||
use fs::Fs;
|
||||
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
|
||||
use gpui::{
|
||||
point, AppContext, EventEmitter, FocusHandle, FocusableView, FontStyle, FontWeight, Global,
|
||||
HighlightStyle, Model, ModelContext, Subscription, Task, TextStyle, UpdateGlobal, View,
|
||||
ViewContext, WeakView, WhiteSpace, WindowContext,
|
||||
point, AppContext, EventEmitter, FocusHandle, FocusableView, FontStyle, Global, HighlightStyle,
|
||||
Model, ModelContext, Subscription, Task, TextStyle, UpdateGlobal, View, ViewContext, WeakView,
|
||||
WhiteSpace, WindowContext,
|
||||
};
|
||||
use language::{Buffer, Point, Selection, TransactionId};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use parking_lot::Mutex;
|
||||
use rope::Rope;
|
||||
use settings::Settings;
|
||||
use settings::{update_settings_file, Settings};
|
||||
use similar::TextDiff;
|
||||
use std::{
|
||||
cmp, mem,
|
||||
@@ -32,15 +34,15 @@ use std::{
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
task::{self, Poll},
|
||||
time::Instant,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use theme::ThemeSettings;
|
||||
use ui::{prelude::*, Tooltip};
|
||||
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
|
||||
use util::RangeExt;
|
||||
use workspace::{notifications::NotificationId, Toast, Workspace};
|
||||
|
||||
pub fn init(telemetry: Arc<Telemetry>, cx: &mut AppContext) {
|
||||
cx.set_global(InlineAssistant::new(telemetry));
|
||||
pub fn init(fs: Arc<dyn Fs>, telemetry: Arc<Telemetry>, cx: &mut AppContext) {
|
||||
cx.set_global(InlineAssistant::new(fs, telemetry));
|
||||
}
|
||||
|
||||
const PROMPT_HISTORY_MAX_LEN: usize = 20;
|
||||
@@ -53,12 +55,13 @@ pub struct InlineAssistant {
|
||||
assist_groups: HashMap<InlineAssistGroupId, InlineAssistGroup>,
|
||||
prompt_history: VecDeque<String>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
fs: Arc<dyn Fs>,
|
||||
}
|
||||
|
||||
impl Global for InlineAssistant {}
|
||||
|
||||
impl InlineAssistant {
|
||||
pub fn new(telemetry: Arc<Telemetry>) -> Self {
|
||||
pub fn new(fs: Arc<dyn Fs>, telemetry: Arc<Telemetry>) -> Self {
|
||||
Self {
|
||||
next_assist_id: InlineAssistId::default(),
|
||||
next_assist_group_id: InlineAssistGroupId::default(),
|
||||
@@ -67,6 +70,7 @@ impl InlineAssistant {
|
||||
assist_groups: HashMap::default(),
|
||||
prompt_history: VecDeque::default(),
|
||||
telemetry: Some(telemetry),
|
||||
fs,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -74,7 +78,7 @@ impl InlineAssistant {
|
||||
&mut self,
|
||||
editor: &View<Editor>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
include_context: bool,
|
||||
assistant_panel: Option<&View<AssistantPanel>>,
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
@@ -151,7 +155,10 @@ impl InlineAssistant {
|
||||
self.prompt_history.clone(),
|
||||
prompt_buffer.clone(),
|
||||
codegen.clone(),
|
||||
editor,
|
||||
assistant_panel,
|
||||
workspace.clone(),
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -208,7 +215,7 @@ impl InlineAssistant {
|
||||
InlineAssist::new(
|
||||
assist_id,
|
||||
assist_group_id,
|
||||
include_context,
|
||||
assistant_panel.is_some(),
|
||||
editor,
|
||||
&prompt_editor,
|
||||
block_ids[0],
|
||||
@@ -706,8 +713,6 @@ impl InlineAssistant {
|
||||
return;
|
||||
}
|
||||
|
||||
assist.codegen.update(cx, |codegen, cx| codegen.undo(cx));
|
||||
|
||||
let Some(user_prompt) = assist
|
||||
.decorations
|
||||
.as_ref()
|
||||
@@ -716,115 +721,138 @@ impl InlineAssistant {
|
||||
return;
|
||||
};
|
||||
|
||||
let context = if assist.include_context {
|
||||
assist.workspace.as_ref().and_then(|workspace| {
|
||||
let workspace = workspace.upgrade()?.read(cx);
|
||||
let assistant_panel = workspace.panel::<AssistantPanel>(cx)?;
|
||||
assistant_panel.read(cx).active_context(cx)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let editor = if let Some(editor) = assist.editor.upgrade() {
|
||||
editor
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
let project_name = assist.workspace.as_ref().and_then(|workspace| {
|
||||
let workspace = workspace.upgrade()?;
|
||||
Some(
|
||||
workspace
|
||||
.read(cx)
|
||||
.project()
|
||||
.read(cx)
|
||||
.worktree_root_names(cx)
|
||||
.collect::<Vec<&str>>()
|
||||
.join("/"),
|
||||
)
|
||||
});
|
||||
|
||||
self.prompt_history.retain(|prompt| *prompt != user_prompt);
|
||||
self.prompt_history.push_back(user_prompt.clone());
|
||||
if self.prompt_history.len() > PROMPT_HISTORY_MAX_LEN {
|
||||
self.prompt_history.pop_front();
|
||||
}
|
||||
|
||||
assist.codegen.update(cx, |codegen, cx| codegen.undo(cx));
|
||||
let codegen = assist.codegen.clone();
|
||||
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
let range = codegen.read(cx).range.clone();
|
||||
let start = snapshot.point_to_buffer_offset(range.start);
|
||||
let end = snapshot.point_to_buffer_offset(range.end);
|
||||
let (buffer, range) = if let Some((start, end)) = start.zip(end) {
|
||||
let (start_buffer, start_buffer_offset) = start;
|
||||
let (end_buffer, end_buffer_offset) = end;
|
||||
if start_buffer.remote_id() == end_buffer.remote_id() {
|
||||
(start_buffer.clone(), start_buffer_offset..end_buffer_offset)
|
||||
} else {
|
||||
self.finish_assist(assist_id, false, cx);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
self.finish_assist(assist_id, false, cx);
|
||||
return;
|
||||
};
|
||||
|
||||
let language = buffer.language_at(range.start);
|
||||
let language_name = if let Some(language) = language.as_ref() {
|
||||
if Arc::ptr_eq(language, &language::PLAIN_TEXT) {
|
||||
None
|
||||
} else {
|
||||
Some(language.name())
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Higher Temperature increases the randomness of model outputs.
|
||||
// If Markdown or No Language is Known, increase the randomness for more creative output
|
||||
// If Code, decrease temperature to get more deterministic outputs
|
||||
let temperature = if let Some(language) = language_name.clone() {
|
||||
if language.as_ref() == "Markdown" {
|
||||
1.0
|
||||
} else {
|
||||
0.5
|
||||
}
|
||||
} else {
|
||||
1.0
|
||||
};
|
||||
|
||||
let prompt = cx.background_executor().spawn(async move {
|
||||
let language_name = language_name.as_deref();
|
||||
generate_content_prompt(user_prompt, language_name, buffer, range, project_name)
|
||||
});
|
||||
|
||||
let mut messages = Vec::new();
|
||||
if let Some(context) = context {
|
||||
let request = context.read(cx).to_completion_request(cx);
|
||||
messages = request.messages;
|
||||
}
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
let request = self.request_for_inline_assist(assist_id, cx);
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let prompt = prompt.await?;
|
||||
let request = request.await?;
|
||||
codegen.update(&mut cx, |codegen, cx| codegen.start(request, cx))?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn request_for_inline_assist(
|
||||
&self,
|
||||
assist_id: InlineAssistId,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<LanguageModelRequest>> {
|
||||
cx.spawn(|mut cx| async move {
|
||||
let (user_prompt, context_request, project_name, buffer, range, model) = cx
|
||||
.read_global(|this: &InlineAssistant, cx: &WindowContext| {
|
||||
let assist = this.assists.get(&assist_id).context("invalid assist")?;
|
||||
let decorations = assist.decorations.as_ref().context("invalid assist")?;
|
||||
let editor = assist.editor.upgrade().context("invalid assist")?;
|
||||
let user_prompt = decorations.prompt_editor.read(cx).prompt(cx);
|
||||
let context_request = if assist.include_context {
|
||||
assist.workspace.as_ref().and_then(|workspace| {
|
||||
let workspace = workspace.upgrade()?.read(cx);
|
||||
let assistant_panel = workspace.panel::<AssistantPanel>(cx)?;
|
||||
Some(
|
||||
assistant_panel
|
||||
.read(cx)
|
||||
.active_context(cx)?
|
||||
.read(cx)
|
||||
.to_completion_request(cx),
|
||||
)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let project_name = assist.workspace.as_ref().and_then(|workspace| {
|
||||
let workspace = workspace.upgrade()?;
|
||||
Some(
|
||||
workspace
|
||||
.read(cx)
|
||||
.project()
|
||||
.read(cx)
|
||||
.worktree_root_names(cx)
|
||||
.collect::<Vec<&str>>()
|
||||
.join("/"),
|
||||
)
|
||||
});
|
||||
let buffer = editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||
let range = assist.codegen.read(cx).range.clone();
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
anyhow::Ok((
|
||||
user_prompt,
|
||||
context_request,
|
||||
project_name,
|
||||
buffer,
|
||||
range,
|
||||
model,
|
||||
))
|
||||
})??;
|
||||
|
||||
let language = buffer.language_at(range.start);
|
||||
let language_name = if let Some(language) = language.as_ref() {
|
||||
if Arc::ptr_eq(language, &language::PLAIN_TEXT) {
|
||||
None
|
||||
} else {
|
||||
Some(language.name())
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Higher Temperature increases the randomness of model outputs.
|
||||
// If Markdown or No Language is Known, increase the randomness for more creative output
|
||||
// If Code, decrease temperature to get more deterministic outputs
|
||||
let temperature = if let Some(language) = language_name.clone() {
|
||||
if language.as_ref() == "Markdown" {
|
||||
1.0
|
||||
} else {
|
||||
0.5
|
||||
}
|
||||
} else {
|
||||
1.0
|
||||
};
|
||||
|
||||
let prompt = cx
|
||||
.background_executor()
|
||||
.spawn(async move {
|
||||
let language_name = language_name.as_deref();
|
||||
let start = buffer.point_to_buffer_offset(range.start);
|
||||
let end = buffer.point_to_buffer_offset(range.end);
|
||||
let (buffer, range) = if let Some((start, end)) = start.zip(end) {
|
||||
let (start_buffer, start_buffer_offset) = start;
|
||||
let (end_buffer, end_buffer_offset) = end;
|
||||
if start_buffer.remote_id() == end_buffer.remote_id() {
|
||||
(start_buffer.clone(), start_buffer_offset..end_buffer_offset)
|
||||
} else {
|
||||
return Err(anyhow!("invalid transformation range"));
|
||||
}
|
||||
} else {
|
||||
return Err(anyhow!("invalid transformation range"));
|
||||
};
|
||||
generate_content_prompt(user_prompt, language_name, buffer, range, project_name)
|
||||
})
|
||||
.await?;
|
||||
|
||||
let mut messages = Vec::new();
|
||||
if let Some(context_request) = context_request {
|
||||
messages = context_request.messages;
|
||||
}
|
||||
|
||||
messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: prompt,
|
||||
});
|
||||
|
||||
let request = LanguageModelRequest {
|
||||
Ok(LanguageModelRequest {
|
||||
model,
|
||||
messages,
|
||||
stop: vec!["|END|>".to_string()],
|
||||
temperature,
|
||||
};
|
||||
|
||||
codegen.update(&mut cx, |codegen, cx| codegen.start(request, cx))?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn stop_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
@@ -1142,6 +1170,7 @@ enum PromptEditorEvent {
|
||||
|
||||
struct PromptEditor {
|
||||
id: InlineAssistId,
|
||||
fs: Arc<dyn Fs>,
|
||||
height_in_lines: u8,
|
||||
editor: View<Editor>,
|
||||
edited_since_done: bool,
|
||||
@@ -1150,9 +1179,12 @@ struct PromptEditor {
|
||||
prompt_history_ix: Option<usize>,
|
||||
pending_prompt: String,
|
||||
codegen: Model<Codegen>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
_codegen_subscription: Subscription,
|
||||
editor_subscriptions: Vec<Subscription>,
|
||||
pending_token_count: Task<Result<()>>,
|
||||
token_count: Option<usize>,
|
||||
_token_count_subscriptions: Vec<Subscription>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
}
|
||||
|
||||
impl EventEmitter<PromptEditorEvent> for PromptEditor {}
|
||||
@@ -1160,6 +1192,7 @@ impl EventEmitter<PromptEditorEvent> for PromptEditor {}
|
||||
impl Render for PromptEditor {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let gutter_dimensions = *self.gutter_dimensions.lock();
|
||||
let fs = self.fs.clone();
|
||||
|
||||
let buttons = match &self.codegen.read(cx).status {
|
||||
CodegenStatus::Idle => {
|
||||
@@ -1245,85 +1278,101 @@ impl Render for PromptEditor {
|
||||
}
|
||||
};
|
||||
|
||||
v_flex().h_full().w_full().justify_end().child(
|
||||
h_flex()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.border_y_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.py_1p5()
|
||||
.w_full()
|
||||
.on_action(cx.listener(Self::confirm))
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.on_action(cx.listener(Self::move_up))
|
||||
.on_action(cx.listener(Self::move_down))
|
||||
.child(
|
||||
h_flex()
|
||||
.w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0))
|
||||
// .pr(gutter_dimensions.fold_area_width())
|
||||
.justify_center()
|
||||
.gap_2()
|
||||
.children(self.workspace.clone().map(|workspace| {
|
||||
IconButton::new("context", IconName::Context)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.on_click({
|
||||
let workspace = workspace.clone();
|
||||
cx.listener(move |_, _, cx| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.focus_panel::<AssistantPanel>(cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
h_flex()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.border_y_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.py_1p5()
|
||||
.h_full()
|
||||
.w_full()
|
||||
.on_action(cx.listener(Self::confirm))
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.on_action(cx.listener(Self::move_up))
|
||||
.on_action(cx.listener(Self::move_down))
|
||||
.child(
|
||||
h_flex()
|
||||
.w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0))
|
||||
.justify_center()
|
||||
.gap_2()
|
||||
.child(
|
||||
PopoverMenu::new("model-switcher")
|
||||
.menu(move |cx| {
|
||||
ContextMenu::build(cx, |mut menu, cx| {
|
||||
for model in CompletionProvider::global(cx).available_models(cx)
|
||||
{
|
||||
menu = menu.custom_entry(
|
||||
{
|
||||
let model = model.clone();
|
||||
move |_| {
|
||||
Label::new(model.display_name())
|
||||
.into_any_element()
|
||||
}
|
||||
},
|
||||
{
|
||||
let fs = fs.clone();
|
||||
let model = model.clone();
|
||||
move |cx| {
|
||||
let model = model.clone();
|
||||
update_settings_file::<AssistantSettings>(
|
||||
fs.clone(),
|
||||
cx,
|
||||
move |settings| settings.set_model(model),
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
menu
|
||||
})
|
||||
.tooltip(move |cx| {
|
||||
let token_count = workspace.upgrade().and_then(|workspace| {
|
||||
let panel =
|
||||
workspace.read(cx).panel::<AssistantPanel>(cx)?;
|
||||
let context = panel.read(cx).active_context(cx)?;
|
||||
context.read(cx).token_count()
|
||||
});
|
||||
if let Some(token_count) = token_count {
|
||||
.into()
|
||||
})
|
||||
.trigger(
|
||||
IconButton::new("context", IconName::Settings)
|
||||
.size(ButtonSize::None)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
format!(
|
||||
"{} Additional Context Tokens from Assistant",
|
||||
token_count
|
||||
"Using {}",
|
||||
CompletionProvider::global(cx)
|
||||
.model()
|
||||
.display_name()
|
||||
),
|
||||
Some(&crate::ToggleFocus),
|
||||
"Click to open…",
|
||||
None,
|
||||
"Click to Change Model",
|
||||
cx,
|
||||
)
|
||||
} else {
|
||||
Tooltip::for_action(
|
||||
"Toggle Assistant Panel",
|
||||
&crate::ToggleFocus,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
}))
|
||||
.children(
|
||||
if let CodegenStatus::Error(error) = &self.codegen.read(cx).status {
|
||||
let error_message = SharedString::from(error.to_string());
|
||||
Some(
|
||||
div()
|
||||
.id("error")
|
||||
.tooltip(move |cx| Tooltip::text(error_message.clone(), cx))
|
||||
.child(
|
||||
Icon::new(IconName::XCircle)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Error),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
),
|
||||
)
|
||||
.child(div().flex_1().child(self.render_prompt_editor(cx)))
|
||||
.child(h_flex().gap_2().pr_4().children(buttons)),
|
||||
)
|
||||
}),
|
||||
)
|
||||
.anchor(gpui::AnchorCorner::BottomRight),
|
||||
)
|
||||
.children(
|
||||
if let CodegenStatus::Error(error) = &self.codegen.read(cx).status {
|
||||
let error_message = SharedString::from(error.to_string());
|
||||
Some(
|
||||
div()
|
||||
.id("error")
|
||||
.tooltip(move |cx| Tooltip::text(error_message.clone(), cx))
|
||||
.child(
|
||||
Icon::new(IconName::XCircle)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Error),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
),
|
||||
)
|
||||
.child(div().flex_1().child(self.render_prompt_editor(cx)))
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.pr_4()
|
||||
.children(self.render_token_count(cx))
|
||||
.children(buttons),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1336,13 +1385,17 @@ impl FocusableView for PromptEditor {
|
||||
impl PromptEditor {
|
||||
const MAX_LINES: u8 = 8;
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
id: InlineAssistId,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
prompt_history: VecDeque<String>,
|
||||
prompt_buffer: Model<MultiBuffer>,
|
||||
codegen: Model<Codegen>,
|
||||
parent_editor: &View<Editor>,
|
||||
assistant_panel: Option<&View<AssistantPanel>>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
fs: Arc<dyn Fs>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let prompt_editor = cx.new_view(|cx| {
|
||||
@@ -1363,6 +1416,15 @@ impl PromptEditor {
|
||||
editor.set_placeholder_text("Add a prompt…", cx);
|
||||
editor
|
||||
});
|
||||
|
||||
let mut token_count_subscriptions = Vec::new();
|
||||
token_count_subscriptions
|
||||
.push(cx.subscribe(parent_editor, Self::handle_parent_editor_event));
|
||||
if let Some(assistant_panel) = assistant_panel {
|
||||
token_count_subscriptions
|
||||
.push(cx.subscribe(assistant_panel, Self::handle_assistant_panel_event));
|
||||
}
|
||||
|
||||
let mut this = Self {
|
||||
id,
|
||||
height_in_lines: 1,
|
||||
@@ -1375,9 +1437,14 @@ impl PromptEditor {
|
||||
_codegen_subscription: cx.observe(&codegen, Self::handle_codegen_changed),
|
||||
editor_subscriptions: Vec::new(),
|
||||
codegen,
|
||||
fs,
|
||||
pending_token_count: Task::ready(Ok(())),
|
||||
token_count: None,
|
||||
_token_count_subscriptions: token_count_subscriptions,
|
||||
workspace,
|
||||
};
|
||||
this.count_lines(cx);
|
||||
this.count_tokens(cx);
|
||||
this.subscribe_to_editor(cx);
|
||||
this
|
||||
}
|
||||
@@ -1436,6 +1503,47 @@ impl PromptEditor {
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_parent_editor_event(
|
||||
&mut self,
|
||||
_: View<Editor>,
|
||||
event: &EditorEvent,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let EditorEvent::BufferEdited { .. } = event {
|
||||
self.count_tokens(cx);
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_assistant_panel_event(
|
||||
&mut self,
|
||||
_: View<AssistantPanel>,
|
||||
event: &AssistantPanelEvent,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let AssistantPanelEvent::ContextEdited { .. } = event;
|
||||
self.count_tokens(cx);
|
||||
}
|
||||
|
||||
fn count_tokens(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let assist_id = self.id;
|
||||
self.pending_token_count = cx.spawn(|this, mut cx| async move {
|
||||
cx.background_executor().timer(Duration::from_secs(1)).await;
|
||||
let request = cx
|
||||
.update_global(|inline_assistant: &mut InlineAssistant, cx| {
|
||||
inline_assistant.request_for_inline_assist(assist_id, cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let token_count = cx
|
||||
.update(|cx| CompletionProvider::global(cx).count_tokens(request, cx))?
|
||||
.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_prompt_editor_changed(&mut self, _: View<Editor>, cx: &mut ViewContext<Self>) {
|
||||
self.count_lines(cx);
|
||||
}
|
||||
@@ -1460,6 +1568,9 @@ impl PromptEditor {
|
||||
self.edited_since_done = true;
|
||||
cx.notify();
|
||||
}
|
||||
EditorEvent::BufferEdited => {
|
||||
self.count_tokens(cx);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@@ -1551,6 +1662,63 @@ impl PromptEditor {
|
||||
}
|
||||
}
|
||||
|
||||
fn render_token_count(&self, cx: &mut ViewContext<Self>) -> Option<impl IntoElement> {
|
||||
let model = CompletionProvider::global(cx).model();
|
||||
let token_count = self.token_count?;
|
||||
let max_token_count = model.max_token_count();
|
||||
|
||||
let remaining_tokens = max_token_count as isize - token_count as isize;
|
||||
let token_count_color = if remaining_tokens <= 0 {
|
||||
Color::Error
|
||||
} else if token_count as f32 / max_token_count as f32 >= 0.8 {
|
||||
Color::Warning
|
||||
} else {
|
||||
Color::Muted
|
||||
};
|
||||
|
||||
let mut token_count = h_flex()
|
||||
.id("token_count")
|
||||
.gap_0p5()
|
||||
.child(
|
||||
Label::new(humanize_token_count(token_count))
|
||||
.size(LabelSize::Small)
|
||||
.color(token_count_color),
|
||||
)
|
||||
.child(Label::new("/").size(LabelSize::Small).color(Color::Muted))
|
||||
.child(
|
||||
Label::new(humanize_token_count(max_token_count))
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
);
|
||||
if let Some(workspace) = self.workspace.clone() {
|
||||
token_count = token_count
|
||||
.tooltip(|cx| {
|
||||
Tooltip::with_meta(
|
||||
"Tokens Used by Inline Assistant",
|
||||
None,
|
||||
"Click to Open Assistant Panel",
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.cursor_pointer()
|
||||
.on_mouse_down(gpui::MouseButton::Left, |_, cx| cx.stop_propagation())
|
||||
.on_click(move |_, cx| {
|
||||
cx.stop_propagation();
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.focus_panel::<AssistantPanel>(cx)
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
} else {
|
||||
token_count = token_count
|
||||
.cursor_default()
|
||||
.tooltip(|cx| Tooltip::text("Tokens Used by Inline Assistant", cx));
|
||||
}
|
||||
|
||||
Some(token_count)
|
||||
}
|
||||
|
||||
fn render_prompt_editor(&self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
let text_style = TextStyle {
|
||||
@@ -1562,7 +1730,7 @@ impl PromptEditor {
|
||||
font_family: settings.ui_font.family.clone(),
|
||||
font_features: settings.ui_font.features.clone(),
|
||||
font_size: rems(0.875).into(),
|
||||
font_weight: FontWeight::NORMAL,
|
||||
font_weight: settings.ui_font.weight,
|
||||
font_style: FontStyle::Normal,
|
||||
line_height: relative(1.3),
|
||||
background_color: None,
|
||||
@@ -1893,6 +2061,11 @@ impl Codegen {
|
||||
|
||||
if lines.peek().is_some() {
|
||||
hunks_tx.send(diff.push_new("\n")).await?;
|
||||
if line_indent.is_none() {
|
||||
// Don't write out the leading indentation in empty lines on the next line
|
||||
// This is the case where the above if statement didn't clear the buffer
|
||||
new_text.clear();
|
||||
}
|
||||
line_indent = None;
|
||||
first_line = false;
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ impl RenderOnce for ModelSelector {
|
||||
.with_handle(self.handle)
|
||||
.menu(move |cx| {
|
||||
ContextMenu::build(cx, |mut menu, cx| {
|
||||
for model in CompletionProvider::global(cx).available_models() {
|
||||
for model in CompletionProvider::global(cx).available_models(cx) {
|
||||
menu = menu.custom_entry(
|
||||
{
|
||||
let model = model.clone();
|
||||
|
||||
@@ -5,17 +5,17 @@ use crate::{
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::SlashCommandRegistry;
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::HashMap;
|
||||
use editor::{actions::Tab, CurrentLineHighlight, Editor, EditorEvent};
|
||||
use collections::{HashMap, HashSet};
|
||||
use editor::{actions::Tab, CurrentLineHighlight, Editor, EditorElement, EditorEvent, EditorStyle};
|
||||
use futures::{
|
||||
future::{self, BoxFuture, Shared},
|
||||
FutureExt,
|
||||
};
|
||||
use fuzzy::StringMatchCandidate;
|
||||
use gpui::{
|
||||
actions, percentage, point, size, Animation, AnimationExt, AppContext, BackgroundExecutor,
|
||||
Bounds, EventEmitter, Global, PromptLevel, ReadGlobal, Subscription, Task, TitlebarOptions,
|
||||
Transformation, UpdateGlobal, View, WindowBounds, WindowHandle, WindowOptions,
|
||||
actions, point, size, transparent_black, AppContext, BackgroundExecutor, Bounds, EventEmitter,
|
||||
Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
|
||||
TitlebarOptions, UpdateGlobal, View, WindowBounds, WindowHandle, WindowOptions,
|
||||
};
|
||||
use heed::{types::SerdeBincode, Database, RoTxn};
|
||||
use language::{language_settings::SoftWrap, Buffer, LanguageRegistry};
|
||||
@@ -34,7 +34,7 @@ use std::{
|
||||
use theme::ThemeSettings;
|
||||
use ui::{
|
||||
div, prelude::*, IconButtonShape, ListItem, ListItemSpacing, ParentElement, Render,
|
||||
SharedString, Styled, TitleBar, Tooltip, ViewContext, VisualContext,
|
||||
SharedString, Styled, Tooltip, ViewContext, VisualContext,
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use uuid::Uuid;
|
||||
@@ -42,7 +42,12 @@ use workspace::Workspace;
|
||||
|
||||
actions!(
|
||||
prompt_library,
|
||||
[NewPrompt, DeletePrompt, ToggleDefaultPrompt]
|
||||
[
|
||||
NewPrompt,
|
||||
DeletePrompt,
|
||||
DuplicatePrompt,
|
||||
ToggleDefaultPrompt
|
||||
]
|
||||
);
|
||||
|
||||
/// Init starts loading the PromptStore in the background and assigns
|
||||
@@ -109,12 +114,13 @@ pub struct PromptLibrary {
|
||||
}
|
||||
|
||||
struct PromptEditor {
|
||||
editor: View<Editor>,
|
||||
title_editor: View<Editor>,
|
||||
body_editor: View<Editor>,
|
||||
token_count: Option<usize>,
|
||||
pending_token_count: Task<Option<()>>,
|
||||
next_body_to_save: Option<Rope>,
|
||||
next_title_and_body_to_save: Option<(String, Rope)>,
|
||||
pending_save: Option<Task<Option<()>>>,
|
||||
_subscription: Subscription,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
struct PromptPickerDelegate {
|
||||
@@ -345,7 +351,8 @@ impl PromptLibrary {
|
||||
|
||||
let prompt_metadata = self.store.metadata(prompt_id).unwrap();
|
||||
let prompt_editor = self.prompt_editors.get_mut(&prompt_id).unwrap();
|
||||
let body = prompt_editor.editor.update(cx, |editor, cx| {
|
||||
let title = prompt_editor.title_editor.read(cx).text(cx);
|
||||
let body = prompt_editor.body_editor.update(cx, |editor, cx| {
|
||||
editor
|
||||
.buffer()
|
||||
.read(cx)
|
||||
@@ -359,20 +366,24 @@ impl PromptLibrary {
|
||||
let store = self.store.clone();
|
||||
let executor = cx.background_executor().clone();
|
||||
|
||||
prompt_editor.next_body_to_save = Some(body);
|
||||
prompt_editor.next_title_and_body_to_save = Some((title, body));
|
||||
if prompt_editor.pending_save.is_none() {
|
||||
prompt_editor.pending_save = Some(cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
loop {
|
||||
let next_body_to_save = this.update(&mut cx, |this, _| {
|
||||
let title_and_body = this.update(&mut cx, |this, _| {
|
||||
this.prompt_editors
|
||||
.get_mut(&prompt_id)?
|
||||
.next_body_to_save
|
||||
.next_title_and_body_to_save
|
||||
.take()
|
||||
})?;
|
||||
|
||||
if let Some(body) = next_body_to_save {
|
||||
let title = title_from_body(body.chars_at(0));
|
||||
if let Some((title, body)) = title_and_body {
|
||||
let title = if title.trim().is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(SharedString::from(title))
|
||||
};
|
||||
store
|
||||
.save(prompt_id, title, prompt_metadata.default, body)
|
||||
.await
|
||||
@@ -405,6 +416,12 @@ impl PromptLibrary {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn duplicate_active_prompt(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if let Some(active_prompt_id) = self.active_prompt_id {
|
||||
self.duplicate_prompt(active_prompt_id, cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn toggle_default_for_active_prompt(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if let Some(active_prompt_id) = self.active_prompt_id {
|
||||
self.toggle_default_for_prompt(active_prompt_id, cx);
|
||||
@@ -425,11 +442,11 @@ impl PromptLibrary {
|
||||
if let Some(prompt_editor) = self.prompt_editors.get(&prompt_id) {
|
||||
if focus {
|
||||
prompt_editor
|
||||
.editor
|
||||
.body_editor
|
||||
.update(cx, |editor, cx| editor.focus(cx));
|
||||
}
|
||||
self.set_active_prompt(Some(prompt_id), cx);
|
||||
} else {
|
||||
} else if let Some(prompt_metadata) = self.store.metadata(prompt_id) {
|
||||
let language_registry = self.language_registry.clone();
|
||||
let commands = SlashCommandRegistry::global(cx);
|
||||
let prompt = self.store.load(prompt_id);
|
||||
@@ -438,13 +455,20 @@ impl PromptLibrary {
|
||||
let markdown = language_registry.language_for_name("Markdown").await;
|
||||
this.update(&mut cx, |this, cx| match prompt {
|
||||
Ok(prompt) => {
|
||||
let buffer = cx.new_model(|cx| {
|
||||
let mut buffer = Buffer::local(prompt, cx);
|
||||
buffer.set_language(markdown.log_err(), cx);
|
||||
buffer.set_language_registry(language_registry);
|
||||
buffer
|
||||
let title_editor = cx.new_view(|cx| {
|
||||
let mut editor = Editor::auto_width(cx);
|
||||
editor.set_placeholder_text("Untitled", cx);
|
||||
editor.set_text(prompt_metadata.title.unwrap_or_default(), cx);
|
||||
editor
|
||||
});
|
||||
let editor = cx.new_view(|cx| {
|
||||
let body_editor = cx.new_view(|cx| {
|
||||
let buffer = cx.new_model(|cx| {
|
||||
let mut buffer = Buffer::local(prompt, cx);
|
||||
buffer.set_language(markdown.log_err(), cx);
|
||||
buffer.set_language_registry(language_registry);
|
||||
buffer
|
||||
});
|
||||
|
||||
let mut editor = Editor::for_buffer(buffer, None, cx);
|
||||
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
|
||||
editor.set_show_gutter(false, cx);
|
||||
@@ -460,19 +484,24 @@ impl PromptLibrary {
|
||||
}
|
||||
editor
|
||||
});
|
||||
let _subscription =
|
||||
cx.subscribe(&editor, move |this, _editor, event, cx| {
|
||||
this.handle_prompt_editor_event(prompt_id, event, cx)
|
||||
});
|
||||
let _subscriptions = vec![
|
||||
cx.subscribe(&title_editor, move |this, editor, event, cx| {
|
||||
this.handle_prompt_title_editor_event(prompt_id, editor, event, cx)
|
||||
}),
|
||||
cx.subscribe(&body_editor, move |this, editor, event, cx| {
|
||||
this.handle_prompt_body_editor_event(prompt_id, editor, event, cx)
|
||||
}),
|
||||
];
|
||||
this.prompt_editors.insert(
|
||||
prompt_id,
|
||||
PromptEditor {
|
||||
editor,
|
||||
next_body_to_save: None,
|
||||
title_editor,
|
||||
body_editor,
|
||||
next_title_and_body_to_save: None,
|
||||
pending_save: None,
|
||||
token_count: None,
|
||||
pending_token_count: Task::ready(None),
|
||||
_subscription,
|
||||
_subscriptions,
|
||||
},
|
||||
);
|
||||
this.set_active_prompt(Some(prompt_id), cx);
|
||||
@@ -546,10 +575,51 @@ impl PromptLibrary {
|
||||
}
|
||||
}
|
||||
|
||||
    pub fn duplicate_prompt(&mut self, prompt_id: PromptId, cx: &mut ViewContext<Self>) {
        if let Some(prompt) = self.prompt_editors.get(&prompt_id) {
            const DUPLICATE_SUFFIX: &str = " copy";
            let title_to_duplicate = prompt.title_editor.read(cx).text(cx);
            let existing_titles = self
                .prompt_editors
                .iter()
                .filter(|&(&id, _)| id != prompt_id)
                .map(|(_, prompt_editor)| prompt_editor.title_editor.read(cx).text(cx))
                .filter(|title| title.starts_with(&title_to_duplicate))
                .collect::<HashSet<_>>();

            let title = if existing_titles.is_empty() {
                title_to_duplicate + DUPLICATE_SUFFIX
            } else {
                let mut i = 1;
                loop {
                    let new_title = format!("{title_to_duplicate}{DUPLICATE_SUFFIX} {i}");
                    if !existing_titles.contains(&new_title) {
                        break new_title;
                    }
                    i += 1;
                }
            };

            let new_id = PromptId::new();
            let body = prompt.body_editor.read(cx).text(cx);
            let save = self
                .store
                .save(new_id, Some(title.into()), false, body.into());
            self.picker.update(cx, |picker, cx| picker.refresh(cx));
            cx.spawn(|this, mut cx| async move {
                save.await?;
                this.update(&mut cx, |prompt_library, cx| {
                    prompt_library.load_prompt(new_id, true, cx)
                })
            })
            .detach_and_log_err(cx);
        }
    }

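So duplicating a prompt titled "My Prompt" produces "My Prompt copy" when no similarly named prompt exists, and "My Prompt copy 1", "My Prompt copy 2", and so on otherwise. A minimal standalone sketch of just the naming step (hypothetical helper, not part of the diff; it assumes `existing_titles` is already filtered to titles starting with the original, as in the method above):

use std::collections::HashSet;

fn duplicate_title(original: &str, existing_titles: &HashSet<String>) -> String {
    const DUPLICATE_SUFFIX: &str = " copy";
    if existing_titles.is_empty() {
        return format!("{original}{DUPLICATE_SUFFIX}");
    }
    let mut i = 1;
    loop {
        // Try "… copy 1", "… copy 2", … until the candidate is unused.
        let candidate = format!("{original}{DUPLICATE_SUFFIX} {i}");
        if !existing_titles.contains(&candidate) {
            return candidate;
        }
        i += 1;
    }
}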
fn focus_active_prompt(&mut self, _: &Tab, cx: &mut ViewContext<Self>) {
|
||||
if let Some(active_prompt) = self.active_prompt_id {
|
||||
self.prompt_editors[&active_prompt]
|
||||
.editor
|
||||
.body_editor
|
||||
.update(cx, |editor, cx| editor.focus(cx));
|
||||
cx.stop_propagation();
|
||||
}
|
||||
@@ -565,11 +635,11 @@ impl PromptLibrary {
|
||||
return;
|
||||
};
|
||||
|
||||
let prompt_editor = &self.prompt_editors[&active_prompt_id].editor;
|
||||
let prompt_editor = &self.prompt_editors[&active_prompt_id].body_editor;
|
||||
let provider = CompletionProvider::global(cx);
|
||||
if provider.is_authenticated() {
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
assistant.assist(&prompt_editor, None, false, cx)
|
||||
assistant.assist(&prompt_editor, None, None, cx)
|
||||
})
|
||||
} else {
|
||||
for window in cx.windows() {
|
||||
@@ -589,50 +659,73 @@ impl PromptLibrary {
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_prompt_editor_event(
|
||||
fn move_down_from_title(&mut self, _: &editor::actions::MoveDown, cx: &mut ViewContext<Self>) {
|
||||
if let Some(prompt_id) = self.active_prompt_id {
|
||||
if let Some(prompt_editor) = self.prompt_editors.get(&prompt_id) {
|
||||
cx.focus_view(&prompt_editor.body_editor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn move_up_from_body(&mut self, _: &editor::actions::MoveUp, cx: &mut ViewContext<Self>) {
|
||||
if let Some(prompt_id) = self.active_prompt_id {
|
||||
if let Some(prompt_editor) = self.prompt_editors.get(&prompt_id) {
|
||||
cx.focus_view(&prompt_editor.title_editor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_prompt_title_editor_event(
|
||||
&mut self,
|
||||
prompt_id: PromptId,
|
||||
title_editor: View<Editor>,
|
||||
event: &EditorEvent,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let EditorEvent::BufferEdited = event {
|
||||
let prompt_editor = self.prompt_editors.get(&prompt_id).unwrap();
|
||||
let buffer = prompt_editor
|
||||
.editor
|
||||
.read(cx)
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.as_singleton()
|
||||
.unwrap();
|
||||
match event {
|
||||
EditorEvent::BufferEdited => {
|
||||
self.save_prompt(prompt_id, cx);
|
||||
self.count_tokens(prompt_id, cx);
|
||||
}
|
||||
EditorEvent::Blurred => {
|
||||
title_editor.update(cx, |title_editor, cx| {
|
||||
title_editor.change_selections(None, cx, |selections| {
|
||||
let cursor = selections.oldest_anchor().head();
|
||||
selections.select_anchor_ranges([cursor..cursor]);
|
||||
});
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
let mut chars = buffer.chars_at(0);
|
||||
match chars.next() {
|
||||
Some('#') => {
|
||||
if chars.next() != Some(' ') {
|
||||
drop(chars);
|
||||
buffer.edit([(1..1, " ")], None, cx);
|
||||
}
|
||||
}
|
||||
Some(' ') => {
|
||||
drop(chars);
|
||||
buffer.edit([(0..0, "#")], None, cx);
|
||||
}
|
||||
_ => {
|
||||
drop(chars);
|
||||
buffer.edit([(0..0, "# ")], None, cx);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
self.save_prompt(prompt_id, cx);
|
||||
self.count_tokens(prompt_id, cx);
|
||||
fn handle_prompt_body_editor_event(
|
||||
&mut self,
|
||||
prompt_id: PromptId,
|
||||
body_editor: View<Editor>,
|
||||
event: &EditorEvent,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
match event {
|
||||
EditorEvent::BufferEdited => {
|
||||
self.save_prompt(prompt_id, cx);
|
||||
self.count_tokens(prompt_id, cx);
|
||||
}
|
||||
EditorEvent::Blurred => {
|
||||
body_editor.update(cx, |body_editor, cx| {
|
||||
body_editor.change_selections(None, cx, |selections| {
|
||||
let cursor = selections.oldest_anchor().head();
|
||||
selections.select_anchor_ranges([cursor..cursor]);
|
||||
});
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn count_tokens(&mut self, prompt_id: PromptId, cx: &mut ViewContext<Self>) {
|
||||
if let Some(prompt) = self.prompt_editors.get_mut(&prompt_id) {
|
||||
let editor = &prompt.editor.read(cx);
|
||||
let editor = &prompt.body_editor.read(cx);
|
||||
let buffer = &editor.buffer().read(cx).as_singleton().unwrap().read(cx);
|
||||
let body = buffer.as_rope().clone();
|
||||
prompt.pending_token_count = cx.spawn(|this, mut cx| {
|
||||
@@ -680,7 +773,7 @@ impl PromptLibrary {
|
||||
.child(
|
||||
h_flex()
|
||||
.p(Spacing::Small.rems(cx))
|
||||
.h(TitleBar::height(cx))
|
||||
.h_9()
|
||||
.w_full()
|
||||
.flex_none()
|
||||
.justify_end()
|
||||
@@ -708,122 +801,218 @@ impl PromptLibrary {
|
||||
.flex_none()
|
||||
.min_w_64()
|
||||
.children(self.active_prompt_id.and_then(|prompt_id| {
|
||||
let buffer_font = ThemeSettings::get_global(cx).buffer_font.family.clone();
|
||||
let prompt_metadata = self.store.metadata(prompt_id)?;
|
||||
let prompt_editor = &self.prompt_editors[&prompt_id];
|
||||
let focus_handle = prompt_editor.editor.focus_handle(cx);
|
||||
let focus_handle = prompt_editor.body_editor.focus_handle(cx);
|
||||
let current_model = CompletionProvider::global(cx).model();
|
||||
let token_count = prompt_editor.token_count.map(|count| count.to_string());
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
|
||||
Some(
|
||||
h_flex()
|
||||
v_flex()
|
||||
.id("prompt-editor-inner")
|
||||
.size_full()
|
||||
.items_start()
|
||||
.relative()
|
||||
.overflow_hidden()
|
||||
.pl(Spacing::XXLarge.rems(cx))
|
||||
.pt(Spacing::Large.rems(cx))
|
||||
.on_click(cx.listener(move |_, _, cx| {
|
||||
cx.focus(&focus_handle);
|
||||
}))
|
||||
.child(
|
||||
div()
|
||||
.on_action(cx.listener(Self::focus_picker))
|
||||
.on_action(cx.listener(Self::inline_assist))
|
||||
.flex_grow()
|
||||
.h_full()
|
||||
.pt(Spacing::XXLarge.rems(cx))
|
||||
.pl(Spacing::XXLarge.rems(cx))
|
||||
.child(prompt_editor.editor.clone()),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.w_12()
|
||||
.py(Spacing::Large.rems(cx))
|
||||
.justify_start()
|
||||
.items_end()
|
||||
.gap_1()
|
||||
.child(h_flex().h_8().font_family(buffer_font).when_some_else(
|
||||
token_count,
|
||||
|tokens_ready, token_count| {
|
||||
tokens_ready.pr_3().justify_end().child(
|
||||
// This isn't actually a button, it just lets us easily add
|
||||
// a tooltip to the token count.
|
||||
Button::new("token_count", token_count.clone())
|
||||
.style(ButtonStyle::Transparent)
|
||||
.color(Color::Muted)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
format!("{} tokens", token_count,),
|
||||
None,
|
||||
format!(
|
||||
"Model: {}",
|
||||
current_model.display_name()
|
||||
),
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
)
|
||||
},
|
||||
|tokens_loading| {
|
||||
tokens_loading.w_12().justify_center().child(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted)
|
||||
.with_animation(
|
||||
"arrow-circle",
|
||||
Animation::new(Duration::from_secs(4)).repeat(),
|
||||
|icon, delta| {
|
||||
icon.transform(Transformation::rotate(
|
||||
percentage(delta),
|
||||
))
|
||||
},
|
||||
),
|
||||
)
|
||||
},
|
||||
))
|
||||
h_flex()
|
||||
.group("active-editor-header")
|
||||
.pr(Spacing::XXLarge.rems(cx))
|
||||
.pt(Spacing::XSmall.rems(cx))
|
||||
.pb(Spacing::Large.rems(cx))
|
||||
.justify_between()
|
||||
.child(
|
||||
h_flex().justify_center().w_12().h_8().child(
|
||||
IconButton::new("toggle-default-prompt", IconName::Sparkle)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.selected(prompt_metadata.default)
|
||||
.selected_icon(IconName::SparkleFilled)
|
||||
.icon_color(if prompt_metadata.default {
|
||||
Color::Accent
|
||||
} else {
|
||||
Color::Muted
|
||||
})
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::text(
|
||||
if prompt_metadata.default {
|
||||
"Remove from Default Prompt"
|
||||
} else {
|
||||
"Add to Default Prompt"
|
||||
},
|
||||
cx,
|
||||
h_flex().gap_1().child(
|
||||
div()
|
||||
.max_w_80()
|
||||
.on_action(cx.listener(Self::move_down_from_title))
|
||||
.border_1()
|
||||
.border_color(transparent_black())
|
||||
.rounded_md()
|
||||
.group_hover("active-editor-header", |this| {
|
||||
this.border_color(
|
||||
cx.theme().colors().border_variant,
|
||||
)
|
||||
})
|
||||
.on_click(|_, cx| {
|
||||
cx.dispatch_action(Box::new(ToggleDefaultPrompt));
|
||||
}),
|
||||
.child(EditorElement::new(
|
||||
&prompt_editor.title_editor,
|
||||
EditorStyle {
|
||||
background: cx.theme().system().transparent,
|
||||
local_player: cx.theme().players().local(),
|
||||
text: TextStyle {
|
||||
color: cx
|
||||
.theme()
|
||||
.colors()
|
||||
.editor_foreground,
|
||||
font_family: settings
|
||||
.ui_font
|
||||
.family
|
||||
.clone(),
|
||||
font_features: settings
|
||||
.ui_font
|
||||
.features
|
||||
.clone(),
|
||||
font_size: HeadlineSize::Large
|
||||
.size()
|
||||
.into(),
|
||||
font_weight: settings.ui_font.weight,
|
||||
line_height: relative(
|
||||
settings.buffer_line_height.value(),
|
||||
),
|
||||
..Default::default()
|
||||
},
|
||||
scrollbar_width: Pixels::ZERO,
|
||||
syntax: cx.theme().syntax().clone(),
|
||||
status: cx.theme().status().clone(),
|
||||
inlay_hints_style: HighlightStyle {
|
||||
color: Some(cx.theme().status().hint),
|
||||
..HighlightStyle::default()
|
||||
},
|
||||
suggestions_style: HighlightStyle {
|
||||
color: Some(cx.theme().status().predictive),
|
||||
..HighlightStyle::default()
|
||||
},
|
||||
},
|
||||
)),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex().justify_center().w_12().h_8().child(
|
||||
IconButton::new("delete-prompt", IconName::Trash)
|
||||
.size(ButtonSize::Large)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::for_action(
|
||||
"Delete Prompt",
|
||||
&DeletePrompt,
|
||||
cx,
|
||||
h_flex()
|
||||
.h_full()
|
||||
.child(
|
||||
h_flex()
|
||||
.h_full()
|
||||
.gap(Spacing::XXLarge.rems(cx))
|
||||
.child(div()),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.h_full()
|
||||
.gap(Spacing::XXLarge.rems(cx))
|
||||
.children(prompt_editor.token_count.map(
|
||||
|token_count| {
|
||||
let token_count: SharedString =
|
||||
token_count.to_string().into();
|
||||
let label_token_count: SharedString =
|
||||
token_count.to_string().into();
|
||||
|
||||
h_flex()
|
||||
.id("token_count")
|
||||
.tooltip(move |cx| {
|
||||
let token_count =
|
||||
token_count.clone();
|
||||
|
||||
Tooltip::with_meta(
|
||||
format!(
|
||||
"{} tokens",
|
||||
token_count.clone()
|
||||
),
|
||||
None,
|
||||
format!(
|
||||
"Model: {}",
|
||||
current_model
|
||||
.display_name()
|
||||
),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.child(
|
||||
Label::new(format!(
|
||||
"{} tokens",
|
||||
label_token_count.clone()
|
||||
))
|
||||
.color(Color::Muted),
|
||||
)
|
||||
},
|
||||
))
|
||||
.child(
|
||||
IconButton::new(
|
||||
"delete-prompt",
|
||||
IconName::Trash,
|
||||
)
|
||||
.size(ButtonSize::Large)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.shape(IconButtonShape::Square)
|
||||
.size(ButtonSize::Large)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::for_action(
|
||||
"Delete Prompt",
|
||||
&DeletePrompt,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click(|_, cx| {
|
||||
cx.dispatch_action(Box::new(DeletePrompt));
|
||||
}),
|
||||
)
|
||||
})
|
||||
.on_click(|_, cx| {
|
||||
cx.dispatch_action(Box::new(DeletePrompt));
|
||||
}),
|
||||
),
|
||||
.child(
|
||||
IconButton::new(
|
||||
"duplicate-prompt",
|
||||
IconName::BookCopy,
|
||||
)
|
||||
.size(ButtonSize::Large)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.shape(IconButtonShape::Square)
|
||||
.size(ButtonSize::Large)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::for_action(
|
||||
"Duplicate Prompt",
|
||||
&DuplicatePrompt,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click(|_, cx| {
|
||||
cx.dispatch_action(Box::new(
|
||||
DuplicatePrompt,
|
||||
));
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
IconButton::new(
|
||||
"toggle-default-prompt",
|
||||
IconName::Sparkle,
|
||||
)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.selected(prompt_metadata.default)
|
||||
.selected_icon(IconName::SparkleFilled)
|
||||
.icon_color(if prompt_metadata.default {
|
||||
Color::Accent
|
||||
} else {
|
||||
Color::Muted
|
||||
})
|
||||
.shape(IconButtonShape::Square)
|
||||
.size(ButtonSize::Large)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::text(
|
||||
if prompt_metadata.default {
|
||||
"Remove from Default Prompt"
|
||||
} else {
|
||||
"Add to Default Prompt"
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click(|_, cx| {
|
||||
cx.dispatch_action(Box::new(
|
||||
ToggleDefaultPrompt,
|
||||
));
|
||||
}),
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.on_action(cx.listener(Self::focus_picker))
|
||||
.on_action(cx.listener(Self::inline_assist))
|
||||
.on_action(cx.listener(Self::move_up_from_body))
|
||||
.flex_grow()
|
||||
.h_full()
|
||||
.child(prompt_editor.body_editor.clone()),
|
||||
),
|
||||
)
|
||||
}))
|
||||
@@ -840,6 +1029,7 @@ impl Render for PromptLibrary {
|
||||
.key_context("PromptLibrary")
|
||||
.on_action(cx.listener(|this, &NewPrompt, cx| this.new_prompt(cx)))
|
||||
.on_action(cx.listener(|this, &DeletePrompt, cx| this.delete_active_prompt(cx)))
|
||||
.on_action(cx.listener(|this, &DuplicatePrompt, cx| this.duplicate_active_prompt(cx)))
|
||||
.on_action(cx.listener(|this, &ToggleDefaultPrompt, cx| {
|
||||
this.toggle_default_for_active_prompt(cx)
|
||||
}))
|
||||
@@ -1115,24 +1305,3 @@ pub struct GlobalPromptStore(
|
||||
);
|
||||
|
||||
impl Global for GlobalPromptStore {}
|
||||
|
||||
fn title_from_body(body: impl IntoIterator<Item = char>) -> Option<SharedString> {
    let mut chars = body.into_iter().take_while(|c| *c != '\n').peekable();

    let mut level = 0;
    while let Some('#') = chars.peek() {
        level += 1;
        chars.next();
    }

    if level > 0 {
        let title = chars.collect::<String>().trim().to_string();
        if title.is_empty() {
            None
        } else {
            Some(title.into())
        }
    } else {
        None
    }
}
|
||||
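A few illustrative, test-style calls showing what the helper extracts; these assertions are not part of the diff and assume `SharedString`'s usual `From<&str>` conversion.

#[test]
fn title_from_body_examples() {
    // A level-one heading on the first line becomes the title.
    assert_eq!(
        title_from_body("# Release checklist\nbody text".chars()),
        Some("Release checklist".into())
    );
    // Deeper heading levels also yield a title; the leading '#'s are stripped.
    assert_eq!(title_from_body("### Notes\n".chars()), Some("Notes".into()));
    // A first line that isn't a heading produces no title.
    assert_eq!(title_from_body("plain first line\n# Later".chars()), None);
    // A bare '#' followed by only whitespace also produces no title.
    assert_eq!(title_from_body("#   \nbody".chars()), None);
}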
|
||||
@@ -6,118 +6,130 @@ pub fn generate_content_prompt(
|
||||
language_name: Option<&str>,
|
||||
buffer: BufferSnapshot,
|
||||
range: Range<usize>,
|
||||
project_name: Option<String>,
|
||||
_project_name: Option<String>,
|
||||
) -> anyhow::Result<String> {
|
||||
let mut prompt = String::new();
|
||||
|
||||
let content_type = match language_name {
|
||||
None | Some("Markdown" | "Plain Text") => {
|
||||
writeln!(prompt, "You are an expert engineer.")?;
|
||||
"Text"
|
||||
}
|
||||
Some(language_name) => {
|
||||
writeln!(prompt, "You are an expert {language_name} engineer.")?;
|
||||
writeln!(
|
||||
prompt,
|
||||
"Your answer MUST always and only be valid {}.",
|
||||
language_name
|
||||
"Here's a file of text that I'm going to ask you to make an edit to."
|
||||
)?;
|
||||
"Code"
|
||||
"text"
|
||||
}
|
||||
Some(language_name) => {
|
||||
writeln!(
|
||||
prompt,
|
||||
"Here's a file of {language_name} that I'm going to ask you to make an edit to."
|
||||
)?;
|
||||
"code"
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(project_name) = project_name {
|
||||
writeln!(
|
||||
prompt,
|
||||
"You are currently working inside the '{project_name}' project in code editor Zed."
|
||||
)?;
|
||||
}
|
||||
|
||||
writeln!(
|
||||
prompt,
|
||||
"The user has the following file open in the editor:"
|
||||
)?;
|
||||
const MAX_CTX: usize = 50000;
|
||||
let mut is_truncated = false;
|
||||
if range.is_empty() {
|
||||
write!(prompt, "```")?;
|
||||
if let Some(language_name) = language_name {
|
||||
write!(prompt, "{language_name}")?;
|
||||
}
|
||||
|
||||
for chunk in buffer.as_rope().chunks_in_range(0..range.start) {
|
||||
prompt.push_str(chunk);
|
||||
}
|
||||
prompt.push_str("<|CURSOR|>");
|
||||
for chunk in buffer.as_rope().chunks_in_range(range.start..buffer.len()) {
|
||||
prompt.push_str(chunk);
|
||||
}
|
||||
if !prompt.ends_with('\n') {
|
||||
prompt.push('\n');
|
||||
}
|
||||
writeln!(prompt, "```")?;
|
||||
prompt.push('\n');
|
||||
|
||||
writeln!(
|
||||
prompt,
|
||||
"Assume the cursor is located where the `<|CURSOR|>` span is."
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"{content_type} can't be replaced, so assume your answer will be inserted at the cursor.",
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"Generate {content_type} based on the users prompt: {user_prompt}",
|
||||
)
|
||||
.unwrap();
|
||||
prompt.push_str("The point you'll need to insert at is marked with <insert_here></insert_here>.\n\n<document>");
|
||||
} else {
|
||||
write!(prompt, "```")?;
|
||||
for chunk in buffer.as_rope().chunks() {
|
||||
prompt.push_str(chunk);
|
||||
}
|
||||
if !prompt.ends_with('\n') {
|
||||
prompt.push('\n');
|
||||
}
|
||||
writeln!(prompt, "```")?;
|
||||
prompt.push('\n');
|
||||
|
||||
writeln!(
|
||||
prompt,
|
||||
"In particular, the following piece of text is selected:"
|
||||
)?;
|
||||
write!(prompt, "```")?;
|
||||
if let Some(language_name) = language_name {
|
||||
write!(prompt, "{language_name}")?;
|
||||
}
|
||||
prompt.push('\n');
|
||||
prompt.push_str("The section you'll need to rewrite is marked with <rewrite_this></rewrite_this> tags.\n\n<document>");
|
||||
}
|
||||
// Include file content.
|
||||
let before_range = 0..range.start;
|
||||
let truncated_before = if before_range.len() > MAX_CTX {
|
||||
is_truncated = true;
|
||||
range.start - MAX_CTX..range.start
|
||||
} else {
|
||||
before_range
|
||||
};
|
||||
let mut non_rewrite_len = truncated_before.len();
|
||||
for chunk in buffer.text_for_range(truncated_before) {
|
||||
prompt.push_str(chunk);
|
||||
}
|
||||
if !range.is_empty() {
|
||||
prompt.push_str("<rewrite_this>\n");
|
||||
for chunk in buffer.text_for_range(range.clone()) {
|
||||
prompt.push_str(chunk);
|
||||
}
|
||||
if !prompt.ends_with('\n') {
|
||||
prompt.push('\n');
|
||||
}
|
||||
writeln!(prompt, "```")?;
|
||||
prompt.push('\n');
|
||||
|
||||
writeln!(
|
||||
prompt,
|
||||
"Modify the user's selected {content_type} based upon the users prompt: {user_prompt}"
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"You must reply with only the adjusted {content_type}, not the entire file."
|
||||
)
|
||||
.unwrap();
|
||||
prompt.push_str("\n<rewrite_this>");
|
||||
} else {
|
||||
prompt.push_str("<insert_here></insert_here>");
|
||||
}
|
||||
let after_range = range.end..buffer.len();
|
||||
let truncated_after = if after_range.len() > MAX_CTX {
|
||||
is_truncated = true;
|
||||
range.end..range.end + MAX_CTX
|
||||
} else {
|
||||
after_range
|
||||
};
|
||||
non_rewrite_len += truncated_after.len();
|
||||
for chunk in buffer.text_for_range(truncated_after) {
|
||||
prompt.push_str(chunk);
|
||||
}
|
||||
|
||||
writeln!(prompt, "Never make remarks about the output.").unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"Do not return anything else, except the generated {content_type}."
|
||||
)
|
||||
.unwrap();
|
||||
write!(prompt, "</document>\n\n").unwrap();
|
||||
|
||||
if is_truncated {
|
||||
writeln!(prompt, "The context around the relevant section has been truncated (possibly in the middle of a line) for brevity.\n")?;
|
||||
}
|
||||
|
||||
if range.is_empty() {
|
||||
writeln!(
|
||||
prompt,
|
||||
"You can't replace {content_type}, your answer will be inserted in place of the `<insert_here></insert_here>` tags. Don't include the insert_here tags in your output.",
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"Generate {content_type} based on the following prompt:\n\n<prompt>\n{user_prompt}\n</prompt>",
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(prompt, "Match the indentation in the original file in the inserted {content_type}, don't include any indentation on blank lines.\n").unwrap();
|
||||
prompt.push_str("Immediately start with the following format with no remarks:\n\n```\n{{INSERTED_CODE}}\n```");
|
||||
} else {
|
||||
writeln!(prompt, "Edit the section of {content_type} in <rewrite_this></rewrite_this> tags based on the following prompt:'").unwrap();
|
||||
writeln!(prompt, "\n<prompt>\n{user_prompt}\n</prompt>\n").unwrap();
|
||||
let rewrite_len = range.end - range.start;
|
||||
if rewrite_len < 20000 && rewrite_len * 2 < non_rewrite_len {
|
||||
writeln!(prompt, "And here's the section to rewrite based on that prompt again for reference:\n\n<rewrite_this>\n").unwrap();
|
||||
for chunk in buffer.text_for_range(range.clone()) {
|
||||
prompt.push_str(chunk);
|
||||
}
|
||||
writeln!(prompt, "\n</rewrite_this>\n").unwrap();
|
||||
}
|
||||
writeln!(prompt, "Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {content_type} will be preserved.\n").unwrap();
|
||||
write!(
|
||||
prompt,
|
||||
"Start at the indentation level in the original file in the rewritten {content_type}. "
|
||||
)
|
||||
.unwrap();
|
||||
prompt.push_str("Don't stop until you've rewritten the entire section, even if you have no more changes to make, always write out the whole section with no unnecessary elisions.");
|
||||
prompt.push_str("\n\nImmediately start with the following format with no remarks:\n\n```\n{{REWRITTEN_CODE}}\n```");
|
||||
}
|
||||
|
||||
Ok(prompt)
|
||||
}
|
||||
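A standalone sketch of the truncation windows applied above, assuming plain byte offsets. `context_windows` is a made-up helper for illustration; only the `MAX_CTX = 50000` limit comes from the code.

use std::ops::Range;

const MAX_CTX: usize = 50_000;

/// Keep at most MAX_CTX bytes immediately before the selection and at most
/// MAX_CTX bytes immediately after it; anything further away is dropped.
fn context_windows(selection: Range<usize>, buffer_len: usize) -> (Range<usize>, Range<usize>) {
    let before = selection.start.saturating_sub(MAX_CTX)..selection.start;
    let after = selection.end..(selection.end + MAX_CTX).min(buffer_len);
    (before, after)
}

fn main() {
    // A selection deep inside a large buffer keeps 50k bytes on each side.
    let (before, after) = context_windows(120_000..121_000, 400_000);
    assert_eq!(before, 70_000..120_000);
    assert_eq!(after, 121_000..171_000);

    // Small buffers are never truncated.
    let (before, after) = context_windows(10..20, 100);
    assert_eq!(before, 0..10);
    assert_eq!(after, 20..100);
}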
|
||||
pub fn generate_terminal_assistant_prompt(
    user_prompt: &str,
    shell: Option<&str>,
    working_directory: Option<&str>,
) -> String {
    let mut prompt = String::new();
    writeln!(&mut prompt, "You are an expert terminal user.").unwrap();
    writeln!(&mut prompt, "You will be given a description of a command and you need to respond with a command that matches the description.").unwrap();
    writeln!(&mut prompt, "Do not include markdown blocks or any other text formatting in your response, always respond with a single command that can be executed in the given shell.").unwrap();
    if let Some(shell) = shell {
        writeln!(&mut prompt, "Current shell is '{shell}'.").unwrap();
    }
    if let Some(working_directory) = working_directory {
        writeln!(
            &mut prompt,
            "Current working directory is '{working_directory}'."
        )
        .unwrap();
    }
    writeln!(&mut prompt, "Here is the description of the command:").unwrap();
    prompt.push_str(user_prompt);
    prompt
}
|
||||
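An illustrative call; the request, shell, and directory below are made up for the example.

let prompt = generate_terminal_assistant_prompt(
    "show the five largest files in this directory",
    Some("zsh"),
    Some("/home/me/projects/zed"),
);
// The resulting prompt states the shell and working directory before the
// user's request, so the model can answer with one runnable command.
assert!(prompt.contains("Current shell is 'zsh'."));
assert!(prompt.ends_with("show the five largest files in this directory"));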
|
||||
@@ -3,10 +3,10 @@ use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use assistant_slash_command::{SlashCommand, SlashCommandOutput, SlashCommandOutputSection};
|
||||
use chrono::{DateTime, Local};
|
||||
use chrono::Local;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use ui::{prelude::*, ButtonLike, ElevationIndex};
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct NowSlashCommand;
|
||||
@@ -46,7 +46,7 @@ impl SlashCommand for NowSlashCommand {
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let now = Local::now();
|
||||
let text = format!("Today is {now}.", now = now.to_rfc3339());
|
||||
let text = format!("Today is {now}.", now = now.to_rfc2822());
|
||||
let range = 0..text.len();
|
||||
|
||||
Task::ready(Ok(SlashCommandOutput {
|
||||
@@ -54,29 +54,9 @@ impl SlashCommand for NowSlashCommand {
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range,
|
||||
icon: IconName::CountdownTimer,
|
||||
label: now.to_rfc3339().into(),
|
||||
label: now.to_rfc2822().into(),
|
||||
}],
|
||||
run_commands_in_text: false,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(IntoElement)]
|
||||
struct NowPlaceholder {
|
||||
pub id: ElementId,
|
||||
pub unfold: Arc<dyn Fn(&mut WindowContext)>,
|
||||
pub now: DateTime<Local>,
|
||||
}
|
||||
|
||||
impl RenderOnce for NowPlaceholder {
|
||||
fn render(self, _cx: &mut WindowContext) -> impl IntoElement {
|
||||
let unfold = self.unfold;
|
||||
|
||||
ButtonLike::new(self.id)
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ElevatedSurface)
|
||||
.child(Icon::new(IconName::CountdownTimer))
|
||||
.child(Label::new(self.now.to_rfc3339()))
|
||||
.on_click(move |_, cx| unfold(cx))
|
||||
}
|
||||
}
|
||||
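For reference, the two chrono formats involved in the change above produce output along these lines; the timestamps in the comments are examples, not captured output.

use chrono::Local;

fn main() {
    let now = Local::now();
    // RFC 3339, the previous label format: e.g. 2024-06-07T10:30:00.123456-07:00
    println!("{}", now.to_rfc3339());
    // RFC 2822, the new label format: e.g. Fri, 7 Jun 2024 10:30:00 -0700
    println!("{}", now.to_rfc2822());
}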
|
||||
@@ -8,9 +8,12 @@ use fs::Fs;
|
||||
use futures::AsyncReadExt;
|
||||
use gpui::{AppContext, Model, Task, WeakView};
|
||||
use http::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use indexed_docs::{
|
||||
convert_rustdoc_to_markdown, IndexedDocsRegistry, IndexedDocsStore, LocalProvider, PackageName,
|
||||
ProviderId, RustdocIndexer, RustdocSource,
|
||||
};
|
||||
use language::LspAdapterDelegate;
|
||||
use project::{Project, ProjectPath};
|
||||
use rustdoc::{convert_rustdoc_to_markdown, CrateName, LocalProvider, RustdocSource, RustdocStore};
|
||||
use ui::prelude::*;
|
||||
use util::{maybe, ResultExt};
|
||||
use workspace::Workspace;
|
||||
@@ -21,7 +24,7 @@ impl RustdocSlashCommand {
|
||||
async fn build_message(
|
||||
fs: Arc<dyn Fs>,
|
||||
http_client: Arc<HttpClientWithUrl>,
|
||||
crate_name: CrateName,
|
||||
crate_name: PackageName,
|
||||
module_path: Vec<String>,
|
||||
path_to_cargo_toml: Option<&Path>,
|
||||
) -> Result<(RustdocSource, String)> {
|
||||
@@ -87,6 +90,42 @@ impl RustdocSlashCommand {
|
||||
project.read(cx).absolute_path(&path, cx)?.as_path(),
|
||||
))
|
||||
}
|
||||
|
||||
/// Ensures that the rustdoc provider is registered.
|
||||
///
|
||||
/// Ideally we would do this sooner, but we need to wait until we're able to
|
||||
/// access the workspace so we can read the project.
|
||||
fn ensure_rustdoc_provider_is_registered(
|
||||
&self,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut AppContext,
|
||||
) {
|
||||
let indexed_docs_registry = IndexedDocsRegistry::global(cx);
|
||||
if indexed_docs_registry
|
||||
.get_provider_store(ProviderId::rustdoc())
|
||||
.is_none()
|
||||
{
|
||||
let index_provider_deps = maybe!({
|
||||
let workspace = workspace.ok_or_else(|| anyhow!("no workspace"))?;
|
||||
let workspace = workspace
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("workspace was dropped"))?;
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let cargo_workspace_root = Self::path_to_cargo_toml(project, cx)
|
||||
.and_then(|path| path.parent().map(|path| path.to_path_buf()))
|
||||
.ok_or_else(|| anyhow!("no Cargo workspace root found"))?;
|
||||
|
||||
anyhow::Ok((fs, cargo_workspace_root))
|
||||
});
|
||||
|
||||
if let Some((fs, cargo_workspace_root)) = index_provider_deps.log_err() {
|
||||
indexed_docs_registry.register_provider(Box::new(RustdocIndexer::new(Box::new(
|
||||
LocalProvider::new(fs, cargo_workspace_root),
|
||||
))));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SlashCommand for RustdocSlashCommand {
|
||||
@@ -113,30 +152,17 @@ impl SlashCommand for RustdocSlashCommand {
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut AppContext,
|
||||
) -> Task<Result<Vec<String>>> {
|
||||
let index_provider_deps = maybe!({
|
||||
let workspace = workspace.ok_or_else(|| anyhow!("no workspace"))?;
|
||||
let workspace = workspace
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("workspace was dropped"))?;
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let cargo_workspace_root = Self::path_to_cargo_toml(project, cx)
|
||||
.and_then(|path| path.parent().map(|path| path.to_path_buf()))
|
||||
.ok_or_else(|| anyhow!("no Cargo workspace root found"))?;
|
||||
self.ensure_rustdoc_provider_is_registered(workspace, cx);
|
||||
|
||||
anyhow::Ok((fs, cargo_workspace_root))
|
||||
});
|
||||
|
||||
let store = RustdocStore::global(cx);
|
||||
let store = IndexedDocsStore::try_global(ProviderId::rustdoc(), cx);
|
||||
cx.background_executor().spawn(async move {
|
||||
let store = store?;
|
||||
|
||||
if let Some((crate_name, rest)) = query.split_once(':') {
|
||||
if rest.is_empty() {
|
||||
if let Some((fs, cargo_workspace_root)) = index_provider_deps.log_err() {
|
||||
let provider = Box::new(LocalProvider::new(fs, cargo_workspace_root));
|
||||
// We don't need to hold onto this task, as the `RustdocStore` will hold it
|
||||
// until it completes.
|
||||
let _ = store.clone().index(crate_name.into(), provider);
|
||||
}
|
||||
// We don't need to hold onto this task, as the `IndexedDocsStore` will hold it
|
||||
// until it completes.
|
||||
let _ = store.clone().index(crate_name.into());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -169,16 +195,17 @@ impl SlashCommand for RustdocSlashCommand {
|
||||
.next()
|
||||
.ok_or_else(|| anyhow!("missing crate name"))
|
||||
{
|
||||
Ok(crate_name) => CrateName::from(crate_name),
|
||||
Ok(crate_name) => PackageName::from(crate_name),
|
||||
Err(err) => return Task::ready(Err(err)),
|
||||
};
|
||||
let item_path = path_components.map(ToString::to_string).collect::<Vec<_>>();
|
||||
|
||||
let text = cx.background_executor().spawn({
|
||||
let rustdoc_store = RustdocStore::global(cx);
|
||||
let rustdoc_store = IndexedDocsStore::try_global(ProviderId::rustdoc(), cx);
|
||||
let crate_name = crate_name.clone();
|
||||
let item_path = item_path.clone();
|
||||
async move {
|
||||
let rustdoc_store = rustdoc_store?;
|
||||
let item_docs = rustdoc_store
|
||||
.load(
|
||||
crate_name.clone(),
|
||||
@@ -191,7 +218,7 @@ impl SlashCommand for RustdocSlashCommand {
|
||||
.await;
|
||||
|
||||
if let Ok(item_docs) = item_docs {
|
||||
anyhow::Ok((RustdocSource::Index, item_docs.docs().to_owned()))
|
||||
anyhow::Ok((RustdocSource::Index, item_docs.to_string()))
|
||||
} else {
|
||||
Self::build_message(
|
||||
fs,
|
||||
|
||||
1122
crates/assistant/src/terminal_inline_assistant.rs
Normal file
@@ -196,23 +196,24 @@ mod linux {
|
||||
impl Detect {
|
||||
pub fn detect(path: Option<&Path>) -> anyhow::Result<impl InstalledApp> {
|
||||
let path = if let Some(path) = path {
|
||||
path.to_path_buf().canonicalize()
|
||||
path.to_path_buf().canonicalize()?
|
||||
} else {
|
||||
let cli = env::current_exe()?;
|
||||
let dir = cli
|
||||
.parent()
|
||||
.and_then(Path::parent)
|
||||
.ok_or_else(|| anyhow!("no parent path for cli"))?;
|
||||
|
||||
match dir.join("libexec").join("zed-editor").canonicalize() {
|
||||
Ok(path) => Ok(path),
|
||||
// In development cli and zed are in the ./target/ directory together
|
||||
Err(e) => match cli.parent().unwrap().join("zed").canonicalize() {
|
||||
Ok(path) if path != cli => Ok(path),
|
||||
_ => Err(e),
|
||||
},
|
||||
}
|
||||
}?;
|
||||
// libexec is the standard, lib/zed is for Arch (and other non-libexec distros),
|
||||
// ./zed is for the target directory in development builds.
|
||||
let possible_locations =
|
||||
["../libexec/zed-editor", "../lib/zed/zed-editor", "./zed"];
|
||||
possible_locations
|
||||
.iter()
|
||||
.find_map(|p| dir.join(p).canonicalize().ok().filter(|path| path != &cli))
|
||||
.ok_or_else(|| {
|
||||
anyhow!("could not find any of: {}", possible_locations.join(", "))
|
||||
})?
|
||||
};
|
||||
|
||||
Ok(App(path))
|
||||
}
|
||||
|
||||
@@ -202,6 +202,10 @@ impl Telemetry {
|
||||
event_coalescer: EventCoalescer::new(clock.clone()),
|
||||
max_queue_size: MAX_QUEUE_LEN,
|
||||
worktree_id_map: WorktreeIdMap(HashMap::from_iter([
|
||||
(
|
||||
"pnpm-lock.yaml".to_string(),
|
||||
ProjectCache::new("pnpm".to_string()),
|
||||
),
|
||||
(
|
||||
"yarn.lock".to_string(),
|
||||
ProjectCache::new("yarn".to_string()),
|
||||
@@ -611,6 +615,7 @@ impl Telemetry {
|
||||
|
||||
let request_body = EventRequestBody {
|
||||
installation_id: state.installation_id.as_deref().map(Into::into),
|
||||
metrics_id: state.metrics_id.as_deref().map(Into::into),
|
||||
session_id: state.session_id.clone(),
|
||||
is_staff: state.is_staff,
|
||||
app_version: state.app_version.clone(),
|
||||
|
||||
@@ -87,51 +87,27 @@ impl Global {
|
||||
}
|
||||
|
||||
pub fn observed_any(&self, other: &Self) -> bool {
|
||||
let mut lhs = self.0.iter();
|
||||
let mut rhs = other.0.iter();
|
||||
loop {
|
||||
if let Some(left) = lhs.next() {
|
||||
if let Some(right) = rhs.next() {
|
||||
if *right > 0 && left >= right {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
self.0
|
||||
.iter()
|
||||
.zip(other.0.iter())
|
||||
.any(|(left, right)| *right > 0 && left >= right)
|
||||
}
|
||||
|
||||
pub fn observed_all(&self, other: &Self) -> bool {
|
||||
let mut lhs = self.0.iter();
|
||||
let mut rhs = other.0.iter();
|
||||
loop {
|
||||
if let Some(left) = lhs.next() {
|
||||
if let Some(right) = rhs.next() {
|
||||
if left < right {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
return rhs.next().is_none();
|
||||
}
|
||||
}
|
||||
self.0.iter().all(|left| match rhs.next() {
|
||||
Some(right) => left >= right,
|
||||
None => true,
|
||||
}) && rhs.next().is_none()
|
||||
}
|
||||
|
||||
pub fn changed_since(&self, other: &Self) -> bool {
|
||||
if self.0.len() > other.0.len() {
|
||||
return true;
|
||||
}
|
||||
for (left, right) in self.0.iter().zip(other.0.iter()) {
|
||||
if left > right {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
self.0.len() > other.0.len()
|
||||
|| self
|
||||
.0
|
||||
.iter()
|
||||
.zip(other.0.iter())
|
||||
.any(|(left, right)| left > right)
|
||||
}
|
||||
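A standalone sketch of the pairwise semantics these iterator chains preserve, using plain `Vec<u32>` slices in place of the crate's `Global`/`Lamport` types; the function names and values below are illustrative only.

fn observed_any(lhs: &[u32], rhs: &[u32]) -> bool {
    // True when some non-zero component of `rhs` is already covered by `lhs`.
    lhs.iter()
        .zip(rhs.iter())
        .any(|(left, right)| *right > 0 && left >= right)
}

fn observed_all(lhs: &[u32], rhs: &[u32]) -> bool {
    // Every component of `rhs` must be covered; the trailing check catches
    // components of `rhs` that extend past the end of `lhs`.
    let mut rhs_iter = rhs.iter();
    lhs.iter().all(|left| match rhs_iter.next() {
        Some(right) => left >= right,
        None => true,
    }) && rhs_iter.next().is_none()
}

fn main() {
    assert!(observed_any(&[3, 1], &[2, 0])); // 3 >= 2 on the first replica
    assert!(!observed_all(&[3, 1], &[2, 2])); // second replica lags: 1 < 2
    assert!(observed_all(&[3, 2], &[2, 2])); // both components covered
    assert!(!observed_all(&[3], &[2, 1])); // rhs knows about an extra replica
}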
|
||||
pub fn iter(&self) -> impl Iterator<Item = Lamport> + '_ {
|
||||
|
||||
@@ -664,6 +664,7 @@ where
|
||||
#[derive(Serialize, Debug, clickhouse::Row)]
|
||||
pub struct EditorEventRow {
|
||||
installation_id: String,
|
||||
metrics_id: String,
|
||||
operation: String,
|
||||
app_version: String,
|
||||
file_extension: String,
|
||||
@@ -713,6 +714,7 @@ impl EditorEventRow {
|
||||
os_version: body.os_version.clone().unwrap_or_default(),
|
||||
architecture: body.architecture.clone(),
|
||||
installation_id: body.installation_id.clone().unwrap_or_default(),
|
||||
metrics_id: body.metrics_id.clone().unwrap_or_default(),
|
||||
session_id: body.session_id.clone(),
|
||||
is_staff: body.is_staff,
|
||||
time: time.timestamp_millis(),
|
||||
|
||||
@@ -53,9 +53,17 @@ async fn get_extensions(
|
||||
let extensions = if let Some(extension_ids) = extension_ids {
|
||||
app.db.get_extensions_by_ids(&extension_ids, None).await?
|
||||
} else {
|
||||
app.db
|
||||
let result = app
|
||||
.db
|
||||
.get_extensions(params.filter.as_deref(), params.max_schema_version, 500)
|
||||
.await?
|
||||
.await?;
|
||||
|
||||
if let Some(query) = params.filter.as_deref() {
|
||||
let count = result.len();
|
||||
tracing::info!(query, count, "extension_search")
|
||||
}
|
||||
|
||||
result
|
||||
};
|
||||
|
||||
Ok(Json(GetExtensionsResponse { data: extensions }))
|
||||
|
||||
@@ -2583,14 +2583,13 @@ async fn rejoin_dev_server_projects(
|
||||
)
|
||||
.await?
|
||||
};
|
||||
notify_rejoined_projects(&mut rejoined_projects, &session)?;
|
||||
|
||||
response.send(proto::RejoinRemoteProjectsResponse {
|
||||
rejoined_projects: rejoined_projects
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|project| project.to_proto())
|
||||
.collect(),
|
||||
})
|
||||
})?;
|
||||
notify_rejoined_projects(&mut rejoined_projects, &session)
|
||||
}
|
||||
|
||||
async fn reconnect_dev_server(
|
||||
@@ -4463,6 +4462,7 @@ async fn complete_with_open_ai(
|
||||
tool_calls: choice
|
||||
.delta
|
||||
.tool_calls
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.map(|delta| proto::ToolCallDelta {
|
||||
index: delta.index as u32,
|
||||
|
||||
@@ -73,6 +73,7 @@ impl ConnectionPool {
|
||||
pub fn reset(&mut self) {
|
||||
self.connections.clear();
|
||||
self.connected_users.clear();
|
||||
self.connected_dev_servers.clear();
|
||||
self.channels.clear();
|
||||
}
|
||||
|
||||
|
||||
@@ -504,6 +504,29 @@ async fn test_dev_server_reconnect(
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dev_server_restart(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppContext) {
|
||||
let (server, client1) = TestServer::start1(cx1).await;
|
||||
|
||||
let (_dev_server, remote_workspace) =
|
||||
create_dev_server_project(&server, client1.app_state.clone(), cx1, cx2).await;
|
||||
let cx = VisualTestContext::from_window(remote_workspace.into(), cx1).as_mut();
|
||||
|
||||
server.reset().await;
|
||||
cx.run_until_parked();
|
||||
|
||||
cx.simulate_keystrokes("cmd-p 1 enter");
|
||||
remote_workspace
|
||||
.update(cx, |ws, cx| {
|
||||
ws.active_item_as::<Editor>(cx)
|
||||
.unwrap()
|
||||
.update(cx, |ed, cx| {
|
||||
assert_eq!(ed.text(cx).to_string(), "remote\nremote\nremote");
|
||||
})
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_create_dev_server_project_path_validation(
|
||||
cx1: &mut gpui::TestAppContext,
|
||||
|
||||
@@ -30,17 +30,13 @@ test-support = [
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
auto_update.workspace = true
|
||||
call.workspace = true
|
||||
channel.workspace = true
|
||||
client.workspace = true
|
||||
collections.workspace = true
|
||||
command_palette.workspace = true
|
||||
db.workspace = true
|
||||
editor.workspace = true
|
||||
emojis.workspace = true
|
||||
extensions_ui.workspace = true
|
||||
feedback.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
gpui.workspace = true
|
||||
@@ -51,8 +47,6 @@ notifications.workspace = true
|
||||
parking_lot.workspace = true
|
||||
picker.workspace = true
|
||||
project.workspace = true
|
||||
recent_projects.workspace = true
|
||||
dev_server_projects.workspace = true
|
||||
release_channel.workspace = true
|
||||
rich_text.workspace = true
|
||||
rpc.workspace = true
|
||||
@@ -64,14 +58,13 @@ settings.workspace = true
|
||||
smallvec.workspace = true
|
||||
story = { workspace = true, optional = true }
|
||||
theme.workspace = true
|
||||
theme_selector.workspace = true
|
||||
time_format.workspace = true
|
||||
time.workspace = true
|
||||
title_bar.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
vcs_menu.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
call = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -2,10 +2,7 @@ mod channel_modal;
|
||||
mod contact_finder;
|
||||
|
||||
use self::channel_modal::ChannelModal;
|
||||
use crate::{
|
||||
channel_view::ChannelView, chat_panel::ChatPanel, face_pile::FacePile,
|
||||
CollaborationPanelSettings,
|
||||
};
|
||||
use crate::{channel_view::ChannelView, chat_panel::ChatPanel, CollaborationPanelSettings};
|
||||
use call::ActiveCall;
|
||||
use channel::{Channel, ChannelEvent, ChannelStore};
|
||||
use client::{ChannelId, Client, Contact, ProjectId, User, UserStore};
|
||||
@@ -34,7 +31,8 @@ use std::{mem, sync::Arc};
|
||||
use theme::{ActiveTheme, ThemeSettings};
|
||||
use ui::{
|
||||
prelude::*, tooltip_container, Avatar, AvatarAvailabilityIndicator, Button, Color, ContextMenu,
|
||||
Icon, IconButton, IconName, IconSize, Indicator, Label, ListHeader, ListItem, Tooltip,
|
||||
Facepile, Icon, IconButton, IconName, IconSize, Indicator, Label, ListHeader, ListItem,
|
||||
Tooltip,
|
||||
};
|
||||
use util::{maybe, ResultExt, TryFutureExt};
|
||||
use workspace::{
|
||||
@@ -2542,7 +2540,7 @@ impl CollabPanel {
|
||||
None
|
||||
} else {
|
||||
let extra_count = participants.len().saturating_sub(FACEPILE_LIMIT);
|
||||
let result = FacePile::new(
|
||||
let result = Facepile::new(
|
||||
participants
|
||||
.iter()
|
||||
.map(|user| Avatar::new(user.avatar_uri.clone()).into_any_element())
|
||||
|
||||
@@ -1,20 +1,16 @@
|
||||
pub mod channel_view;
|
||||
pub mod chat_panel;
|
||||
pub mod collab_panel;
|
||||
mod collab_titlebar_item;
|
||||
mod face_pile;
|
||||
pub mod notification_panel;
|
||||
pub mod notifications;
|
||||
mod panel_settings;
|
||||
|
||||
use std::{rc::Rc, sync::Arc};
|
||||
|
||||
use call::{report_call_event_for_room, ActiveCall};
|
||||
pub use collab_panel::CollabPanel;
|
||||
pub use collab_titlebar_item::CollabTitlebarItem;
|
||||
use gpui::{
|
||||
actions, point, AppContext, Pixels, PlatformDisplay, Size, Task, WindowBackgroundAppearance,
|
||||
WindowBounds, WindowContext, WindowKind, WindowOptions,
|
||||
point, AppContext, Pixels, PlatformDisplay, Size, WindowBackgroundAppearance, WindowBounds,
|
||||
WindowDecorations, WindowKind, WindowOptions,
|
||||
};
|
||||
use panel_settings::MessageEditorSettings;
|
||||
pub use panel_settings::{
|
||||
@@ -23,12 +19,7 @@ pub use panel_settings::{
|
||||
use release_channel::ReleaseChannel;
|
||||
use settings::Settings;
|
||||
use ui::px;
|
||||
use workspace::{notifications::DetachAndPromptErr, AppState};
|
||||
|
||||
actions!(
|
||||
collab,
|
||||
[ToggleScreenSharing, ToggleMute, ToggleDeafen, LeaveCall]
|
||||
);
|
||||
use workspace::AppState;
|
||||
|
||||
pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
|
||||
CollaborationPanelSettings::register(cx);
|
||||
@@ -36,63 +27,13 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
|
||||
NotificationPanelSettings::register(cx);
|
||||
MessageEditorSettings::register(cx);
|
||||
|
||||
vcs_menu::init(cx);
|
||||
collab_titlebar_item::init(cx);
|
||||
collab_panel::init(cx);
|
||||
channel_view::init(cx);
|
||||
chat_panel::init(cx);
|
||||
collab_panel::init(cx);
|
||||
notification_panel::init(cx);
|
||||
notifications::init(&app_state, cx);
|
||||
}
|
||||
|
||||
pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut WindowContext) {
|
||||
let call = ActiveCall::global(cx).read(cx);
|
||||
if let Some(room) = call.room().cloned() {
|
||||
let client = call.client();
|
||||
let toggle_screen_sharing = room.update(cx, |room, cx| {
|
||||
if room.is_screen_sharing() {
|
||||
report_call_event_for_room(
|
||||
"disable screen share",
|
||||
room.id(),
|
||||
room.channel_id(),
|
||||
&client,
|
||||
);
|
||||
Task::ready(room.unshare_screen(cx))
|
||||
} else {
|
||||
report_call_event_for_room(
|
||||
"enable screen share",
|
||||
room.id(),
|
||||
room.channel_id(),
|
||||
&client,
|
||||
);
|
||||
room.share_screen(cx)
|
||||
}
|
||||
});
|
||||
toggle_screen_sharing.detach_and_prompt_err("Sharing Screen Failed", cx, |e, _| Some(format!("{:?}\n\nPlease check that you have given Zed permissions to record your screen in Settings.", e)));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
|
||||
let call = ActiveCall::global(cx).read(cx);
|
||||
if let Some(room) = call.room().cloned() {
|
||||
let client = call.client();
|
||||
room.update(cx, |room, cx| {
|
||||
let operation = if room.is_muted() {
|
||||
"enable microphone"
|
||||
} else {
|
||||
"disable microphone"
|
||||
};
|
||||
report_call_event_for_room(operation, room.id(), room.channel_id(), &client);
|
||||
|
||||
room.toggle_mute(cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pub fn toggle_deafen(_: &ToggleDeafen, cx: &mut AppContext) {
|
||||
if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
|
||||
room.update(cx, |room, cx| room.toggle_deafen(cx));
|
||||
}
|
||||
title_bar::init(cx);
|
||||
vcs_menu::init(cx);
|
||||
}
|
||||
|
||||
fn notification_window_options(
|
||||
@@ -122,7 +63,9 @@ fn notification_window_options(
|
||||
kind: WindowKind::PopUp,
|
||||
is_movable: false,
|
||||
display_id: Some(screen.id()),
|
||||
window_background: WindowBackgroundAppearance::default(),
|
||||
window_background: WindowBackgroundAppearance::Transparent,
|
||||
app_id: Some(app_id.to_owned()),
|
||||
window_min_size: None,
|
||||
window_decorations: Some(WindowDecorations::Client),
|
||||
}
|
||||
}
|
||||
|
||||
crates/crdb/Cargo.toml: new file, 40 lines
@@ -0,0 +1,40 @@
[package]
name = "crdb"
version = "0.1.0"
edition = "2021"

[lib]
path = "src/crdb.rs"
doctest = false

[features]
test-support = ["collections/test-support", "util/test-support"]

[dependencies]
collections = { path = "../collections" }
util = { path = "../util" }

anyhow.workspace = true
arrayvec = { version = "0.7.1", features = ["serde"] }
bromberg_sl2 = { git = "https://github.com/zed-industries/bromberg_sl2", rev = "6faf816bd5b4b7b2b6ea77495686634732ded095" }
futures.workspace = true
lazy_static.workspace = true
log.workspace = true
parking_lot.workspace = true
portable-atomic = { version = "1", features = ["serde"] }
serde.workspace = true
serde_json.workspace = true
serde_bare = "0.5"
smallvec.workspace = true
uuid = { version = "1.3", features = ["v4", "fast-rng", "serde"] }

[dev-dependencies]
collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }

async-broadcast = "0.4"
ctor.workspace = true
env_logger.workspace = true
rand.workspace = true
smol.workspace = true
crates/crdb/src/README.md: new file, 35 lines
@@ -0,0 +1,35 @@
# CRDB: A conflict-free replicated database for code and markdown

Our goal is for this database to contain all the text inserted in Zed.

## Contexts

The database is divided into *contexts*, with each context containing a collection of *documents*.

### Contexts contain documents

These contexts and the documents are really just namespaces in a global table of document *fragments*. Each fragment is a sequence of one or more characters, which may or may not be visible in a given branch.

#### Documents with paths are files

Documents in a context can be associated with metadata. If a document is associated with a relative path, it represents a file. A context that contains files can be synchronized with a directory tree on the file system, much like a Git repository.

#### Conversations are also documents

Contexts can also be associated with conversations, which are special documents that embed other documents that represent messages. Messages are embedded via a mechanism called *portals*, which will be discussed further below.

### Contexts occupy a hierarchical namespace

For example, at genesis, zed.dev will contain the following channels:

#zed
- This is where people get oriented about what Zed is all about. We'll link to it from our landing page.
#zed/staff
- Here's where we talk about stuff private to the company, and host company-specific files.
#zed/insiders
- Users we've worked with.
#zed/zed
- This contains the actual source code for Zed.
- It also has a conversation where potential contributors can engage with us and each other.
#zed/zed/debugger
- A subcontext of zed/zed where we talk about and eventually implement a debugger. Associated with a different branch of zed/zed where the debugger is being built, but could also have multiple branches. Branches and contexts are independent.
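The README above is prose only at this point; below is a minimal Rust sketch of how contexts, documents, and fragments could relate. Every type, field, and function name here is illustrative and does not appear in the crdb crate.

use std::collections::BTreeMap;
use std::path::PathBuf;

/// Illustrative only: a context is a namespace (e.g. "zed/zed") holding documents.
struct Context {
    name: String,
    documents: BTreeMap<DocumentId, Document>,
}

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct DocumentId(u64);

/// A document is an ordered sequence of fragments. A relative path marks it as
/// a file that can be synced with a directory tree, much like a Git working copy.
struct Document {
    path: Option<PathBuf>,
    fragments: Vec<Fragment>,
}

/// A fragment is a run of characters that may be hidden on a given branch
/// without being removed from the global fragment table.
struct Fragment {
    text: String,
    visible: bool,
}

/// The text a reader sees on one branch is the concatenation of visible fragments.
fn visible_text(document: &Document) -> String {
    document
        .fragments
        .iter()
        .filter(|fragment| fragment.visible)
        .map(|fragment| fragment.text.as_str())
        .collect()
}

fn main() {
    let doc = Document {
        path: Some(PathBuf::from("README.md")),
        fragments: vec![
            Fragment { text: "Hello".into(), visible: true },
            Fragment { text: " cruel".into(), visible: false },
            Fragment { text: " world".into(), visible: true },
        ],
    };
    assert_eq!(visible_text(&doc), "Hello world");

    let mut context = Context { name: "zed/zed".into(), documents: BTreeMap::new() };
    context.documents.insert(DocumentId(1), doc);
}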
crates/crdb/src/btree.rs: new file, 1957 lines (diff not shown here)
crates/crdb/src/btree/cursor.rs: new file, 755 lines
@@ -0,0 +1,755 @@
|
||||
use super::*;
|
||||
use arrayvec::ArrayVec;
|
||||
use std::{cmp::Ordering, mem, sync::Arc};
|
||||
|
||||
#[derive(Clone)]
|
||||
struct StackEntry<'a, T: Item, D> {
|
||||
tree: &'a Sequence<T>,
|
||||
index: usize,
|
||||
position: D,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Cursor<'a, T: Item, D> {
|
||||
tree: &'a Sequence<T>,
|
||||
stack: ArrayVec<StackEntry<'a, T, D>, 16>,
|
||||
position: D,
|
||||
did_seek: bool,
|
||||
at_end: bool,
|
||||
}
|
||||
|
||||
pub struct Iter<'a, T: Item> {
|
||||
tree: &'a Sequence<T>,
|
||||
stack: ArrayVec<StackEntry<'a, T, ()>, 16>,
|
||||
}
|
||||
|
||||
impl<'a, T, D> Cursor<'a, T, D>
|
||||
where
|
||||
T: Item,
|
||||
D: Dimension<'a, T::Summary>,
|
||||
{
|
||||
pub fn new(tree: &'a Sequence<T>) -> Self {
|
||||
Self {
|
||||
tree,
|
||||
stack: ArrayVec::new(),
|
||||
position: D::default(),
|
||||
did_seek: false,
|
||||
at_end: tree.is_empty(),
|
||||
}
|
||||
}
|
||||
|
||||
fn reset(&mut self) {
|
||||
self.did_seek = false;
|
||||
self.at_end = self.tree.is_empty();
|
||||
self.stack.truncate(0);
|
||||
self.position = D::default();
|
||||
}
|
||||
|
||||
pub fn start(&self) -> &D {
|
||||
&self.position
|
||||
}
|
||||
|
||||
pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D {
|
||||
if let Some(item_summary) = self.item_summary() {
|
||||
let mut end = self.start().clone();
|
||||
end.add_summary(item_summary, cx);
|
||||
end
|
||||
} else {
|
||||
self.start().clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn item(&self) -> Option<&'a T> {
|
||||
self.assert_did_seek();
|
||||
if let Some(entry) = self.stack.last() {
|
||||
match *entry.tree.0 {
|
||||
Node::Leaf { ref items, .. } => {
|
||||
if entry.index == items.len() {
|
||||
None
|
||||
} else {
|
||||
Some(&items[entry.index])
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn item_summary(&self) -> Option<&'a T::Summary> {
|
||||
self.assert_did_seek();
|
||||
if let Some(entry) = self.stack.last() {
|
||||
match *entry.tree.0 {
|
||||
Node::Leaf {
|
||||
ref item_summaries, ..
|
||||
} => {
|
||||
if entry.index == item_summaries.len() {
|
||||
None
|
||||
} else {
|
||||
Some(&item_summaries[entry.index])
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn prev_item(&self) -> Option<&'a T> {
|
||||
self.assert_did_seek();
|
||||
if let Some(entry) = self.stack.last() {
|
||||
if entry.index == 0 {
|
||||
if let Some(prev_leaf) = self.prev_leaf() {
|
||||
Some(prev_leaf.0.items().last().unwrap())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
match *entry.tree.0 {
|
||||
Node::Leaf { ref items, .. } => Some(&items[entry.index - 1]),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
} else if self.at_end {
|
||||
self.tree.last()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn prev_leaf(&self) -> Option<&'a Sequence<T>> {
|
||||
for entry in self.stack.iter().rev().skip(1) {
|
||||
if entry.index != 0 {
|
||||
match *entry.tree.0 {
|
||||
Node::Internal {
|
||||
ref child_trees, ..
|
||||
} => {
|
||||
for tree in child_trees[..entry.index].iter().rev() {
|
||||
if let ChildTree::Loaded { tree } = tree {
|
||||
if let Some(leaf) = tree.rightmost_leaf() {
|
||||
return Some(leaf);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Node::Leaf { .. } => unreachable!(),
|
||||
};
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub fn prev(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||
self.prev_internal(|_| true, cx)
|
||||
}
|
||||
|
||||
fn prev_internal<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context)
|
||||
where
|
||||
F: FnMut(&T::Summary) -> bool,
|
||||
{
|
||||
if !self.did_seek {
|
||||
self.did_seek = true;
|
||||
self.at_end = true;
|
||||
}
|
||||
|
||||
if self.at_end {
|
||||
self.position = D::default();
|
||||
self.at_end = self.tree.is_empty();
|
||||
if !self.tree.is_empty() {
|
||||
self.stack.push(StackEntry {
|
||||
tree: self.tree,
|
||||
index: self.tree.0.child_summaries().len(),
|
||||
position: D::from_summary(self.tree.summary(), cx),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let mut descending = false;
|
||||
while !self.stack.is_empty() {
|
||||
if let Some(StackEntry { position, .. }) = self.stack.iter().rev().nth(1) {
|
||||
self.position = position.clone();
|
||||
} else {
|
||||
self.position = D::default();
|
||||
}
|
||||
|
||||
let entry = self.stack.last_mut().unwrap();
|
||||
if !descending {
|
||||
if entry.index == 0 {
|
||||
self.stack.pop();
|
||||
continue;
|
||||
} else {
|
||||
entry.index -= 1;
|
||||
}
|
||||
}
|
||||
|
||||
for summary in &entry.tree.0.child_summaries()[..entry.index] {
|
||||
self.position.add_summary(summary, cx);
|
||||
}
|
||||
entry.position = self.position.clone();
|
||||
|
||||
descending = filter_node(&entry.tree.0.child_summaries()[entry.index]);
|
||||
match entry.tree.0.as_ref() {
|
||||
Node::Internal { child_trees, .. } => {
|
||||
if descending {
|
||||
if let ChildTree::Loaded { tree } = &child_trees[entry.index] {
|
||||
self.stack.push(StackEntry {
|
||||
position: D::default(),
|
||||
tree,
|
||||
index: tree.0.child_summaries().len() - 1,
|
||||
});
|
||||
} else {
|
||||
descending = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
Node::Leaf { .. } => {
|
||||
if descending {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||
self.next_internal(|_| true, cx)
|
||||
}
|
||||
|
||||
fn next_internal<F>(&mut self, mut filter_node: F, cx: &<T::Summary as Summary>::Context)
|
||||
where
|
||||
F: FnMut(&T::Summary) -> bool,
|
||||
{
|
||||
let mut descend = false;
|
||||
|
||||
if self.stack.is_empty() {
|
||||
if !self.at_end {
|
||||
self.stack.push(StackEntry {
|
||||
tree: self.tree,
|
||||
index: 0,
|
||||
position: D::default(),
|
||||
});
|
||||
descend = true;
|
||||
}
|
||||
self.did_seek = true;
|
||||
}
|
||||
|
||||
while !self.stack.is_empty() {
|
||||
let new_subtree = {
|
||||
let entry = self.stack.last_mut().unwrap();
|
||||
match entry.tree.0.as_ref() {
|
||||
Node::Internal {
|
||||
child_trees,
|
||||
child_summaries,
|
||||
..
|
||||
} => {
|
||||
if !descend {
|
||||
entry.index += 1;
|
||||
entry.position = self.position.clone();
|
||||
}
|
||||
|
||||
while entry.index < child_summaries.len() {
|
||||
let next_summary = &child_summaries[entry.index];
|
||||
if filter_node(next_summary) && child_trees[entry.index].is_loaded() {
|
||||
break;
|
||||
} else {
|
||||
entry.index += 1;
|
||||
entry.position.add_summary(next_summary, cx);
|
||||
self.position.add_summary(next_summary, cx);
|
||||
}
|
||||
}
|
||||
|
||||
child_trees.get(entry.index)
|
||||
}
|
||||
Node::Leaf { item_summaries, .. } => {
|
||||
if !descend {
|
||||
let item_summary = &item_summaries[entry.index];
|
||||
entry.index += 1;
|
||||
entry.position.add_summary(item_summary, cx);
|
||||
self.position.add_summary(item_summary, cx);
|
||||
}
|
||||
|
||||
loop {
|
||||
if let Some(next_item_summary) = item_summaries.get(entry.index) {
|
||||
if filter_node(next_item_summary) {
|
||||
return;
|
||||
} else {
|
||||
entry.index += 1;
|
||||
entry.position.add_summary(next_item_summary, cx);
|
||||
self.position.add_summary(next_item_summary, cx);
|
||||
}
|
||||
} else {
|
||||
break None;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(subtree) = new_subtree {
|
||||
let subtree = if let ChildTree::Loaded { tree } = subtree {
|
||||
tree
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
descend = true;
|
||||
self.stack.push(StackEntry {
|
||||
tree: subtree,
|
||||
index: 0,
|
||||
position: self.position.clone(),
|
||||
});
|
||||
} else {
|
||||
descend = false;
|
||||
self.stack.pop();
|
||||
}
|
||||
}
|
||||
|
||||
self.at_end = self.stack.is_empty();
|
||||
debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf());
|
||||
}
|
||||
|
||||
fn assert_did_seek(&self) {
|
||||
assert!(
|
||||
self.did_seek,
|
||||
"Must call `seek`, `next` or `prev` before calling this method"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T, D> Cursor<'a, T, D>
|
||||
where
|
||||
T: Item,
|
||||
D: Dimension<'a, T::Summary>,
|
||||
{
|
||||
pub fn seek<Target>(
|
||||
&mut self,
|
||||
pos: &Target,
|
||||
bias: Bias,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) -> bool
|
||||
where
|
||||
Target: SeekTarget<'a, T::Summary, D>,
|
||||
{
|
||||
self.reset();
|
||||
self.seek_internal(pos, bias, &mut (), cx)
|
||||
}
|
||||
|
||||
pub fn seek_forward<Target>(
|
||||
&mut self,
|
||||
pos: &Target,
|
||||
bias: Bias,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) -> bool
|
||||
where
|
||||
Target: SeekTarget<'a, T::Summary, D>,
|
||||
{
|
||||
self.seek_internal(pos, bias, &mut (), cx)
|
||||
}
|
||||
|
||||
pub fn slice<Target>(
|
||||
&mut self,
|
||||
end: &Target,
|
||||
bias: Bias,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) -> Sequence<T>
|
||||
where
|
||||
Target: SeekTarget<'a, T::Summary, D>,
|
||||
{
|
||||
let mut slice = SliceSeekAggregate {
|
||||
tree: Sequence::new(),
|
||||
leaf_items: ArrayVec::new(),
|
||||
leaf_item_summaries: ArrayVec::new(),
|
||||
leaf_summary: T::Summary::default(),
|
||||
};
|
||||
self.seek_internal(end, bias, &mut slice, cx);
|
||||
slice.tree
|
||||
}
|
||||
|
||||
pub fn suffix(&mut self, cx: &<T::Summary as Summary>::Context) -> Sequence<T> {
|
||||
self.slice(&End::new(), Bias::Right, cx)
|
||||
}
|
||||
|
||||
pub fn summary<Target, Output>(
|
||||
&mut self,
|
||||
end: &Target,
|
||||
bias: Bias,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) -> Output
|
||||
where
|
||||
Target: SeekTarget<'a, T::Summary, D>,
|
||||
Output: Dimension<'a, T::Summary>,
|
||||
{
|
||||
let mut summary = SummarySeekAggregate(Output::default());
|
||||
self.seek_internal(end, bias, &mut summary, cx);
|
||||
summary.0
|
||||
}
|
||||
|
||||
fn seek_internal(
|
||||
&mut self,
|
||||
target: &dyn SeekTarget<'a, T::Summary, D>,
|
||||
bias: Bias,
|
||||
aggregate: &mut dyn SeekAggregate<'a, T>,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) -> bool {
|
||||
debug_assert!(
|
||||
target.seek_cmp(&self.position, cx) >= Ordering::Equal,
|
||||
"cannot seek backward from {:?} to {:?}",
|
||||
self.position,
|
||||
target
|
||||
);
|
||||
|
||||
if !self.did_seek {
|
||||
self.did_seek = true;
|
||||
self.stack.push(StackEntry {
|
||||
tree: self.tree,
|
||||
index: 0,
|
||||
position: Default::default(),
|
||||
});
|
||||
}
|
||||
|
||||
let mut ascending = false;
|
||||
'outer: while let Some(entry) = self.stack.last_mut() {
|
||||
match *entry.tree.0 {
|
||||
Node::Internal {
|
||||
ref child_summaries,
|
||||
ref child_trees,
|
||||
..
|
||||
} => {
|
||||
if ascending {
|
||||
entry.index += 1;
|
||||
entry.position = self.position.clone();
|
||||
}
|
||||
|
||||
for (child_tree, child_summary) in child_trees[entry.index..]
|
||||
.iter()
|
||||
.zip(&child_summaries[entry.index..])
|
||||
{
|
||||
let mut child_end = self.position.clone();
|
||||
child_end.add_summary(child_summary, cx);
|
||||
|
||||
let comparison = target.seek_cmp(&child_end, cx);
|
||||
if comparison == Ordering::Greater
|
||||
|| (comparison == Ordering::Equal && bias == Bias::Right)
|
||||
|| !child_tree.is_loaded()
|
||||
{
|
||||
self.position = child_end;
|
||||
aggregate.push_tree(child_tree, child_summary, cx);
|
||||
entry.index += 1;
|
||||
entry.position = self.position.clone();
|
||||
} else {
|
||||
let child_tree = if let ChildTree::Loaded { tree } = child_tree {
|
||||
tree
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
self.stack.push(StackEntry {
|
||||
tree: child_tree,
|
||||
index: 0,
|
||||
position: self.position.clone(),
|
||||
});
|
||||
ascending = false;
|
||||
continue 'outer;
|
||||
}
|
||||
}
|
||||
}
|
||||
Node::Leaf {
|
||||
ref items,
|
||||
ref item_summaries,
|
||||
..
|
||||
} => {
|
||||
aggregate.begin_leaf();
|
||||
|
||||
for (item, item_summary) in items[entry.index..]
|
||||
.iter()
|
||||
.zip(&item_summaries[entry.index..])
|
||||
{
|
||||
let mut child_end = self.position.clone();
|
||||
child_end.add_summary(item_summary, cx);
|
||||
|
||||
let comparison = target.seek_cmp(&child_end, cx);
|
||||
if comparison == Ordering::Greater
|
||||
|| (comparison == Ordering::Equal && bias == Bias::Right)
|
||||
{
|
||||
self.position = child_end;
|
||||
aggregate.push_item(item, item_summary, cx);
|
||||
entry.index += 1;
|
||||
} else {
|
||||
aggregate.end_leaf(cx);
|
||||
break 'outer;
|
||||
}
|
||||
}
|
||||
|
||||
aggregate.end_leaf(cx);
|
||||
}
|
||||
}
|
||||
|
||||
self.stack.pop();
|
||||
ascending = true;
|
||||
}
|
||||
|
||||
self.at_end = self.stack.is_empty();
|
||||
debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf());
|
||||
|
||||
let mut end = self.position.clone();
|
||||
if bias == Bias::Left {
|
||||
if let Some(summary) = self.item_summary() {
|
||||
end.add_summary(summary, cx);
|
||||
}
|
||||
}
|
||||
|
||||
target.seek_cmp(&end, cx) == Ordering::Equal
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Item> Iter<'a, T> {
|
||||
pub(crate) fn new(tree: &'a Sequence<T>) -> Self {
|
||||
Self {
|
||||
tree,
|
||||
stack: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Item> Iterator for Iter<'a, T> {
|
||||
type Item = &'a T;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let mut descend = false;
|
||||
|
||||
if self.stack.is_empty() {
|
||||
self.stack.push(StackEntry {
|
||||
tree: self.tree,
|
||||
index: 0,
|
||||
position: (),
|
||||
});
|
||||
descend = true;
|
||||
}
|
||||
|
||||
while !self.stack.is_empty() {
|
||||
let new_subtree = {
|
||||
let entry = self.stack.last_mut().unwrap();
|
||||
match entry.tree.0.as_ref() {
|
||||
Node::Internal { child_trees, .. } => {
|
||||
if !descend {
|
||||
entry.index += 1;
|
||||
}
|
||||
while entry.index < child_trees.len() {
|
||||
if child_trees[entry.index].is_loaded() {
|
||||
break;
|
||||
}
|
||||
entry.index += 1;
|
||||
}
|
||||
|
||||
child_trees.get(entry.index)
|
||||
}
|
||||
Node::Leaf { items, .. } => {
|
||||
if !descend {
|
||||
entry.index += 1;
|
||||
}
|
||||
|
||||
if let Some(next_item) = items.get(entry.index) {
|
||||
return Some(next_item);
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(subtree) = new_subtree {
|
||||
let subtree = if let ChildTree::Loaded { tree } = subtree {
|
||||
tree
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
descend = true;
|
||||
self.stack.push(StackEntry {
|
||||
tree: subtree,
|
||||
index: 0,
|
||||
position: (),
|
||||
});
|
||||
} else {
|
||||
descend = false;
|
||||
self.stack.pop();
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T, S, D> Iterator for Cursor<'a, T, D>
|
||||
where
|
||||
T: Item<Summary = S>,
|
||||
S: Summary<Context = ()>,
|
||||
D: Dimension<'a, T::Summary>,
|
||||
{
|
||||
type Item = &'a T;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if !self.did_seek {
|
||||
self.next(&());
|
||||
}
|
||||
|
||||
if let Some(item) = self.item() {
|
||||
self.next(&());
|
||||
Some(item)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct FilterCursor<'a, F, T: Item, D> {
|
||||
cursor: Cursor<'a, T, D>,
|
||||
filter_node: F,
|
||||
}
|
||||
|
||||
impl<'a, F, T, D> FilterCursor<'a, F, T, D>
|
||||
where
|
||||
F: FnMut(&T::Summary) -> bool,
|
||||
T: Item,
|
||||
D: Dimension<'a, T::Summary>,
|
||||
{
|
||||
pub fn new(tree: &'a Sequence<T>, filter_node: F) -> Self {
|
||||
let cursor = tree.cursor::<D>();
|
||||
Self {
|
||||
cursor,
|
||||
filter_node,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn start(&self) -> &D {
|
||||
self.cursor.start()
|
||||
}
|
||||
|
||||
pub fn end(&self, cx: &<T::Summary as Summary>::Context) -> D {
|
||||
self.cursor.end(cx)
|
||||
}
|
||||
|
||||
pub fn item(&self) -> Option<&'a T> {
|
||||
self.cursor.item()
|
||||
}
|
||||
|
||||
pub fn item_summary(&self) -> Option<&'a T::Summary> {
|
||||
self.cursor.item_summary()
|
||||
}
|
||||
|
||||
pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||
self.cursor.next_internal(&mut self.filter_node, cx);
|
||||
}
|
||||
|
||||
pub fn prev(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||
self.cursor.prev_internal(&mut self.filter_node, cx);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, F, T, S, U> Iterator for FilterCursor<'a, F, T, U>
|
||||
where
|
||||
F: FnMut(&T::Summary) -> bool,
|
||||
T: Item<Summary = S>,
|
||||
S: Summary<Context = ()>, // The summary's context must be the unit type, since Iterator::next takes no context argument
|
||||
U: Dimension<'a, T::Summary>,
|
||||
{
|
||||
type Item = &'a T;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if !self.cursor.did_seek {
|
||||
self.next(&());
|
||||
}
|
||||
|
||||
if let Some(item) = self.item() {
|
||||
self.cursor.next_internal(&mut self.filter_node, &());
|
||||
Some(item)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trait SeekAggregate<'a, T: Item> {
|
||||
fn begin_leaf(&mut self);
|
||||
fn end_leaf(&mut self, cx: &<T::Summary as Summary>::Context);
|
||||
fn push_item(
|
||||
&mut self,
|
||||
item: &'a T,
|
||||
summary: &'a T::Summary,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
);
|
||||
fn push_tree(
|
||||
&mut self,
|
||||
tree: &'a ChildTree<T>,
|
||||
summary: &'a T::Summary,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
);
|
||||
}
|
||||
|
||||
struct SliceSeekAggregate<T: Item> {
|
||||
tree: Sequence<T>,
|
||||
leaf_items: ArrayVec<T, { 2 * TREE_BASE }>,
|
||||
leaf_item_summaries: ArrayVec<T::Summary, { 2 * TREE_BASE }>,
|
||||
leaf_summary: T::Summary,
|
||||
}
|
||||
|
||||
struct SummarySeekAggregate<D>(D);
|
||||
|
||||
impl<'a, T: Item> SeekAggregate<'a, T> for () {
|
||||
fn begin_leaf(&mut self) {}
|
||||
fn end_leaf(&mut self, _: &<T::Summary as Summary>::Context) {}
|
||||
fn push_item(&mut self, _: &T, _: &T::Summary, _: &<T::Summary as Summary>::Context) {}
|
||||
fn push_tree(
|
||||
&mut self,
|
||||
_: &ChildTree<T>,
|
||||
_: &T::Summary,
|
||||
_: &<T::Summary as Summary>::Context,
|
||||
) {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate<T> {
|
||||
fn begin_leaf(&mut self) {}
|
||||
fn end_leaf(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||
self.tree.append(
|
||||
Sequence(Arc::new(Node::Leaf {
|
||||
saved_id: SavedId::default(),
|
||||
summary: mem::take(&mut self.leaf_summary),
|
||||
items: mem::take(&mut self.leaf_items),
|
||||
item_summaries: mem::take(&mut self.leaf_item_summaries),
|
||||
})),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
fn push_item(&mut self, item: &T, summary: &T::Summary, cx: &<T::Summary as Summary>::Context) {
|
||||
self.leaf_items.push(item.clone());
|
||||
self.leaf_item_summaries.push(summary.clone());
|
||||
Summary::add_summary(&mut self.leaf_summary, summary, cx);
|
||||
}
|
||||
fn push_tree(
|
||||
&mut self,
|
||||
tree: &ChildTree<T>,
|
||||
summary: &T::Summary,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) {
|
||||
self.tree.append_internal(tree.clone(), summary.clone(), cx);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Item, D> SeekAggregate<'a, T> for SummarySeekAggregate<D>
|
||||
where
|
||||
D: Dimension<'a, T::Summary>,
|
||||
{
|
||||
fn begin_leaf(&mut self) {}
|
||||
fn end_leaf(&mut self, _: &<T::Summary as Summary>::Context) {}
|
||||
fn push_item(&mut self, _: &T, summary: &'a T::Summary, cx: &<T::Summary as Summary>::Context) {
|
||||
self.0.add_summary(summary, cx);
|
||||
}
|
||||
fn push_tree(
|
||||
&mut self,
|
||||
_: &ChildTree<T>,
|
||||
summary: &'a T::Summary,
|
||||
cx: &<T::Summary as Summary>::Context,
|
||||
) {
|
||||
self.0.add_summary(summary, cx);
|
||||
}
|
||||
}
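
A minimal standalone sketch (not this crate's API) of the bias rule the cursor uses above: an item or subtree is consumed when the seek target lies strictly past its end, or exactly at its end with Bias::Right; with Bias::Left the cursor stops just before an item whose end equals the target.

use std::cmp::Ordering;

#[derive(Clone, Copy, PartialEq)]
enum Bias {
    Left,
    Right,
}

// Seek over cumulative item lengths, mirroring the rule in `seek_internal`:
// consume an item when the target is strictly past its end, or exactly at its
// end with Bias::Right. Returns (index of the item the cursor stops on, the
// position at the start of that item).
fn seek(lengths: &[usize], target: usize, bias: Bias) -> (usize, usize) {
    let mut index = 0;
    let mut position = 0;
    for &len in lengths {
        let end = position + len;
        let comparison = target.cmp(&end);
        if comparison == Ordering::Greater
            || (comparison == Ordering::Equal && bias == Bias::Right)
        {
            position = end;
            index += 1;
        } else {
            break;
        }
    }
    (index, position)
}

fn main() {
    let lengths = [3, 2, 4];
    assert_eq!(seek(&lengths, 5, Bias::Left), (1, 3)); // stops before the item ending at 5
    assert_eq!(seek(&lengths, 5, Bias::Right), (2, 5)); // consumes that item as well
}
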
|
||||
594
crates/crdb/src/btree/map.rs
Normal file
@@ -0,0 +1,594 @@
|
||||
use super::{
|
||||
Bias, Dimension, Edit, Item, KeyedItem, KvStore, SavedId, SeekTarget, Sequence, Summary,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
collections::BTreeMap,
|
||||
fmt::{self, Debug},
|
||||
ops::{Bound, RangeBounds},
|
||||
};
|
||||
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub struct Map<K, V>(Sequence<MapEntry<K, V>>)
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone + Debug;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct MapEntry<K, V> {
|
||||
key: K,
|
||||
value: V,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
|
||||
pub struct MapKey<K>(Option<K>);
|
||||
|
||||
impl<K> Default for MapKey<K> {
|
||||
fn default() -> Self {
|
||||
Self(None)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MapKeyRef<'a, K>(Option<&'a K>);
|
||||
|
||||
impl<K> Default for MapKeyRef<'_, K> {
|
||||
fn default() -> Self {
|
||||
Self(None)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Set<K>(Map<K, ()>)
|
||||
where
|
||||
K: Clone + Debug + Ord;
|
||||
|
||||
impl<K, V> Map<K, V>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone + Debug,
|
||||
{
|
||||
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
|
||||
Sequence::ptr_eq(&this.0, &other.0)
|
||||
}
|
||||
|
||||
pub fn from_ordered_entries(entries: impl IntoIterator<Item = (K, V)>) -> Self {
|
||||
let tree = Sequence::from_iter(
|
||||
entries
|
||||
.into_iter()
|
||||
.map(|(key, value)| MapEntry { key, value }),
|
||||
&(),
|
||||
);
|
||||
Self(tree)
|
||||
}
|
||||
|
||||
pub async fn load_root(id: SavedId, kv: &dyn KvStore) -> Result<Self>
|
||||
where
|
||||
K: Serialize + for<'de> Deserialize<'de>,
|
||||
V: Serialize + for<'de> Deserialize<'de>,
|
||||
{
|
||||
Ok(Self(Sequence::load_root(id, kv).await?))
|
||||
}
|
||||
|
||||
pub async fn load_all(id: SavedId, kv: &dyn KvStore) -> Result<Self>
|
||||
where
|
||||
K: Serialize + for<'de> Deserialize<'de>,
|
||||
V: Serialize + for<'de> Deserialize<'de>,
|
||||
{
|
||||
let mut sequence = Sequence::load_root(id, kv).await?;
|
||||
sequence.load(kv, &(), |_| true).await?;
|
||||
Ok(Self(sequence))
|
||||
}
|
||||
|
||||
pub async fn load(&mut self, key: &K, kv: &dyn KvStore) -> Result<Option<&V>>
|
||||
where
|
||||
K: Serialize + for<'de> Deserialize<'de>,
|
||||
V: Serialize + for<'de> Deserialize<'de>,
|
||||
{
|
||||
self.0
|
||||
.load(kv, &(), |probe| {
|
||||
let key_range = (
|
||||
Bound::Excluded(probe.start.0.as_ref()),
|
||||
Bound::Included(probe.summary.0.as_ref()),
|
||||
);
|
||||
key_range.contains(&Some(key))
|
||||
})
|
||||
.await?;
|
||||
Ok(self.get(key))
|
||||
}
|
||||
|
||||
pub async fn load_from(
|
||||
&mut self,
|
||||
start: &K,
|
||||
kv: &dyn KvStore,
|
||||
) -> Result<impl Iterator<Item = (&K, &V)>>
|
||||
where
|
||||
K: Serialize + for<'de> Deserialize<'de>,
|
||||
V: Serialize + for<'de> Deserialize<'de>,
|
||||
{
|
||||
self.0
|
||||
.load(kv, &(), |probe| {
|
||||
probe.start.0.as_ref() >= Some(&start) || probe.summary.0.as_ref() >= Some(&start)
|
||||
})
|
||||
.await?;
|
||||
Ok(self.iter_from(start))
|
||||
}
|
||||
|
||||
pub async fn store(&mut self, key: K, value: V, kv: &dyn KvStore) -> Result<()>
|
||||
where
|
||||
K: Serialize + for<'de> Deserialize<'de>,
|
||||
V: Serialize + for<'de> Deserialize<'de>,
|
||||
{
|
||||
self.0
|
||||
.load(kv, &(), |probe| {
|
||||
let key_range = (
|
||||
Bound::Excluded(probe.start.0.as_ref()),
|
||||
Bound::Included(probe.summary.0.as_ref()),
|
||||
);
|
||||
key_range.contains(&Some(&key))
|
||||
})
|
||||
.await?;
|
||||
self.insert(key, value);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn save(&self, kv: &dyn KvStore) -> Result<SavedId>
|
||||
where
|
||||
K: Serialize + for<'de> Deserialize<'de>,
|
||||
V: Serialize + for<'de> Deserialize<'de>,
|
||||
{
|
||||
self.0.save(kv).await
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
|
||||
pub fn get<'a>(&self, key: &'a K) -> Option<&V> {
|
||||
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
|
||||
cursor.seek(&MapKeyRef(Some(key)), Bias::Left, &());
|
||||
if let Some(item) = cursor.item() {
|
||||
if key == &item.key {
|
||||
Some(&item.value)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn contains_key<'a>(&self, key: &'a K) -> bool {
|
||||
self.get(key).is_some()
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, key: K, value: V) {
|
||||
self.0.insert_or_replace(MapEntry { key, value }, &());
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, key: &K) -> Option<V> {
|
||||
let mut removed = None;
|
||||
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
|
||||
let key = MapKeyRef(Some(key));
|
||||
let mut new_tree = cursor.slice(&key, Bias::Left, &());
|
||||
if key.seek_cmp(&cursor.end(&()), &()) == Ordering::Equal {
|
||||
removed = Some(cursor.item().unwrap().value.clone());
|
||||
cursor.next(&());
|
||||
}
|
||||
new_tree.append(cursor.suffix(&()), &());
|
||||
drop(cursor);
|
||||
self.0 = new_tree;
|
||||
removed
|
||||
}
|
||||
|
||||
pub fn remove_range(&mut self, start: &impl MapSeekTarget<K>, end: &impl MapSeekTarget<K>) {
|
||||
let start = MapSeekTargetAdaptor(start);
|
||||
let end = MapSeekTargetAdaptor(end);
|
||||
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
|
||||
let mut new_tree = cursor.slice(&start, Bias::Left, &());
|
||||
cursor.seek(&end, Bias::Left, &());
|
||||
new_tree.append(cursor.suffix(&()), &());
|
||||
drop(cursor);
|
||||
self.0 = new_tree;
|
||||
}
|
||||
|
||||
/// Returns the key-value pair with the greatest key less than or equal to the given key.
|
||||
pub fn closest(&self, key: &K) -> Option<(&K, &V)> {
|
||||
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
|
||||
let key = MapKeyRef(Some(key));
|
||||
cursor.seek(&key, Bias::Right, &());
|
||||
cursor.prev(&());
|
||||
cursor.item().map(|item| (&item.key, &item.value))
|
||||
}
|
||||
|
||||
pub fn iter_from<'a>(&self, from: &'a K) -> impl Iterator<Item = (&K, &V)> {
|
||||
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
|
||||
let from_key = MapKeyRef(Some(from));
|
||||
cursor.seek(&from_key, Bias::Left, &());
|
||||
|
||||
cursor
|
||||
.into_iter()
|
||||
.map(|map_entry| (&map_entry.key, &map_entry.value))
|
||||
}
|
||||
|
||||
pub fn update<F, T>(&mut self, key: &K, f: F) -> Option<T>
|
||||
where
|
||||
F: FnOnce(&mut V) -> T,
|
||||
{
|
||||
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
|
||||
let key = MapKeyRef(Some(key));
|
||||
let mut new_tree = cursor.slice(&key, Bias::Left, &());
|
||||
let mut result = None;
|
||||
if key.seek_cmp(&cursor.end(&()), &()) == Ordering::Equal {
|
||||
let mut updated = cursor.item().unwrap().clone();
|
||||
result = Some(f(&mut updated.value));
|
||||
new_tree.push(updated, &());
|
||||
cursor.next(&());
|
||||
}
|
||||
new_tree.append(cursor.suffix(&()), &());
|
||||
drop(cursor);
|
||||
self.0 = new_tree;
|
||||
result
|
||||
}
|
||||
|
||||
pub fn retain<F: FnMut(&K, &V) -> bool>(&mut self, mut predicate: F) {
|
||||
let mut new_map = Sequence::<MapEntry<K, V>>::default();
|
||||
|
||||
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
|
||||
cursor.next(&());
|
||||
while let Some(item) = cursor.item() {
|
||||
if predicate(&item.key, &item.value) {
|
||||
new_map.push(item.clone(), &());
|
||||
}
|
||||
cursor.next(&());
|
||||
}
|
||||
drop(cursor);
|
||||
|
||||
self.0 = new_map;
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> impl Iterator<Item = (&K, &V)> + '_ {
|
||||
self.0.iter().map(|entry| (&entry.key, &entry.value))
|
||||
}
|
||||
|
||||
pub fn values(&self) -> impl Iterator<Item = &V> + '_ {
|
||||
self.0.iter().map(|entry| &entry.value)
|
||||
}
|
||||
|
||||
pub fn insert_tree(&mut self, other: Map<K, V>) {
|
||||
let edits = other
|
||||
.iter()
|
||||
.map(|(key, value)| {
|
||||
Edit::Insert(MapEntry {
|
||||
key: key.to_owned(),
|
||||
value: value.to_owned(),
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
self.0.edit(edits, &());
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Into<BTreeMap<K, V>> for &Map<K, V>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone + Debug,
|
||||
{
|
||||
fn into(self) -> BTreeMap<K, V> {
|
||||
self.iter()
|
||||
.map(|(replica_id, count)| (replica_id.clone(), count.clone()))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> From<&BTreeMap<K, V>> for Map<K, V>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone + Debug,
|
||||
{
|
||||
fn from(value: &BTreeMap<K, V>) -> Self {
|
||||
Map::from_ordered_entries(value.into_iter().map(|(k, v)| (k.clone(), v.clone())))
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Debug for Map<K, V>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone + Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_map().entries(self.iter()).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Debug for Set<T>
|
||||
where
|
||||
T: Clone + Debug + Ord,
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_set().entries(self.iter()).finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct MapSeekTargetAdaptor<'a, T>(&'a T);
|
||||
|
||||
impl<'a, K: Debug + Clone + Ord, T: MapSeekTarget<K>> SeekTarget<'a, MapKey<K>, MapKeyRef<'a, K>>
|
||||
for MapSeekTargetAdaptor<'_, T>
|
||||
{
|
||||
fn seek_cmp(&self, cursor_location: &MapKeyRef<K>, _: &()) -> Ordering {
|
||||
if let Some(key) = &cursor_location.0 {
|
||||
MapSeekTarget::cmp_cursor(self.0, key)
|
||||
} else {
|
||||
Ordering::Greater
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait MapSeekTarget<K>: Debug {
|
||||
fn cmp_cursor(&self, cursor_location: &K) -> Ordering;
|
||||
}
|
||||
|
||||
impl<K: Debug + Ord> MapSeekTarget<K> for K {
|
||||
fn cmp_cursor(&self, cursor_location: &K) -> Ordering {
|
||||
self.cmp(cursor_location)
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Default for Map<K, V>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone + Debug,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self(Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Item for MapEntry<K, V>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone,
|
||||
{
|
||||
type Summary = MapKey<K>;
|
||||
|
||||
fn summary(&self) -> Self::Summary {
|
||||
self.key()
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> KeyedItem for MapEntry<K, V>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
V: Clone,
|
||||
{
|
||||
type Key = MapKey<K>;
|
||||
|
||||
fn key(&self) -> Self::Key {
|
||||
MapKey(Some(self.key.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> Summary for MapKey<K>
|
||||
where
|
||||
K: Clone + Debug,
|
||||
{
|
||||
type Context = ();
|
||||
|
||||
fn add_summary(&mut self, summary: &Self, _: &()) {
|
||||
*self = summary.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, K> Dimension<'a, MapKey<K>> for MapKeyRef<'a, K>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
{
|
||||
fn add_summary(&mut self, summary: &'a MapKey<K>, _: &()) {
|
||||
self.0 = summary.0.as_ref();
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, K> SeekTarget<'a, MapKey<K>, MapKeyRef<'a, K>> for MapKeyRef<'_, K>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
{
|
||||
fn seek_cmp(&self, cursor_location: &MapKeyRef<K>, _: &()) -> Ordering {
|
||||
Ord::cmp(&self.0, &cursor_location.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> Default for Set<K>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self(Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> Set<K>
|
||||
where
|
||||
K: Clone + Debug + Ord,
|
||||
{
|
||||
pub fn from_ordered_entries(entries: impl IntoIterator<Item = K>) -> Self {
|
||||
Self(Map::from_ordered_entries(
|
||||
entries.into_iter().map(|key| (key, ())),
|
||||
))
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, key: K) {
|
||||
self.0.insert(key, ());
|
||||
}
|
||||
|
||||
pub fn contains(&self, key: &K) -> bool {
|
||||
self.0.get(key).is_some()
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> impl Iterator<Item = &K> + '_ {
|
||||
self.0.iter().map(|(k, _)| k)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_basic() {
|
||||
let mut map = Map::default();
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![]);
|
||||
|
||||
map.insert(3, "c");
|
||||
assert_eq!(map.get(&3), Some(&"c"));
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&3, &"c")]);
|
||||
|
||||
map.insert(1, "a");
|
||||
assert_eq!(map.get(&1), Some(&"a"));
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a"), (&3, &"c")]);
|
||||
|
||||
map.insert(2, "b");
|
||||
assert_eq!(map.get(&2), Some(&"b"));
|
||||
assert_eq!(map.get(&1), Some(&"a"));
|
||||
assert_eq!(map.get(&3), Some(&"c"));
|
||||
assert_eq!(
|
||||
map.iter().collect::<Vec<_>>(),
|
||||
vec![(&1, &"a"), (&2, &"b"), (&3, &"c")]
|
||||
);
|
||||
|
||||
assert_eq!(map.closest(&0), None);
|
||||
assert_eq!(map.closest(&1), Some((&1, &"a")));
|
||||
assert_eq!(map.closest(&10), Some((&3, &"c")));
|
||||
|
||||
map.remove(&2);
|
||||
assert_eq!(map.get(&2), None);
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a"), (&3, &"c")]);
|
||||
|
||||
assert_eq!(map.closest(&2), Some((&1, &"a")));
|
||||
|
||||
map.remove(&3);
|
||||
assert_eq!(map.get(&3), None);
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&1, &"a")]);
|
||||
|
||||
map.remove(&1);
|
||||
assert_eq!(map.get(&1), None);
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![]);
|
||||
|
||||
map.insert(4, "d");
|
||||
map.insert(5, "e");
|
||||
map.insert(6, "f");
|
||||
map.retain(|key, _| *key % 2 == 0);
|
||||
assert_eq!(map.iter().collect::<Vec<_>>(), vec![(&4, &"d"), (&6, &"f")]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_iter_from() {
|
||||
let mut map = Map::default();
|
||||
|
||||
map.insert("a", 1);
|
||||
map.insert("b", 2);
|
||||
map.insert("baa", 3);
|
||||
map.insert("baaab", 4);
|
||||
map.insert("c", 5);
|
||||
|
||||
let result = map
|
||||
.iter_from(&"ba")
|
||||
.take_while(|(key, _)| key.starts_with(&"ba"))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(result.len(), 2);
|
||||
assert!(result.iter().find(|(k, _)| k == &&"baa").is_some());
|
||||
assert!(result.iter().find(|(k, _)| k == &&"baaab").is_some());
|
||||
|
||||
let result = map
|
||||
.iter_from(&"c")
|
||||
.take_while(|(key, _)| key.starts_with(&"c"))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(result.len(), 1);
|
||||
assert!(result.iter().find(|(k, _)| k == &&"c").is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_insert_tree() {
|
||||
let mut map = Map::default();
|
||||
map.insert("a", 1);
|
||||
map.insert("b", 2);
|
||||
map.insert("c", 3);
|
||||
|
||||
let mut other = Map::default();
|
||||
other.insert("a", 2);
|
||||
other.insert("b", 2);
|
||||
other.insert("d", 4);
|
||||
|
||||
map.insert_tree(other);
|
||||
|
||||
assert_eq!(map.iter().count(), 4);
|
||||
assert_eq!(map.get(&"a"), Some(&2));
|
||||
assert_eq!(map.get(&"b"), Some(&2));
|
||||
assert_eq!(map.get(&"c"), Some(&3));
|
||||
assert_eq!(map.get(&"d"), Some(&4));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_between_and_path_successor() {
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PathDescendants<'a>(&'a Path);
|
||||
|
||||
impl MapSeekTarget<PathBuf> for PathDescendants<'_> {
|
||||
fn cmp_cursor(&self, key: &PathBuf) -> Ordering {
|
||||
if key.starts_with(&self.0) {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
self.0.cmp(key)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut map = Map::default();
|
||||
|
||||
map.insert(PathBuf::from("a"), 1);
|
||||
map.insert(PathBuf::from("a/a"), 1);
|
||||
map.insert(PathBuf::from("b"), 2);
|
||||
map.insert(PathBuf::from("b/a/a"), 3);
|
||||
map.insert(PathBuf::from("b/a/a/a/b"), 4);
|
||||
map.insert(PathBuf::from("c"), 5);
|
||||
map.insert(PathBuf::from("c/a"), 6);
|
||||
|
||||
map.remove_range(
|
||||
&PathBuf::from("b/a"),
|
||||
&PathDescendants(&PathBuf::from("b/a")),
|
||||
);
|
||||
|
||||
assert_eq!(map.get(&PathBuf::from("a")), Some(&1));
|
||||
assert_eq!(map.get(&PathBuf::from("a/a")), Some(&1));
|
||||
assert_eq!(map.get(&PathBuf::from("b")), Some(&2));
|
||||
assert_eq!(map.get(&PathBuf::from("b/a/a")), None);
|
||||
assert_eq!(map.get(&PathBuf::from("b/a/a/a/b")), None);
|
||||
assert_eq!(map.get(&PathBuf::from("c")), Some(&5));
|
||||
assert_eq!(map.get(&PathBuf::from("c/a")), Some(&6));
|
||||
|
||||
map.remove_range(&PathBuf::from("c"), &PathDescendants(&PathBuf::from("c")));
|
||||
|
||||
assert_eq!(map.get(&PathBuf::from("a")), Some(&1));
|
||||
assert_eq!(map.get(&PathBuf::from("a/a")), Some(&1));
|
||||
assert_eq!(map.get(&PathBuf::from("b")), Some(&2));
|
||||
assert_eq!(map.get(&PathBuf::from("c")), None);
|
||||
assert_eq!(map.get(&PathBuf::from("c/a")), None);
|
||||
|
||||
map.remove_range(&PathBuf::from("a"), &PathDescendants(&PathBuf::from("a")));
|
||||
|
||||
assert_eq!(map.get(&PathBuf::from("a")), None);
|
||||
assert_eq!(map.get(&PathBuf::from("a/a")), None);
|
||||
assert_eq!(map.get(&PathBuf::from("b")), Some(&2));
|
||||
|
||||
map.remove_range(&PathBuf::from("b"), &PathDescendants(&PathBuf::from("b")));
|
||||
|
||||
assert_eq!(map.get(&PathBuf::from("b")), None);
|
||||
}
|
||||
}
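
A standalone sketch (flattened to a plain slice, using none of this crate's types) of the summary scheme that makes Map work: each entry's summary is its key wrapped in Option, add_summary keeps the rightmost key, and None, the identity used by the empty prefix, orders before every Some(key). Seeking a key therefore amounts to advancing while the running greatest-key-so-far is still less than the target.

// Returns (index where `target` would sit, the summary of the prefix before it).
fn seek_key<'a, K: Ord>(sorted_keys: &'a [K], target: &K) -> (usize, Option<&'a K>) {
    let mut summary: Option<&K> = None; // identity: the empty prefix has no greatest key
    let mut index = 0;
    for key in sorted_keys {
        if Some(key) < Some(target) {
            summary = Some(key); // add_summary: the rightmost key wins
            index += 1;
        } else {
            break;
        }
    }
    (index, summary)
}

fn main() {
    let keys = ["a", "b", "d"];
    assert_eq!(seek_key(&keys, &"c"), (2, Some(&"b")));
    assert_eq!(seek_key(&keys, &"a"), (0, None));
}

The real cursor performs the same comparison per subtree using the precomputed MapKey summaries rather than scanning every entry.
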
|
||||
2840
crates/crdb/src/crdb.rs
Normal file
127
crates/crdb/src/dense_id.rs
Normal file
@@ -0,0 +1,127 @@
|
||||
use crate::btree;
|
||||
use lazy_static::lazy_static;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
use std::iter;
|
||||
|
||||
lazy_static! {
|
||||
static ref MIN: DenseId = DenseId::min();
|
||||
static ref MAX: DenseId = DenseId::max();
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
pub struct DenseId(SmallVec<[u64; 4]>);
|
||||
|
||||
impl DenseId {
|
||||
pub fn min() -> Self {
|
||||
Self(smallvec![u64::MIN])
|
||||
}
|
||||
|
||||
pub fn max() -> Self {
|
||||
Self(smallvec![u64::MAX])
|
||||
}
|
||||
|
||||
pub fn min_ref() -> &'static Self {
|
||||
&*MIN
|
||||
}
|
||||
|
||||
pub fn max_ref() -> &'static Self {
|
||||
&*MAX
|
||||
}
|
||||
|
||||
pub fn assign(&mut self, other: &Self) {
|
||||
self.0.resize(other.0.len(), 0);
|
||||
self.0.copy_from_slice(&other.0);
|
||||
}
|
||||
|
||||
pub fn between(lhs: &Self, rhs: &Self) -> Self {
|
||||
let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
|
||||
let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
|
||||
let mut location = SmallVec::new();
|
||||
for (lhs, rhs) in lhs.zip(rhs) {
|
||||
let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
|
||||
location.push(mid);
|
||||
if mid > lhs {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Self(location)
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.len() == 0
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DenseId {
|
||||
fn default() -> Self {
|
||||
Self::min()
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::Item for DenseId {
|
||||
type Summary = DenseId;
|
||||
|
||||
fn summary(&self) -> Self::Summary {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::KeyedItem for DenseId {
|
||||
type Key = DenseId;
|
||||
|
||||
fn key(&self) -> Self::Key {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::Summary for DenseId {
|
||||
type Context = ();
|
||||
|
||||
fn add_summary(&mut self, summary: &Self, _: &()) {
|
||||
self.assign(summary);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rand::prelude::*;
|
||||
use std::mem;
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_dense_id(mut rng: StdRng) {
|
||||
let mut lhs = Default::default();
|
||||
let mut rhs = Default::default();
|
||||
while lhs == rhs {
|
||||
lhs = DenseId(
|
||||
(0..rng.gen_range(1..=5))
|
||||
.map(|_| rng.gen_range(0..=100))
|
||||
.collect(),
|
||||
);
|
||||
rhs = DenseId(
|
||||
(0..rng.gen_range(1..=5))
|
||||
.map(|_| rng.gen_range(0..=100))
|
||||
.collect(),
|
||||
);
|
||||
}
|
||||
|
||||
if lhs > rhs {
|
||||
mem::swap(&mut lhs, &mut rhs);
|
||||
}
|
||||
|
||||
let middle = DenseId::between(&lhs, &rhs);
|
||||
assert!(middle > lhs);
|
||||
assert!(middle < rhs);
|
||||
for ix in 0..middle.0.len() - 1 {
|
||||
assert!(
|
||||
middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0)
|
||||
|| middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
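
A standalone sketch of the fractional-indexing step in DenseId::between, simplified to a plain Vec<u64> (the DenseId wrapper, SmallVec, and the min/max statics are left out). The shift by 48 mirrors the code above: the new position lands a small step above the left neighbor rather than at the arithmetic midpoint, leaving most of the gap free for later insertions before another level is needed.

// Compute a position strictly between `lhs` and `rhs`, padding missing digits
// with u64::MIN on the left and u64::MAX on the right, exactly as above.
fn between(lhs: &[u64], rhs: &[u64]) -> Vec<u64> {
    let lhs_digits = lhs.iter().copied().chain(std::iter::repeat(u64::MIN));
    let rhs_digits = rhs.iter().copied().chain(std::iter::repeat(u64::MAX));
    let mut result = Vec::new();
    for (l, r) in lhs_digits.zip(rhs_digits) {
        let mid = l + (r.saturating_sub(l) >> 48);
        result.push(mid);
        if mid > l {
            break;
        }
    }
    result
}

fn main() {
    let min = vec![u64::MIN];
    let max = vec![u64::MAX];
    let a = between(&min, &max);
    let b = between(&min, &a);
    assert!(min < a && a < max);
    assert!(min < b && b < a); // ordering is plain lexicographic comparison
}
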
|
||||
699
crates/crdb/src/history.rs
Normal file
@@ -0,0 +1,699 @@
|
||||
use std::{cmp::Ordering, iter, ops::RangeBounds};
|
||||
|
||||
use crate::{
|
||||
btree::{self, Bias, KvStore, SavedId},
|
||||
messages::Operation,
|
||||
OperationCount, OperationId, ReplicaId, RevisionId,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use collections::{BTreeSet, Bound, HashMap, HashSet, VecDeque};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::SmallVec;
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SavedHistory {
|
||||
operations: SavedId,
|
||||
next_operation_id: OperationId,
|
||||
max_operation_ids: SavedId,
|
||||
deferred_operations: SavedId,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct History {
|
||||
operations: btree::Map<OperationId, Operation>,
|
||||
next_operation_id: OperationId,
|
||||
max_operation_ids: btree::Map<ReplicaId, OperationCount>,
|
||||
deferred_operations: btree::Sequence<DeferredOperation>,
|
||||
}
|
||||
|
||||
impl History {
|
||||
pub fn new(replica_id: ReplicaId) -> Self {
|
||||
Self {
|
||||
operations: Default::default(),
|
||||
next_operation_id: OperationId::new(replica_id),
|
||||
max_operation_ids: Default::default(),
|
||||
deferred_operations: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ptr_eq(&self, other: &Self) -> bool {
|
||||
btree::Map::ptr_eq(&self.operations, &other.operations)
|
||||
&& btree::Map::ptr_eq(&self.max_operation_ids, &other.max_operation_ids)
|
||||
&& btree::Sequence::ptr_eq(&self.deferred_operations, &other.deferred_operations)
|
||||
&& self.next_operation_id == other.next_operation_id
|
||||
}
|
||||
|
||||
pub async fn load(saved_history: SavedHistory, kv: &dyn KvStore) -> Result<Self> {
|
||||
Ok(Self {
|
||||
operations: btree::Map::load_root(saved_history.operations, kv).await?,
|
||||
next_operation_id: saved_history.next_operation_id,
|
||||
max_operation_ids: btree::Map::load_all(saved_history.max_operation_ids, kv).await?,
|
||||
deferred_operations: btree::Sequence::load_root(saved_history.deferred_operations, kv)
|
||||
.await?,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn save(&self, kv: &dyn KvStore) -> Result<SavedHistory> {
|
||||
Ok(SavedHistory {
|
||||
operations: self.operations.save(kv).await?,
|
||||
next_operation_id: self.next_operation_id,
|
||||
max_operation_ids: self.max_operation_ids.save(kv).await?,
|
||||
deferred_operations: self.deferred_operations.save(kv).await?,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn replica_id(&self) -> ReplicaId {
|
||||
self.next_operation_id.replica_id
|
||||
}
|
||||
|
||||
pub fn next_operation_id(&mut self) -> OperationId {
|
||||
self.next_operation_id.tick()
|
||||
}
|
||||
|
||||
pub fn max_operation_ids(&self) -> &btree::Map<ReplicaId, OperationCount> {
|
||||
&self.max_operation_ids
|
||||
}
|
||||
|
||||
pub async fn insert(
|
||||
&mut self,
|
||||
operation: Operation,
|
||||
kv: &dyn KvStore,
|
||||
) -> Result<SmallVec<[Operation; 1]>> {
|
||||
let op_id = operation.id();
|
||||
self.next_operation_id.observe(op_id);
|
||||
if self
|
||||
.max_operation_ids
|
||||
.load(&op_id.replica_id, kv)
|
||||
.await?
|
||||
.copied()
|
||||
< Some(op_id.operation_count)
|
||||
{
|
||||
self.max_operation_ids
|
||||
.insert(op_id.replica_id, op_id.operation_count);
|
||||
}
|
||||
self.operations.store(op_id, operation, kv).await?;
|
||||
|
||||
self.deferred_operations
|
||||
.load(kv, &(), |probe| {
|
||||
let key_range = (
|
||||
Bound::Excluded(*probe.start),
|
||||
Bound::Included(*probe.summary),
|
||||
);
|
||||
key_range.contains(&op_id)
|
||||
})
|
||||
.await?;
|
||||
let mut cursor = self.deferred_operations.cursor::<OperationId>();
|
||||
let mut remaining = cursor.slice(&op_id, Bias::Left, &());
|
||||
let mut flushed = SmallVec::new();
|
||||
flushed.extend(
|
||||
cursor
|
||||
.slice(&op_id, Bias::Right, &())
|
||||
.iter()
|
||||
.map(|deferred| deferred.operation.clone()),
|
||||
);
|
||||
remaining.append(cursor.suffix(&()), &());
|
||||
drop(cursor);
|
||||
self.deferred_operations = remaining;
|
||||
Ok(flushed)
|
||||
}
|
||||
|
||||
pub fn insert_local(&mut self, operation: Operation) {
|
||||
let id = operation.id();
|
||||
self.next_operation_id.observe(operation.id());
|
||||
self.max_operation_ids
|
||||
.insert(id.replica_id, id.operation_count);
|
||||
self.operations.insert(id, operation);
|
||||
}
|
||||
|
||||
pub async fn defer(&mut self, operation: Operation, kv: &dyn KvStore) -> Result<()> {
|
||||
for parent in operation.parent().iter() {
|
||||
self.deferred_operations
|
||||
.load(kv, &(), |probe| {
|
||||
let key_range = (
|
||||
Bound::Excluded(*probe.start),
|
||||
Bound::Included(*probe.summary),
|
||||
);
|
||||
key_range.contains(&operation.id())
|
||||
})
|
||||
.await?;
|
||||
self.deferred_operations.insert_or_replace(
|
||||
DeferredOperation {
|
||||
parent: *parent,
|
||||
operation: operation.clone(),
|
||||
},
|
||||
&(),
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn can_apply(&mut self, operation: &Operation, kv: &dyn KvStore) -> Result<bool> {
|
||||
for parent in operation.parent().iter() {
|
||||
if self.operations.load(parent, kv).await?.is_none() {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub async fn has_applied(&mut self, operation: &Operation, kv: &dyn KvStore) -> Result<bool> {
|
||||
Ok(self.operations.load(&operation.id(), kv).await?.is_some())
|
||||
}
|
||||
|
||||
pub async fn operation(
|
||||
&mut self,
|
||||
id: OperationId,
|
||||
kv: &dyn KvStore,
|
||||
) -> Result<Option<&Operation>> {
|
||||
self.operations.load(&id, kv).await
|
||||
}
|
||||
|
||||
pub async fn operations_since(
|
||||
&mut self,
|
||||
version: &btree::Map<ReplicaId, OperationCount>,
|
||||
kv: &dyn KvStore,
|
||||
) -> Result<Vec<Operation>> {
|
||||
let mut new_operations = Vec::new();
|
||||
for (replica_id, end_op_count) in self.max_operation_ids.iter() {
|
||||
let start_op = OperationId {
|
||||
replica_id: *replica_id,
|
||||
operation_count: version
|
||||
.get(&replica_id)
|
||||
.map(|count| OperationCount(count.0 + 1))
|
||||
.unwrap_or_default(),
|
||||
};
|
||||
let end_op = OperationId {
|
||||
replica_id: *replica_id,
|
||||
operation_count: *end_op_count,
|
||||
};
|
||||
|
||||
new_operations.extend(
|
||||
self.operations
|
||||
.load_from(&start_op, kv)
|
||||
.await?
|
||||
.take_while(|(op_id, _)| **op_id <= end_op)
|
||||
.map(|(_, op)| op.clone()),
|
||||
);
|
||||
}
|
||||
Ok(new_operations)
|
||||
}
|
||||
|
||||
pub async fn rewind(&mut self, revision_id: &RevisionId, kv: &dyn KvStore) -> Result<Rewind> {
|
||||
let mut frontier = VecDeque::new();
|
||||
let mut traversed = HashMap::default();
|
||||
for operation_id in revision_id.iter() {
|
||||
let parent_revision = self
|
||||
.operation(*operation_id, kv)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("operation {:?} not found", operation_id))?
|
||||
.parent()
|
||||
.clone();
|
||||
traversed
|
||||
.entry(parent_revision.clone())
|
||||
.or_insert(BTreeSet::default())
|
||||
.insert((revision_id.clone(), *operation_id));
|
||||
frontier.push_back(Frontier {
|
||||
source: *operation_id,
|
||||
revision: parent_revision,
|
||||
});
|
||||
}
|
||||
|
||||
Ok(Rewind {
|
||||
history: self,
|
||||
frontier,
|
||||
traversed,
|
||||
ancestors: Default::default(),
|
||||
reachable_len: revision_id.len(),
|
||||
start: revision_id.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
struct Frontier {
|
||||
source: OperationId,
|
||||
revision: RevisionId,
|
||||
}
|
||||
|
||||
pub struct Rewind<'a> {
|
||||
history: &'a mut History,
|
||||
frontier: VecDeque<Frontier>,
|
||||
traversed: HashMap<RevisionId, BTreeSet<(RevisionId, OperationId)>>,
|
||||
ancestors: HashMap<RevisionId, HashSet<OperationId>>,
|
||||
reachable_len: usize,
|
||||
start: RevisionId,
|
||||
}
|
||||
|
||||
impl Rewind<'_> {
|
||||
pub async fn next(&mut self, kv: &dyn KvStore) -> Result<Option<RevisionId>> {
|
||||
while let Some(frontier) = self.frontier.pop_front() {
|
||||
let reachable_from = self.ancestors.entry(frontier.revision.clone()).or_default();
|
||||
reachable_from.insert(frontier.source);
|
||||
|
||||
if reachable_from.len() == self.reachable_len {
|
||||
self.reachable_len = frontier.revision.len();
|
||||
self.frontier.clear();
|
||||
self.ancestors.clear();
|
||||
self.start = frontier.revision.clone();
|
||||
for operation_id in frontier.revision.iter() {
|
||||
let parent_revision = self
|
||||
.history
|
||||
.operation(*operation_id, kv)
|
||||
.await?
|
||||
.expect("operation must exist")
|
||||
.parent()
|
||||
.clone();
|
||||
self.traversed
|
||||
.entry(parent_revision.clone())
|
||||
.or_default()
|
||||
.insert((frontier.revision.clone(), *operation_id));
|
||||
self.frontier.push_back(Frontier {
|
||||
source: *operation_id,
|
||||
revision: parent_revision,
|
||||
});
|
||||
}
|
||||
|
||||
return Ok(Some(frontier.revision));
|
||||
} else {
|
||||
for operation_id in frontier.revision.iter() {
|
||||
let parent_revision = self
|
||||
.history
|
||||
.operation(*operation_id, kv)
|
||||
.await?
|
||||
.expect("operation must exist")
|
||||
.parent()
|
||||
.clone();
|
||||
self.traversed
|
||||
.entry(parent_revision.clone())
|
||||
.or_default()
|
||||
.insert((frontier.revision.clone(), *operation_id));
|
||||
|
||||
self.frontier.push_back(Frontier {
|
||||
source: frontier.source,
|
||||
revision: parent_revision,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
pub fn replay(mut self) -> impl Iterator<Item = ReplayOperation> {
|
||||
let mut stack = VecDeque::new();
|
||||
if let Some(children) = self.traversed.remove(&self.start) {
|
||||
for (child_revision_id, operation_id) in children {
|
||||
stack.push_back(ReplayOperation {
|
||||
parent_revision_id: self.start.clone(),
|
||||
target_revision_id: child_revision_id.clone(),
|
||||
operation_id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
iter::from_fn(move || {
|
||||
let entry = stack.pop_front()?;
|
||||
if let Some(children) = self.traversed.remove(&entry.target_revision_id) {
|
||||
for (child_revision, operation_id) in children {
|
||||
stack.push_back(ReplayOperation {
|
||||
parent_revision_id: entry.target_revision_id.clone(),
|
||||
target_revision_id: child_revision.clone(),
|
||||
operation_id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Some(entry)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Eq, PartialEq)]
|
||||
pub struct ReplayOperation {
|
||||
pub parent_revision_id: RevisionId,
|
||||
pub target_revision_id: RevisionId,
|
||||
pub operation_id: OperationId,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for ReplayOperation {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{:?} -> {:?} via {:?}",
|
||||
self.parent_revision_id, self.target_revision_id, self.operation_id
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
struct DeferredOperation {
|
||||
parent: OperationId,
|
||||
operation: Operation,
|
||||
}
|
||||
|
||||
impl PartialEq for DeferredOperation {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.parent == other.parent && self.operation.id() == other.operation.id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for DeferredOperation {}
|
||||
|
||||
impl PartialOrd for DeferredOperation {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for DeferredOperation {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.parent
|
||||
.cmp(&other.parent)
|
||||
.then_with(|| self.operation.id().cmp(&other.operation.id()))
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::Item for DeferredOperation {
|
||||
type Summary = OperationId;
|
||||
|
||||
fn summary(&self) -> Self::Summary {
|
||||
self.parent
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::KeyedItem for DeferredOperation {
|
||||
type Key = (OperationId, OperationId);
|
||||
|
||||
fn key(&self) -> Self::Key {
|
||||
(self.parent, self.operation.id())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::btree::tests::InMemoryKv;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_rewind() {
|
||||
let kv = InMemoryKv::default();
|
||||
let mut history = History::new(ReplicaId(0));
|
||||
let op1 = insert_operation(&[], &mut history, &kv).await;
|
||||
let op2 = insert_operation(&[op1.id()], &mut history, &kv).await;
|
||||
let op3 = insert_operation(&[op1.id()], &mut history, &kv).await;
|
||||
let op4 = insert_operation(&[op2.id(), op3.id()], &mut history, &kv).await;
|
||||
let op5 = insert_operation(&[op4.id()], &mut history, &kv).await;
|
||||
let op6 = insert_operation(&[op4.id()], &mut history, &kv).await;
|
||||
let op7 = insert_operation(&[op2.id()], &mut history, &kv).await;
|
||||
let op8 = insert_operation(&[op5.id()], &mut history, &kv).await;
|
||||
let op9 = insert_operation(&[op5.id()], &mut history, &kv).await;
|
||||
let op10 = insert_operation(&[op8.id()], &mut history, &kv).await;
|
||||
let op11 = insert_operation(&[op9.id(), op10.id()], &mut history, &kv).await;
|
||||
|
||||
assert_eq!(
|
||||
rewind(&[op4.id()], &mut history, &kv).await,
|
||||
&[
|
||||
(
|
||||
RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
operation_id: op4.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op1.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op3.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([].as_slice()),
|
||||
target_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
operation_id: op1.id(),
|
||||
}]
|
||||
),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
rewind(&[op6.id()], &mut history, &kv).await,
|
||||
&[
|
||||
(
|
||||
RevisionId::from([op4.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op6.id()].as_slice()),
|
||||
operation_id: op6.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
operation_id: op4.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op1.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op3.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([].as_slice()),
|
||||
target_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
operation_id: op1.id(),
|
||||
}]
|
||||
),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
rewind(&[op5.id(), op6.id()], &mut history, &kv).await,
|
||||
&[
|
||||
(
|
||||
RevisionId::from([op4.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op5.id(), op6.id()].as_slice()),
|
||||
operation_id: op5.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op5.id(), op6.id()].as_slice()),
|
||||
operation_id: op6.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
operation_id: op4.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op1.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op3.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([].as_slice()),
|
||||
target_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
operation_id: op1.id(),
|
||||
}]
|
||||
),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
rewind(&[op4.id(), op7.id()], &mut history, &kv).await,
|
||||
&[
|
||||
(
|
||||
RevisionId::from([op1.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op3.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id(), op7.id()].as_slice()),
|
||||
operation_id: op7.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id(), op7.id()].as_slice()),
|
||||
operation_id: op4.id(),
|
||||
},
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([].as_slice()),
|
||||
target_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
operation_id: op1.id(),
|
||||
}]
|
||||
),
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
rewind(&[op11.id()], &mut history, &kv).await,
|
||||
&[
|
||||
(
|
||||
RevisionId::from([op9.id(), op10.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op9.id(), op10.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op11.id()].as_slice()),
|
||||
operation_id: op11.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op5.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op5.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op8.id()].as_slice()),
|
||||
operation_id: op8.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op5.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op9.id(), op10.id()].as_slice()),
|
||||
operation_id: op9.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op8.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op9.id(), op10.id()].as_slice()),
|
||||
operation_id: op10.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op4.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op5.id()].as_slice()),
|
||||
operation_id: op5.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op4.id()].as_slice()),
|
||||
operation_id: op4.id(),
|
||||
}]
|
||||
),
|
||||
(
|
||||
RevisionId::from([op1.id()].as_slice()),
|
||||
vec![
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op2.id(),
|
||||
},
|
||||
ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
target_revision_id: RevisionId::from([op2.id(), op3.id()].as_slice()),
|
||||
operation_id: op3.id(),
|
||||
}
|
||||
]
|
||||
),
|
||||
(
|
||||
RevisionId::from([].as_slice()),
|
||||
vec![ReplayOperation {
|
||||
parent_revision_id: RevisionId::from([].as_slice()),
|
||||
target_revision_id: RevisionId::from([op1.id()].as_slice()),
|
||||
operation_id: op1.id(),
|
||||
}]
|
||||
),
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
async fn insert_operation(
|
||||
parent: &[OperationId],
|
||||
history: &mut History,
|
||||
kv: &dyn KvStore,
|
||||
) -> Operation {
|
||||
let operation = Operation::CreateBranch(crate::operations::CreateBranch {
|
||||
id: history.next_operation_id(),
|
||||
parent: parent.into(),
|
||||
name: "1".into(),
|
||||
});
|
||||
history.insert(operation.clone(), kv).await.unwrap();
|
||||
operation
|
||||
}
|
||||
|
||||
async fn rewind(
|
||||
revision_id: &[OperationId],
|
||||
history: &mut History,
|
||||
kv: &dyn KvStore,
|
||||
) -> Vec<(RevisionId, Vec<ReplayOperation>)> {
|
||||
let mut rewind = history.rewind(&revision_id.into(), kv).await.unwrap();
|
||||
let mut results = Vec::new();
|
||||
let mut prev_replay = Vec::new();
|
||||
let mut ix = 0;
|
||||
while let Some(ancestor_id) = rewind.next(kv).await.unwrap() {
|
||||
let mut replay = rewind.replay().collect::<Vec<_>>();
|
||||
let suffix_start = replay.len() - prev_replay.len();
|
||||
assert_eq!(prev_replay, &replay[suffix_start..]);
|
||||
prev_replay = replay.clone();
|
||||
drop(replay.drain(suffix_start..));
|
||||
results.push((ancestor_id, replay));
|
||||
|
||||
rewind = history.rewind(&revision_id.into(), kv).await.unwrap();
|
||||
ix += 1;
|
||||
for _ in 0..ix {
|
||||
rewind.next(kv).await.unwrap();
|
||||
}
|
||||
}
|
||||
results
|
||||
}
|
||||
}
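
A much-simplified standalone analogue of History::rewind / Rewind::next above, with operation ids as strings and no KvStore: walk each starting operation's parent revision breadth-first and report the first revision reachable from every starting operation, i.e. a common ancestor the replay phase can rebuild from. The real implementation also records the traversal so Rewind::replay can emit operations in order, dedupes visited revisions, and resets its state each time an ancestor is found so the walk can continue further back.

use std::collections::{HashMap, HashSet, VecDeque};

type Op = &'static str;
type Revision = Vec<Op>;

fn first_common_ancestor(
    parents: &HashMap<Op, Revision>, // operation id -> its parent revision's operation ids
    start_ops: &[Op],
) -> Option<Revision> {
    // Each frontier entry remembers which starting operation ("source") it came from.
    let mut frontier: VecDeque<(Op, Revision)> = start_ops
        .iter()
        .map(|op| (*op, parents[op].clone()))
        .collect();
    let mut reachable_from: HashMap<Revision, HashSet<Op>> = HashMap::new();
    while let Some((source, revision)) = frontier.pop_front() {
        let sources = reachable_from.entry(revision.clone()).or_default();
        sources.insert(source);
        // A revision reachable from every starting operation is a common ancestor.
        if sources.len() == start_ops.len() {
            return Some(revision);
        }
        for op in &revision {
            frontier.push_back((source, parents[op].clone()));
        }
    }
    None
}

fn main() {
    // op1 <- op2, op1 <- op3, {op2, op3} <- op4
    let mut parents = HashMap::new();
    parents.insert("op1", vec![]);
    parents.insert("op2", vec!["op1"]);
    parents.insert("op3", vec!["op1"]);
    parents.insert("op4", vec!["op2", "op3"]);
    assert_eq!(
        first_common_ancestor(&parents, &["op2", "op3"]),
        Some(vec!["op1"])
    );
}
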
|
||||
182
crates/crdb/src/messages.rs
Normal file
@@ -0,0 +1,182 @@
|
||||
use crate::{
|
||||
operations::{CreateBranch, CreateDocument, Edit},
|
||||
BranchId, OperationCount, OperationId, ReplicaId, RepoId, Request, RevisionId, RoomCredentials,
|
||||
};
|
||||
use collections::BTreeMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{any::Any, sync::Arc};
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum RequestEnvelope {
|
||||
PublishRepo(PublishRepo),
|
||||
CloneRepo(CloneRepo),
|
||||
ReconnectToRepo(ReconnectToRepo),
|
||||
SyncRepo(SyncRepo),
|
||||
PublishOperations(PublishOperations),
|
||||
}
|
||||
|
||||
impl RequestEnvelope {
|
||||
pub fn unwrap(self) -> Box<dyn Any> {
|
||||
match self {
|
||||
RequestEnvelope::PublishRepo(request) => Box::new(request),
|
||||
RequestEnvelope::CloneRepo(request) => Box::new(request),
|
||||
RequestEnvelope::ReconnectToRepo(request) => Box::new(request),
|
||||
RequestEnvelope::SyncRepo(request) => Box::new(request),
|
||||
RequestEnvelope::PublishOperations(request) => Box::new(request),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Operation> for MessageEnvelope {
|
||||
fn from(value: Operation) -> Self {
|
||||
Self::Operation(value)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct PublishRepo {
|
||||
pub id: RepoId,
|
||||
pub name: Arc<str>,
|
||||
}
|
||||
|
||||
impl Request for PublishRepo {
|
||||
type Response = PublishRepoResponse;
|
||||
}
|
||||
|
||||
impl Into<RequestEnvelope> for PublishRepo {
|
||||
fn into(self) -> RequestEnvelope {
|
||||
RequestEnvelope::PublishRepo(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct PublishRepoResponse {
|
||||
pub credentials: RoomCredentials,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct CloneRepo {
|
||||
pub name: Arc<str>,
|
||||
}
|
||||
|
||||
impl Request for CloneRepo {
|
||||
type Response = CloneRepoResponse;
|
||||
}
|
||||
|
||||
impl Into<RequestEnvelope> for CloneRepo {
|
||||
fn into(self) -> RequestEnvelope {
|
||||
RequestEnvelope::CloneRepo(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct CloneRepoResponse {
|
||||
pub repo_id: RepoId,
|
||||
pub replica_id: ReplicaId,
|
||||
pub credentials: RoomCredentials,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct ReconnectToRepo {
|
||||
pub id: RepoId,
|
||||
pub replica_id: ReplicaId,
|
||||
}
|
||||
|
||||
impl Request for ReconnectToRepo {
|
||||
type Response = ReconnectToRepoResponse;
|
||||
}
|
||||
|
||||
impl Into<RequestEnvelope> for ReconnectToRepo {
|
||||
fn into(self) -> RequestEnvelope {
|
||||
RequestEnvelope::ReconnectToRepo(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct ReconnectToRepoResponse {
|
||||
pub credentials: RoomCredentials,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct SyncRepo {
|
||||
pub id: RepoId,
|
||||
pub max_operation_ids: BTreeMap<ReplicaId, OperationCount>,
|
||||
}
|
||||
|
||||
impl Request for SyncRepo {
|
||||
type Response = SyncRepoResponse;
|
||||
}
|
||||
|
||||
impl Into<RequestEnvelope> for SyncRepo {
|
||||
fn into(self) -> RequestEnvelope {
|
||||
RequestEnvelope::SyncRepo(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct SyncRepoResponse {
|
||||
pub operations: Vec<Operation>,
|
||||
pub max_operation_ids: BTreeMap<ReplicaId, OperationCount>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct PublishOperations {
|
||||
pub repo_id: RepoId,
|
||||
pub operations: Vec<Operation>,
|
||||
}
|
||||
|
||||
impl Request for PublishOperations {
|
||||
type Response = ();
|
||||
}
|
||||
|
||||
impl Into<RequestEnvelope> for PublishOperations {
|
||||
fn into(self) -> RequestEnvelope {
|
||||
RequestEnvelope::PublishOperations(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum MessageEnvelope {
|
||||
Operation(Operation),
|
||||
}
|
||||
|
||||
impl MessageEnvelope {
|
||||
pub fn unwrap(self) -> Box<dyn Any> {
|
||||
Box::new(match self {
|
||||
MessageEnvelope::Operation(message) => message,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum Operation {
|
||||
CreateDocument(CreateDocument),
|
||||
Edit(Edit),
|
||||
CreateBranch(CreateBranch),
|
||||
}
|
||||
|
||||
impl Operation {
|
||||
pub fn id(&self) -> OperationId {
|
||||
match self {
|
||||
Operation::CreateDocument(op) => op.id,
|
||||
Operation::Edit(op) => op.id,
|
||||
Operation::CreateBranch(op) => op.id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn branch_id(&self) -> BranchId {
|
||||
match self {
|
||||
Operation::CreateBranch(op) => op.id,
|
||||
Operation::CreateDocument(op) => op.branch_id,
|
||||
Operation::Edit(op) => op.branch_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parent(&self) -> &RevisionId {
|
||||
match self {
|
||||
Operation::CreateDocument(op) => &op.parent,
|
||||
Operation::Edit(op) => &op.parent,
|
||||
Operation::CreateBranch(op) => &op.parent,
|
||||
}
|
||||
}
|
||||
}
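
A hypothetical, minimal analogue of the envelope types above (none of these names are the crate's; assumes serde with the derive feature plus serde_json is available): concrete request structs are wrapped in one serializable enum for transport, and unwrap turns the received envelope back into a Box<dyn Any> that a handler downcasts to the concrete type it expects.

use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Serialize, Deserialize)]
struct Ping {
    payload: u32,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
enum Envelope {
    Ping(Ping),
}

impl Envelope {
    fn unwrap(self) -> Box<dyn std::any::Any> {
        match self {
            Envelope::Ping(request) => Box::new(request),
        }
    }
}

fn main() {
    // Serialize on the sending side, deserialize and downcast on the receiving side.
    let bytes = serde_json::to_vec(&Envelope::Ping(Ping { payload: 7 })).unwrap();
    let envelope: Envelope = serde_json::from_slice(&bytes).unwrap();
    let request = envelope.unwrap().downcast::<Ping>().unwrap();
    assert_eq!(request.payload, 7);
}
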
|
||||
286
crates/crdb/src/operations.rs
Normal file
@@ -0,0 +1,286 @@
|
||||
use crate::{
|
||||
btree::{self, Bias},
|
||||
dense_id::DenseId,
|
||||
AnchorRange, BranchId, DocumentFragment, DocumentFragmentSummary, DocumentId, DocumentMetadata,
|
||||
InsertionFragment, OperationId, Revision, RevisionId, RopeBuilder, Tombstone,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::SmallVec;
|
||||
use std::{cmp, sync::Arc};
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct CreateBranch {
|
||||
pub id: BranchId,
|
||||
pub parent: RevisionId,
|
||||
pub name: Arc<str>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct CreateDocument {
|
||||
pub id: DocumentId,
|
||||
pub branch_id: BranchId,
|
||||
pub parent: RevisionId,
|
||||
}
|
||||
|
||||
impl CreateDocument {
|
||||
pub fn apply(self, revision: &mut Revision) {
|
||||
let mut cursor = revision.document_fragments.cursor::<DocumentId>();
|
||||
let mut new_document_fragments = cursor.slice(&self.id, Bias::Right, &());
|
||||
new_document_fragments.push(
|
||||
DocumentFragment {
|
||||
document_id: self.id,
|
||||
location: DenseId::min(),
|
||||
insertion_id: self.id,
|
||||
insertion_subrange: 0..0,
|
||||
tombstones: Default::default(),
|
||||
undo_count: 0,
|
||||
},
|
||||
&(),
|
||||
);
|
||||
new_document_fragments.append(cursor.suffix(&()), &());
|
||||
drop(cursor);
|
||||
|
||||
revision.document_fragments = new_document_fragments;
|
||||
revision.insertion_fragments.insert_or_replace(
|
||||
InsertionFragment {
|
||||
insertion_id: self.id,
|
||||
offset_in_insertion: 0,
|
||||
fragment_location: DenseId::min(),
|
||||
},
|
||||
&(),
|
||||
);
|
||||
revision.document_metadata.insert(
|
||||
self.id,
|
||||
DocumentMetadata {
|
||||
path: None,
|
||||
last_change: self.id,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct Edit {
|
||||
pub id: OperationId,
|
||||
pub document_id: DocumentId,
|
||||
pub branch_id: BranchId,
|
||||
pub parent: RevisionId,
|
||||
pub edits: SmallVec<[(AnchorRange, Arc<str>); 2]>,
|
||||
}
|
||||
|
||||
impl Edit {
|
||||
pub fn apply(self, parent_revision: &Revision, revision: &mut Revision) -> Result<()> {
|
||||
if self.edits.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut old_fragments = revision
|
||||
.document_fragments
|
||||
.cursor::<DocumentFragmentSummary>();
|
||||
|
||||
// Slice to the start of the document to which this operation applies.
|
||||
let mut new_fragments = old_fragments.slice(&self.document_id, Bias::Left, &());
|
||||
let mut new_insertions = Vec::new();
|
||||
let mut new_ropes = RopeBuilder::new(
|
||||
revision.visible_text.cursor(0),
|
||||
revision.hidden_text.cursor(0),
|
||||
);
|
||||
new_ropes.append(
|
||||
new_fragments.summary().visible_len,
|
||||
new_fragments.summary().hidden_len,
|
||||
);
|
||||
|
||||
let mut insertion_offset = 0;
|
||||
let mut current_fragment = old_fragments.item().cloned();
|
||||
for (range, new_text) in self.edits {
|
||||
// We need to tombstone the intersection of the edit's range with fragments that
|
||||
// were visible in the operation's parent revision.
|
||||
for mut parent_fragment in parent_revision
|
||||
.visible_fragments_for_range(range.clone())?
|
||||
.cloned()
|
||||
{
|
||||
// Intersect the parent fragment with the edit's range.
|
||||
if parent_fragment.insertion_id == range.start_insertion_id {
|
||||
parent_fragment.insertion_subrange.start = range.start_offset_in_insertion;
|
||||
}
|
||||
if parent_fragment.insertion_id == range.end_insertion_id {
|
||||
parent_fragment.insertion_subrange.end = cmp::min(
|
||||
parent_fragment.insertion_subrange.end,
|
||||
range.end_offset_in_insertion,
|
||||
);
|
||||
}
|
||||
|
||||
// Find the locations of the parent fragment in the new revision.
|
||||
for fragment_location in revision.fragment_locations(
|
||||
parent_fragment.insertion_id,
|
||||
parent_fragment.insertion_subrange,
|
||||
) {
|
||||
if let Some(fragment) = current_fragment.as_ref() {
|
||||
// Advance to fragment_location if it is greater than the location of the current fragment.
|
||||
if *fragment_location > fragment.location {
|
||||
// Flush the remainder of the current fragment.
|
||||
if !fragment.insertion_subrange.is_empty() || fragment.is_sentinel() {
|
||||
new_ropes.push_fragment(fragment, fragment.visible());
|
||||
new_insertions
|
||||
.push(btree::Edit::Insert(InsertionFragment::new(&fragment)));
|
||||
new_fragments.push(fragment.clone(), &());
|
||||
}
|
||||
old_fragments.next(&());
|
||||
|
||||
// Append all fragments between the previous fragment and the new fragment_location.
|
||||
let slice = old_fragments.slice(
|
||||
&(self.document_id, fragment_location),
|
||||
Bias::Left,
|
||||
&(),
|
||||
);
|
||||
new_ropes
|
||||
.append(slice.summary().visible_len, slice.summary().hidden_len);
|
||||
new_fragments.append(slice, &());
|
||||
current_fragment = old_fragments.item().cloned();
|
||||
|
||||
// We should always find a fragment when seeking to fragment_location.
|
||||
debug_assert!(current_fragment.is_some());
|
||||
}
|
||||
}
|
||||
|
||||
// If the edit starts at the end of the current fragment, flush it.
|
||||
if let Some(fragment) = current_fragment.as_ref() {
|
||||
if fragment.insertion_id == range.start_insertion_id
|
||||
&& fragment.insertion_subrange.end == range.start_offset_in_insertion
|
||||
{
|
||||
let fragment = current_fragment.take().unwrap();
|
||||
new_ropes.push_fragment(&fragment, fragment.visible());
|
||||
new_insertions
|
||||
.push(btree::Edit::Insert(InsertionFragment::new(&fragment)));
|
||||
new_fragments.push(fragment, &());
|
||||
old_fragments.next(&());
|
||||
current_fragment = old_fragments.item().and_then(|fragment| {
|
||||
if fragment.document_id == self.document_id {
|
||||
Some(fragment.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(fragment) = current_fragment.take() {
|
||||
// If we haven't advanced off the end, then the current fragment intersects
|
||||
// the current edit's range.
|
||||
let (prefix, mut intersection, suffix) = fragment.intersect(range.clone());
|
||||
|
||||
// If we have a prefix, push it.
|
||||
if let Some(mut prefix) = prefix {
|
||||
prefix.location = DenseId::between(
|
||||
&new_fragments.summary().max_location,
|
||||
&intersection.location,
|
||||
);
|
||||
new_insertions
|
||||
.push(btree::Edit::Insert(InsertionFragment::new(&prefix)));
|
||||
new_ropes.push_fragment(&prefix, prefix.visible());
|
||||
new_fragments.push(prefix, &());
|
||||
}
|
||||
|
||||
if let Some(suffix) = suffix {
|
||||
intersection.location = DenseId::between(
|
||||
&new_fragments.summary().max_location,
|
||||
&suffix.location,
|
||||
);
|
||||
// If we still have a suffix, the next edit may be inside of it, so set it as
|
||||
// the current fragment and continue the loop.
|
||||
current_fragment = Some(suffix);
|
||||
} else {
|
||||
// Otherwise, advance to the next fragment if it's still part of the same document.
|
||||
old_fragments.next(&());
|
||||
if let Some(next_fragment) = old_fragments.item() {
|
||||
if next_fragment.document_id == self.document_id {
|
||||
current_fragment = Some(next_fragment.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Then tombstone the intersecting portion.
|
||||
let was_visible = intersection.visible();
|
||||
intersection.tombstones.push(Tombstone {
|
||||
id: self.id,
|
||||
undo_count: 0,
|
||||
});
|
||||
new_ropes.push_fragment(&intersection, was_visible);
|
||||
new_insertions
|
||||
.push(btree::Edit::Insert(InsertionFragment::new(&intersection)));
|
||||
new_fragments.push(intersection, &());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Move past insertions that were causally after the current operation.
|
||||
while let Some(fragment) = current_fragment.as_ref() {
|
||||
if fragment.insertion_id.is_causally_after(self.id) {
|
||||
new_ropes.push_fragment(fragment, fragment.visible());
|
||||
new_insertions.push(btree::Edit::Insert(InsertionFragment::new(fragment)));
|
||||
new_fragments.push(fragment.clone(), &());
|
||||
old_fragments.next(&());
|
||||
current_fragment = old_fragments.item().and_then(|fragment| {
|
||||
if fragment.document_id == self.document_id {
|
||||
Some(fragment.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Finally, insert a fragment containing the new text.
|
||||
if !new_text.is_empty() {
|
||||
let fragment = DocumentFragment {
|
||||
document_id: self.document_id,
|
||||
location: DenseId::between(
|
||||
&new_fragments.summary().max_location,
|
||||
current_fragment
|
||||
.as_ref()
|
||||
.map_or(DenseId::max_ref(), |fragment| &fragment.location),
|
||||
),
|
||||
insertion_id: self.id,
|
||||
insertion_subrange: insertion_offset..insertion_offset + new_text.len(),
|
||||
tombstones: Default::default(),
|
||||
undo_count: 0,
|
||||
};
|
||||
new_insertions.push(btree::Edit::Insert(InsertionFragment::new(&fragment)));
|
||||
new_ropes.push_str(new_text.as_ref());
|
||||
new_fragments.push(fragment, &());
|
||||
insertion_offset += new_text.len();
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(fragment) = current_fragment {
|
||||
if !fragment.insertion_subrange.is_empty() {
|
||||
new_ropes.push_fragment(&fragment, fragment.visible());
|
||||
new_insertions.push(btree::Edit::Insert(InsertionFragment::new(&fragment)));
|
||||
new_fragments.push(fragment, &());
|
||||
}
|
||||
old_fragments.next(&());
|
||||
}
|
||||
|
||||
let suffix = old_fragments.suffix(&());
|
||||
drop(old_fragments);
|
||||
|
||||
new_ropes.append(suffix.summary().visible_len, suffix.summary().hidden_len);
|
||||
let (visible_text, hidden_text) = new_ropes.finish();
|
||||
revision.visible_text = visible_text;
|
||||
revision.hidden_text = hidden_text;
|
||||
|
||||
new_fragments.append(suffix, &());
|
||||
revision.document_fragments = new_fragments;
|
||||
|
||||
new_insertions.sort_unstable_by_key(|edit| edit.key());
|
||||
new_insertions.dedup_by_key(|edit| edit.key());
|
||||
revision.insertion_fragments.edit(new_insertions, &());
|
||||
|
||||
revision.check_invariants();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
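The core move in Edit::apply above is splitting each fragment that an edit touches into a prefix, an intersection (which gets tombstoned), and a suffix. The standalone sketch below illustrates that three-way split on plain byte ranges; it is a deliberate simplification that ignores DenseId locations, insertion subranges, and tombstone bookkeeping, so it is not the crate's actual intersect logic.

use std::ops::Range;

// Split `fragment` into (prefix, intersection, suffix) relative to `edit`,
// mirroring the prefix/intersection/suffix handling in Edit::apply, but on raw offsets.
fn split_fragment(
    fragment: Range<usize>,
    edit: Range<usize>,
) -> (Option<Range<usize>>, Option<Range<usize>>, Option<Range<usize>>) {
    let start = edit.start.clamp(fragment.start, fragment.end);
    let end = edit.end.clamp(fragment.start, fragment.end);
    let prefix = (fragment.start < start).then(|| fragment.start..start);
    let intersection = (start < end).then(|| start..end);
    let suffix = (end < fragment.end).then(|| end..fragment.end);
    (prefix, intersection, suffix)
}

fn main() {
    // A fragment covering bytes 10..30 hit by an edit over 15..20 yields all three pieces.
    assert_eq!(
        split_fragment(10..30, 15..20),
        (Some(10..15), Some(15..20), Some(20..30))
    );
}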
crates/crdb/src/rope.rs (new file, 1443 lines added; contents not shown)
crates/crdb/src/rope/offset_utf16.rs (new file, 51 lines added)
@@ -0,0 +1,51 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::ops::{Add, AddAssign, Sub};
|
||||
|
||||
#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
|
||||
pub struct OffsetUtf16(pub usize);
|
||||
|
||||
impl<'a> Add<&'a Self> for OffsetUtf16 {
|
||||
type Output = Self;
|
||||
|
||||
fn add(self, other: &'a Self) -> Self::Output {
|
||||
Self(self.0 + other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for OffsetUtf16 {
|
||||
type Output = Self;
|
||||
|
||||
fn add(self, other: Self) -> Self::Output {
|
||||
Self(self.0 + other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Sub<&'a Self> for OffsetUtf16 {
|
||||
type Output = Self;
|
||||
|
||||
fn sub(self, other: &'a Self) -> Self::Output {
|
||||
debug_assert!(*other <= self);
|
||||
Self(self.0 - other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for OffsetUtf16 {
|
||||
type Output = OffsetUtf16;
|
||||
|
||||
fn sub(self, other: Self) -> Self::Output {
|
||||
debug_assert!(other <= self);
|
||||
Self(self.0 - other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> AddAssign<&'a Self> for OffsetUtf16 {
|
||||
fn add_assign(&mut self, other: &'a Self) {
|
||||
self.0 += other.0;
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<Self> for OffsetUtf16 {
|
||||
fn add_assign(&mut self, other: Self) {
|
||||
self.0 += other.0;
|
||||
}
|
||||
}
|
||||
crates/crdb/src/rope/point.rs (new file, 129 lines added)
@@ -0,0 +1,129 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
ops::{Add, AddAssign, Sub},
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, Default, Eq, PartialEq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct Point {
|
||||
pub row: u32,
|
||||
pub column: u32,
|
||||
}
|
||||
|
||||
impl Point {
|
||||
pub const MAX: Self = Self {
|
||||
row: u32::MAX,
|
||||
column: u32::MAX,
|
||||
};
|
||||
|
||||
pub fn new(row: u32, column: u32) -> Self {
|
||||
Point { row, column }
|
||||
}
|
||||
|
||||
pub fn zero() -> Self {
|
||||
Point::new(0, 0)
|
||||
}
|
||||
|
||||
pub fn parse_str(s: &str) -> Self {
|
||||
let mut point = Self::zero();
|
||||
for (row, line) in s.split('\n').enumerate() {
|
||||
point.row = row as u32;
|
||||
point.column = line.len() as u32;
|
||||
}
|
||||
point
|
||||
}
|
||||
|
||||
pub fn is_zero(&self) -> bool {
|
||||
self.row == 0 && self.column == 0
|
||||
}
|
||||
|
||||
pub fn saturating_sub(self, other: Self) -> Self {
|
||||
if self < other {
|
||||
Self::zero()
|
||||
} else {
|
||||
self - other
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Add<&'a Self> for Point {
|
||||
type Output = Point;
|
||||
|
||||
fn add(self, other: &'a Self) -> Self::Output {
|
||||
self + *other
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for Point {
|
||||
type Output = Point;
|
||||
|
||||
fn add(self, other: Self) -> Self::Output {
|
||||
if other.row == 0 {
|
||||
Point::new(self.row, self.column + other.column)
|
||||
} else {
|
||||
Point::new(self.row + other.row, other.column)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Sub<&'a Self> for Point {
|
||||
type Output = Point;
|
||||
|
||||
fn sub(self, other: &'a Self) -> Self::Output {
|
||||
self - *other
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for Point {
|
||||
type Output = Point;
|
||||
|
||||
fn sub(self, other: Self) -> Self::Output {
|
||||
debug_assert!(other <= self);
|
||||
|
||||
if self.row == other.row {
|
||||
Point::new(0, self.column - other.column)
|
||||
} else {
|
||||
Point::new(self.row - other.row, self.column)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> AddAssign<&'a Self> for Point {
|
||||
fn add_assign(&mut self, other: &'a Self) {
|
||||
*self += *other;
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<Self> for Point {
|
||||
fn add_assign(&mut self, other: Self) {
|
||||
if other.row == 0 {
|
||||
self.column += other.column;
|
||||
} else {
|
||||
self.row += other.row;
|
||||
self.column = other.column;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Point {
|
||||
fn partial_cmp(&self, other: &Point) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Point {
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
fn cmp(&self, other: &Point) -> Ordering {
|
||||
let a = (self.row as usize) << 32 | self.column as usize;
|
||||
let b = (other.row as usize) << 32 | other.column as usize;
|
||||
a.cmp(&b)
|
||||
}
|
||||
|
||||
#[cfg(target_pointer_width = "32")]
|
||||
fn cmp(&self, other: &Point) -> Ordering {
|
||||
match self.row.cmp(&other.row) {
|
||||
Ordering::Equal => self.column.cmp(&other.column),
|
||||
comparison @ _ => comparison,
|
||||
}
|
||||
}
|
||||
}
|
||||
crates/crdb/src/rope/point_utf16.rs (new file, 119 lines added)
@@ -0,0 +1,119 @@
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
ops::{Add, AddAssign, Sub},
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, Default, Eq, PartialEq, Debug, Hash)]
|
||||
pub struct PointUtf16 {
|
||||
pub row: u32,
|
||||
pub column: u32,
|
||||
}
|
||||
|
||||
impl PointUtf16 {
|
||||
pub const MAX: Self = Self {
|
||||
row: u32::MAX,
|
||||
column: u32::MAX,
|
||||
};
|
||||
|
||||
pub fn new(row: u32, column: u32) -> Self {
|
||||
PointUtf16 { row, column }
|
||||
}
|
||||
|
||||
pub fn zero() -> Self {
|
||||
PointUtf16::new(0, 0)
|
||||
}
|
||||
|
||||
pub fn is_zero(&self) -> bool {
|
||||
self.row == 0 && self.column == 0
|
||||
}
|
||||
|
||||
pub fn saturating_sub(self, other: Self) -> Self {
|
||||
if self < other {
|
||||
Self::zero()
|
||||
} else {
|
||||
self - other
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Add<&'a Self> for PointUtf16 {
|
||||
type Output = PointUtf16;
|
||||
|
||||
fn add(self, other: &'a Self) -> Self::Output {
|
||||
self + *other
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for PointUtf16 {
|
||||
type Output = PointUtf16;
|
||||
|
||||
fn add(self, other: Self) -> Self::Output {
|
||||
if other.row == 0 {
|
||||
PointUtf16::new(self.row, self.column + other.column)
|
||||
} else {
|
||||
PointUtf16::new(self.row + other.row, other.column)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Sub<&'a Self> for PointUtf16 {
|
||||
type Output = PointUtf16;
|
||||
|
||||
fn sub(self, other: &'a Self) -> Self::Output {
|
||||
self - *other
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for PointUtf16 {
|
||||
type Output = PointUtf16;
|
||||
|
||||
fn sub(self, other: Self) -> Self::Output {
|
||||
debug_assert!(other <= self);
|
||||
|
||||
if self.row == other.row {
|
||||
PointUtf16::new(0, self.column - other.column)
|
||||
} else {
|
||||
PointUtf16::new(self.row - other.row, self.column)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> AddAssign<&'a Self> for PointUtf16 {
|
||||
fn add_assign(&mut self, other: &'a Self) {
|
||||
*self += *other;
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<Self> for PointUtf16 {
|
||||
fn add_assign(&mut self, other: Self) {
|
||||
if other.row == 0 {
|
||||
self.column += other.column;
|
||||
} else {
|
||||
self.row += other.row;
|
||||
self.column = other.column;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for PointUtf16 {
|
||||
fn partial_cmp(&self, other: &PointUtf16) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for PointUtf16 {
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
fn cmp(&self, other: &PointUtf16) -> Ordering {
|
||||
let a = (self.row as usize) << 32 | self.column as usize;
|
||||
let b = (other.row as usize) << 32 | other.column as usize;
|
||||
a.cmp(&b)
|
||||
}
|
||||
|
||||
#[cfg(target_pointer_width = "32")]
|
||||
fn cmp(&self, other: &PointUtf16) -> Ordering {
|
||||
match self.row.cmp(&other.row) {
|
||||
Ordering::Equal => self.column.cmp(&other.column),
|
||||
comparison @ _ => comparison,
|
||||
}
|
||||
}
|
||||
}
|
||||
crates/crdb/src/rope/unclipped.rs (new file, 58 lines added)
@@ -0,0 +1,58 @@
|
||||
use super::{ChunkSummary, TextDimension, TextSummary};
|
||||
use crate::btree;
|
||||
use std::ops::{Add, AddAssign, Sub, SubAssign};
|
||||
|
||||
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct Unclipped<T>(pub T);
|
||||
|
||||
impl<T> From<T> for Unclipped<T> {
|
||||
fn from(value: T) -> Self {
|
||||
Unclipped(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: btree::Dimension<'a, ChunkSummary>> btree::Dimension<'a, ChunkSummary>
|
||||
for Unclipped<T>
|
||||
{
|
||||
fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) {
|
||||
self.0.add_summary(summary, &());
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: TextDimension> TextDimension for Unclipped<T> {
|
||||
fn from_text_summary(summary: &TextSummary) -> Self {
|
||||
Unclipped(T::from_text_summary(summary))
|
||||
}
|
||||
|
||||
fn add_assign(&mut self, other: &Self) {
|
||||
TextDimension::add_assign(&mut self.0, &other.0);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Add<T, Output = T>> Add<Unclipped<T>> for Unclipped<T> {
|
||||
type Output = Unclipped<T>;
|
||||
|
||||
fn add(self, rhs: Unclipped<T>) -> Self::Output {
|
||||
Unclipped(self.0 + rhs.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Sub<T, Output = T>> Sub<Unclipped<T>> for Unclipped<T> {
|
||||
type Output = Unclipped<T>;
|
||||
|
||||
fn sub(self, rhs: Unclipped<T>) -> Self::Output {
|
||||
Unclipped(self.0 - rhs.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: AddAssign<T>> AddAssign<Unclipped<T>> for Unclipped<T> {
|
||||
fn add_assign(&mut self, rhs: Unclipped<T>) {
|
||||
self.0 += rhs.0;
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SubAssign<T>> SubAssign<Unclipped<T>> for Unclipped<T> {
|
||||
fn sub_assign(&mut self, rhs: Unclipped<T>) {
|
||||
self.0 -= rhs.0;
|
||||
}
|
||||
}
|
||||
crates/crdb/src/sync.rs (new file, 419 lines added)
@@ -0,0 +1,419 @@
|
||||
use crate::{
|
||||
btree::{self, Bias},
|
||||
messages::{Operation, PublishOperations},
|
||||
OperationId,
|
||||
};
|
||||
use bromberg_sl2::HashMatrix;
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
iter,
|
||||
ops::{Range, RangeBounds},
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||
pub struct Digest {
|
||||
count: usize,
|
||||
hash: HashMatrix,
|
||||
}
|
||||
|
||||
impl btree::Item for Operation {
|
||||
type Summary = OperationSummary;
|
||||
|
||||
fn summary(&self) -> Self::Summary {
|
||||
OperationSummary {
|
||||
max_id: self.id(),
|
||||
digest: Digest {
|
||||
count: 1,
|
||||
hash: bromberg_sl2::hash_strict(&self.id().to_be_bytes()),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::KeyedItem for Operation {
|
||||
type Key = OperationId;
|
||||
|
||||
fn key(&self) -> Self::Key {
|
||||
self.id()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct OperationSummary {
|
||||
max_id: OperationId,
|
||||
digest: Digest,
|
||||
}
|
||||
|
||||
impl btree::Summary for OperationSummary {
|
||||
type Context = ();
|
||||
|
||||
fn add_summary(&mut self, summary: &Self, _: &()) {
|
||||
debug_assert!(self.max_id < summary.max_id);
|
||||
self.max_id = summary.max_id;
|
||||
self.digest.count += summary.digest.count;
|
||||
self.digest.hash = self.digest.hash * summary.digest.hash;
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::Dimension<'_, OperationSummary> for OperationId {
|
||||
fn add_summary(&mut self, summary: &'_ OperationSummary, _: &()) {
|
||||
debug_assert!(*self < summary.max_id);
|
||||
*self = summary.max_id;
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::Dimension<'_, OperationSummary> for usize {
|
||||
fn add_summary(&mut self, summary: &'_ OperationSummary, _: &()) {
|
||||
*self += summary.digest.count;
|
||||
}
|
||||
}
|
||||
|
||||
impl btree::Dimension<'_, OperationSummary> for Digest {
|
||||
fn add_summary(&mut self, summary: &'_ OperationSummary, _: &()) {
|
||||
self.count += summary.digest.count;
|
||||
self.hash = self.hash * summary.digest.hash;
|
||||
}
|
||||
}
|
||||
|
||||
struct SyncRequest {
|
||||
digests: Vec<Digest>,
|
||||
}
|
||||
|
||||
struct SyncResponse {
|
||||
shared_prefix_end: usize,
|
||||
operations: Vec<Operation>,
|
||||
}
|
||||
|
||||
struct SyncStats {
|
||||
server_operations: usize,
|
||||
client_operations: usize,
|
||||
}
|
||||
|
||||
fn sync_server(
|
||||
operations: &mut btree::Sequence<Operation>,
|
||||
sync_request: SyncRequest,
|
||||
) -> SyncResponse {
|
||||
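// Note added for clarity (not in the original source): the client sends its
// digests ordered from the longest prefix down to the shortest, so the first
// match below identifies the longest prefix both replicas agree on, and only
// the operations after that prefix need to be returned.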
for client_digest in sync_request.digests {
|
||||
let server_digest = digest_for_range(operations, 0..client_digest.count);
|
||||
if server_digest == client_digest {
|
||||
return SyncResponse {
|
||||
shared_prefix_end: server_digest.count,
|
||||
operations: operations_for_range(operations, server_digest.count..)
|
||||
.cloned()
|
||||
.collect(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
SyncResponse {
|
||||
shared_prefix_end: 0,
|
||||
operations: operations.iter().cloned().collect(),
|
||||
}
|
||||
}
|
||||
|
||||
fn publish_operations(
|
||||
server_operations: &mut btree::Sequence<Operation>,
|
||||
request: PublishOperations,
|
||||
) {
|
||||
server_operations.edit(
|
||||
request
|
||||
.operations
|
||||
.into_iter()
|
||||
.map(btree::Edit::Insert)
|
||||
.collect(),
|
||||
&(),
|
||||
);
|
||||
}
|
||||
|
||||
fn sync_client(
|
||||
client_operations: &mut btree::Sequence<Operation>,
|
||||
server_operations: &mut btree::Sequence<Operation>,
|
||||
min_shared_prefix_end: usize,
|
||||
max_digest_count: usize,
|
||||
) -> SyncStats {
|
||||
let mut digests = Vec::new();
|
||||
let mut digest_end_ix = client_operations.summary().digest.count;
|
||||
// We will multiply by some factor less than 1 to produce digests
|
||||
// over ever smaller digest ranges. The following formula ensures that
|
||||
// we will produce `max_digest_count` digests, and that the last digest
|
||||
// will go from `0` to `min_shared_prefix_end`.
|
||||
// op_count * factor^max_digest_count = min_shared_prefix_end
|
||||
// factor^max_digest_count = min_shared_prefix_end/op_count
|
||||
// max_digest_count * log_2(factor) = log_2(min_shared_prefix_end/op_count)
|
||||
// log_2(factor) = log_2(min_shared_prefix_end/op_count)/max_digest_count
|
||||
// factor = 2^(log_2(min_shared_prefix_end/op_count)/max_digest_count)
|
||||
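// Worked example (added for illustration, not in the original source): with
// op_count = 1024, min_shared_prefix_end = 16, and max_digest_count = 4,
// factor = 2^(log2(16/1024)/4) = 2^(-1.5) ≈ 0.354. The digest ranges below then
// shrink roughly as 0..1024, 0..363, 0..129, 0..46; without the ceil() the
// sequence would be 1024, 362, 128, 45.3, landing exactly on 16 after the
// fourth multiplication.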
let factor = 2f64.powf(
|
||||
(min_shared_prefix_end as f64 / digest_end_ix as f64).log2() / max_digest_count as f64,
|
||||
);
|
||||
for _ in 0..max_digest_count {
|
||||
if digest_end_ix <= min_shared_prefix_end {
|
||||
break;
|
||||
}
|
||||
|
||||
digests.push(digest_for_range(client_operations, 0..digest_end_ix));
|
||||
digest_end_ix = (digest_end_ix as f64 * factor).ceil() as usize; // 🪬
|
||||
}
|
||||
|
||||
let server_response = sync_server(server_operations, SyncRequest { digests });
|
||||
let new_ops_from_client = {
|
||||
let mut new_ops_from_client = Vec::new();
|
||||
let mut client_cursor = client_operations.cursor::<usize>();
|
||||
let mut new_client_operations =
|
||||
client_cursor.slice(&server_response.shared_prefix_end, Bias::Right, &());
|
||||
|
||||
let mut server_operations = server_response.operations.iter().peekable();
|
||||
let mut new_ops_from_server = Vec::new();
|
||||
while let Some(server_op) = server_operations.peek() {
|
||||
match client_cursor.item() {
|
||||
Some(client_operation) => {
|
||||
let comparison = server_op.id().cmp(&client_operation.id());
|
||||
match comparison {
|
||||
Ordering::Less => {
|
||||
new_ops_from_server.push(server_operations.next().unwrap().clone());
|
||||
}
|
||||
_ => {
|
||||
new_client_operations.extend(new_ops_from_server.drain(..), &());
|
||||
new_client_operations.push(client_operation.clone(), &());
|
||||
client_cursor.next(&());
|
||||
if comparison == Ordering::Equal {
|
||||
server_operations.next();
|
||||
} else {
|
||||
new_ops_from_client.push(client_operation.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
new_ops_from_server.push(server_operations.next().unwrap().clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
new_client_operations.extend(new_ops_from_server, &());
|
||||
|
||||
let client_suffix = client_cursor.suffix(&());
|
||||
new_client_operations.append(client_suffix.clone(), &());
|
||||
drop(client_cursor);
|
||||
*client_operations = new_client_operations;
|
||||
|
||||
new_ops_from_client.extend(client_suffix.iter().cloned());
|
||||
new_ops_from_client
|
||||
};
|
||||
|
||||
let sync_stats = SyncStats {
|
||||
server_operations: server_response.operations.len(),
|
||||
client_operations: new_ops_from_client.len(),
|
||||
};
|
||||
publish_operations(
|
||||
server_operations,
|
||||
PublishOperations {
|
||||
repo_id: Default::default(),
|
||||
operations: new_ops_from_client,
|
||||
},
|
||||
);
|
||||
|
||||
sync_stats
|
||||
}
|
||||
|
||||
fn digest_for_range(operations: &btree::Sequence<Operation>, range: Range<usize>) -> Digest {
|
||||
let mut cursor = operations.cursor::<usize>();
|
||||
cursor.seek(&range.start, Bias::Right, &());
|
||||
cursor.summary(&range.end, Bias::Right, &())
|
||||
}
|
||||
|
||||
fn operations_for_range<T: RangeBounds<usize>>(
|
||||
operations: &btree::Sequence<Operation>,
|
||||
range: T,
|
||||
) -> impl Iterator<Item = &Operation> {
|
||||
let mut cursor = operations.cursor::<usize>();
|
||||
match range.start_bound() {
|
||||
collections::Bound::Included(start) => {
|
||||
cursor.seek(start, Bias::Right, &());
|
||||
}
|
||||
collections::Bound::Excluded(start) => {
|
||||
cursor.seek(&(*start + 1), Bias::Right, &());
|
||||
}
|
||||
collections::Bound::Unbounded => cursor.next(&()),
|
||||
}
|
||||
|
||||
iter::from_fn(move || {
|
||||
if range.contains(cursor.start()) {
|
||||
let operation = cursor.item()?;
|
||||
cursor.next(&());
|
||||
Some(operation)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{operations, OperationCount};
|
||||
use rand::prelude::*;
|
||||
use std::env;
|
||||
|
||||
#[test]
|
||||
fn test_sync() {
|
||||
assert_sync(1..=10, 5..=10, 0, 16);
|
||||
assert_sync(1..=10, 4..=10, 0, 16);
|
||||
assert_sync(1..=10, 1..=5, 0, 16);
|
||||
assert_sync([1, 3, 5, 7, 9], [2, 4, 6, 8, 10], 0, 16);
|
||||
assert_sync([1, 2, 3, 4, 6, 7, 8, 9, 11, 12], [4, 5, 6, 10, 12], 0, 16);
|
||||
assert_sync(1..=10, 5..=14, 0, 16);
|
||||
assert_sync(1..=80, (1..=70).chain(90..=100), 0, 16);
|
||||
assert_sync(1..=1910, (1..=1900).chain(1910..=2000), 0, 16);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random(mut rng: StdRng) {
|
||||
let max_operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
.unwrap_or(10);
|
||||
let min_shared_prefix_end = 1024;
|
||||
let max_digest_count = 1024;
|
||||
|
||||
let mut connected = true;
|
||||
let mut client_ops = btree::Sequence::new();
|
||||
let mut server_ops = btree::Sequence::new();
|
||||
let mut ideal_server_ops = 0;
|
||||
let mut ideal_client_ops = 0;
|
||||
let mut next_reconnection = None;
|
||||
for ix in 1..=max_operations {
|
||||
if connected && rng.gen_bool(0.0005) {
|
||||
dbg!(ix);
|
||||
connected = false;
|
||||
|
||||
let mut factor = 0.0005;
|
||||
while rng.gen() {
|
||||
factor *= 2.0;
|
||||
}
|
||||
|
||||
let remaining_operations = max_operations - ix;
|
||||
let disconnection_period = (remaining_operations as f64 * factor) as usize;
|
||||
next_reconnection = Some(ix + disconnection_period);
|
||||
dbg!(disconnection_period);
|
||||
}
|
||||
|
||||
if next_reconnection == Some(ix) {
|
||||
connected = true;
|
||||
next_reconnection = None;
|
||||
log::debug!("===============");
|
||||
|
||||
let stats = sync_client(
|
||||
&mut client_ops,
|
||||
&mut server_ops,
|
||||
min_shared_prefix_end,
|
||||
max_digest_count,
|
||||
);
|
||||
log::debug!(
|
||||
"ideal server ops: {}, actual server ops: {}, abs error: {}, pct error: {:.3}%",
|
||||
ideal_server_ops,
|
||||
stats.server_operations,
|
||||
stats.server_operations - ideal_server_ops,
|
||||
((stats.server_operations as f64 / ideal_server_ops as f64) - 1.) * 100.
|
||||
);
|
||||
log::debug!(
|
||||
"ideal client ops: {}, actual client ops: {}, abs error: {}, pct error: {:.3}%",
|
||||
ideal_client_ops,
|
||||
stats.client_operations,
|
||||
stats.client_operations - ideal_client_ops,
|
||||
((stats.client_operations as f64 / ideal_client_ops as f64) - 1.0) * 100.
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
client_ops.iter().map(|op| op.id()).collect::<Vec<_>>(),
|
||||
server_ops.iter().map(|op| op.id()).collect::<Vec<_>>()
|
||||
);
|
||||
ideal_client_ops = 0;
|
||||
ideal_server_ops = 0;
|
||||
}
|
||||
|
||||
if connected {
|
||||
client_ops.push(build_operation(ix), &());
|
||||
server_ops.push(build_operation(ix), &());
|
||||
} else if rng.gen_bool(0.95) {
|
||||
ideal_server_ops += 1;
|
||||
server_ops.push(build_operation(ix), &());
|
||||
} else {
|
||||
ideal_client_ops += 1;
|
||||
client_ops.push(build_operation(ix), &());
|
||||
}
|
||||
}
|
||||
|
||||
log::debug!("============");
|
||||
let stats = sync_client(
|
||||
&mut client_ops,
|
||||
&mut server_ops,
|
||||
min_shared_prefix_end,
|
||||
max_digest_count,
|
||||
);
|
||||
log::debug!(
|
||||
"ideal server ops: {}, actual server ops: {}, abs error: {}, pct error: {:.3}%",
|
||||
ideal_server_ops,
|
||||
stats.server_operations,
|
||||
stats.server_operations - ideal_server_ops,
|
||||
((stats.server_operations as f64 / ideal_server_ops as f64) - 1.) * 100.
|
||||
);
|
||||
log::debug!(
|
||||
"ideal client ops: {}, actual client ops: {}, abs error: {}, pct error: {:.3}%",
|
||||
ideal_client_ops,
|
||||
stats.client_operations,
|
||||
stats.client_operations - ideal_client_ops,
|
||||
((stats.client_operations as f64 / ideal_client_ops as f64) - 1.0) * 100.
|
||||
);
|
||||
assert_eq!(
|
||||
client_ops.iter().map(|op| op.id()).collect::<Vec<_>>(),
|
||||
server_ops.iter().map(|op| op.id()).collect::<Vec<_>>()
|
||||
);
|
||||
}
|
||||
|
||||
fn assert_sync(
|
||||
client_ops: impl IntoIterator<Item = usize>,
|
||||
server_ops: impl IntoIterator<Item = usize>,
|
||||
min_digest_delta: usize,
|
||||
max_digest_count: usize,
|
||||
) {
|
||||
let client_ops = client_ops
|
||||
.into_iter()
|
||||
.map(build_operation)
|
||||
.collect::<Vec<_>>();
|
||||
let server_ops = server_ops
|
||||
.into_iter()
|
||||
.map(build_operation)
|
||||
.collect::<Vec<_>>();
|
||||
let mut client_operations = btree::Sequence::from_iter(client_ops, &());
|
||||
let mut server_operations = btree::Sequence::from_iter(server_ops, &());
|
||||
sync_client(
|
||||
&mut client_operations,
|
||||
&mut server_operations,
|
||||
min_digest_delta,
|
||||
max_digest_count,
|
||||
);
|
||||
assert_eq!(
|
||||
client_operations
|
||||
.iter()
|
||||
.map(|op| op.id())
|
||||
.collect::<Vec<_>>(),
|
||||
server_operations
|
||||
.iter()
|
||||
.map(|op| op.id())
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
}
|
||||
|
||||
fn build_operation(id: usize) -> Operation {
|
||||
Operation::CreateBranch(operations::CreateBranch {
|
||||
id: OperationId {
|
||||
replica_id: Default::default(),
|
||||
operation_count: OperationCount(id),
|
||||
},
|
||||
parent: Default::default(),
|
||||
name: "".into(),
|
||||
})
|
||||
}
|
||||
|
||||
fn digest_counts(digests: &[Digest]) -> Vec<usize> {
|
||||
digests.iter().map(|d| d.count).collect()
|
||||
}
|
||||
}
|
||||
crates/crdb/src/test.rs (new file, 201 lines added)
@@ -0,0 +1,201 @@
|
||||
use crate::{ClientNetwork, ClientRoom, RoomCredentials, RoomName, RoomToken, ServerNetwork, User};
|
||||
use anyhow::{anyhow, Result};
|
||||
use collections::BTreeMap;
|
||||
use futures::{channel::mpsc, future::BoxFuture, FutureExt, StreamExt};
|
||||
use gpui::BackgroundExecutor;
|
||||
use parking_lot::Mutex;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub struct TestNetwork(Arc<Mutex<NetworkState>>);
|
||||
|
||||
impl TestNetwork {
|
||||
pub fn new(executor: BackgroundExecutor) -> Self {
|
||||
Self(Arc::new(Mutex::new(NetworkState {
|
||||
executor,
|
||||
request_handler: None,
|
||||
rooms: Default::default(),
|
||||
})))
|
||||
}
|
||||
|
||||
pub fn server(&self) -> TestServerNetwork {
|
||||
TestServerNetwork(self.0.clone())
|
||||
}
|
||||
|
||||
pub fn client(&self, login: impl Into<Arc<str>>) -> TestClientNetwork {
|
||||
TestClientNetwork {
|
||||
user: User {
|
||||
login: login.into(),
|
||||
},
|
||||
network: self.0.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct NetworkState {
|
||||
executor: BackgroundExecutor,
|
||||
request_handler:
|
||||
Option<Box<dyn Send + Fn(User, Vec<u8>) -> Result<BoxFuture<'static, Result<Vec<u8>>>>>>,
|
||||
rooms: BTreeMap<RoomName, Room>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Room {
|
||||
inboxes: BTreeMap<RoomToken, mpsc::UnboundedSender<Vec<u8>>>,
|
||||
authorized_users: BTreeMap<RoomToken, Arc<str>>,
|
||||
next_token_id: usize,
|
||||
}
|
||||
|
||||
pub struct TestServerNetwork(Arc<Mutex<NetworkState>>);
|
||||
|
||||
impl ServerNetwork for TestServerNetwork {
|
||||
fn create_room(&self, name: &RoomName) -> BoxFuture<Result<()>> {
|
||||
let network = self.0.clone();
|
||||
let room = name.clone();
|
||||
async move {
|
||||
let executor = network.lock().executor.clone();
|
||||
executor.simulate_random_delay().await;
|
||||
network.lock().rooms.insert(room, Default::default());
|
||||
Ok(())
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
|
||||
fn grant_room_access(&self, room: &RoomName, user: &str) -> RoomToken {
|
||||
let mut network = self.0.lock();
|
||||
let room = network.rooms.get_mut(&room).expect("room must exist");
|
||||
let token_id = room.next_token_id;
|
||||
room.next_token_id += 1;
|
||||
let token = RoomToken(format!("{}/{}", token_id, user).into());
|
||||
room.authorized_users.insert(token.clone(), user.into());
|
||||
token
|
||||
}
|
||||
|
||||
fn handle_requests<H, F>(&self, handle_request: H)
|
||||
where
|
||||
H: 'static + Send + Fn(User, Vec<u8>) -> Result<F>,
|
||||
F: 'static + Send + futures::Future<Output = Result<Vec<u8>>>,
|
||||
{
|
||||
self.0.lock().request_handler = Some(Box::new(move |user, request| {
|
||||
handle_request(user, request.clone()).map(FutureExt::boxed)
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TestClientNetwork {
|
||||
user: User,
|
||||
network: Arc<Mutex<NetworkState>>,
|
||||
}
|
||||
|
||||
impl ClientNetwork for TestClientNetwork {
|
||||
type Room = TestClientRoom;
|
||||
|
||||
fn request(&self, request: Vec<u8>) -> BoxFuture<Result<Vec<u8>>> {
|
||||
let response =
|
||||
self.network.lock().request_handler.as_ref().unwrap()(self.user.clone(), request);
|
||||
async move { response?.await }.boxed()
|
||||
}
|
||||
|
||||
fn room(&self, credentials: RoomCredentials) -> Self::Room {
|
||||
TestClientRoom {
|
||||
outbox: Default::default(),
|
||||
credentials,
|
||||
message_handler: Default::default(),
|
||||
network: self.network.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TestClientRoom {
|
||||
outbox: Option<mpsc::UnboundedSender<Vec<u8>>>,
|
||||
credentials: RoomCredentials,
|
||||
message_handler: Arc<Mutex<Option<Box<dyn Send + Fn(Vec<u8>)>>>>,
|
||||
network: Arc<Mutex<NetworkState>>,
|
||||
}
|
||||
|
||||
impl ClientRoom for TestClientRoom {
|
||||
fn connect(&mut self) -> BoxFuture<Result<()>> {
|
||||
assert!(
|
||||
self.outbox.is_none(),
|
||||
"client should not connect more than once"
|
||||
);
|
||||
|
||||
let (inbox_tx, mut inbox_rx) = mpsc::unbounded();
|
||||
{
|
||||
let mut network = self.network.lock();
|
||||
let room = network
|
||||
.rooms
|
||||
.get_mut(&self.credentials.name)
|
||||
.expect("room should exist");
|
||||
|
||||
if !room.authorized_users.contains_key(&self.credentials.token) {
|
||||
return std::future::ready(Err(anyhow!(
|
||||
"token {:?} is not authorized to enter room {:?}",
|
||||
self.credentials.token,
|
||||
self.credentials.name
|
||||
)))
|
||||
.boxed();
|
||||
}
|
||||
|
||||
let existing_inbox = room
|
||||
.inboxes
|
||||
.insert(self.credentials.token.clone(), inbox_tx);
|
||||
assert!(
|
||||
existing_inbox.is_none(),
|
||||
"client should not connect twice with the same token"
|
||||
);
|
||||
}
|
||||
let message_handler = self.message_handler.clone();
|
||||
self.network
|
||||
.lock()
|
||||
.executor
|
||||
.spawn(async move {
|
||||
while let Some(message) = inbox_rx.next().await {
|
||||
if let Some(handler) = message_handler.lock().as_ref() {
|
||||
handler(message);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
// Send outbound messages to other clients in the room.
|
||||
let (outbox_tx, mut outbox_rx) = mpsc::unbounded();
|
||||
self.outbox = Some(outbox_tx);
|
||||
let executor = self.network.lock().executor.clone();
|
||||
let network = self.network.clone();
|
||||
let credentials = self.credentials.clone();
|
||||
self.network
|
||||
.lock()
|
||||
.executor
|
||||
.spawn(async move {
|
||||
while let Some(message) = outbox_rx.next().await {
|
||||
let inboxes = network
|
||||
.lock()
|
||||
.rooms
|
||||
.get(&credentials.name)
|
||||
.map(|room| room.inboxes.clone());
|
||||
if let Some(inboxes) = inboxes {
|
||||
for (inbox_token, inbox) in inboxes {
|
||||
executor.simulate_random_delay().await;
|
||||
if inbox_token != credentials.token {
|
||||
let _ = inbox.unbounded_send(message.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
async { Ok(()) }.boxed()
|
||||
}
|
||||
|
||||
fn broadcast(&self, message: Vec<u8>) {
|
||||
let tx = self.outbox.as_ref().expect("must be connected");
|
||||
tx.unbounded_send(message).expect("channel must be open");
|
||||
}
|
||||
|
||||
fn handle_messages(&self, handle_message: impl 'static + Send + Fn(Vec<u8>)) {
|
||||
self.message_handler
|
||||
.lock()
|
||||
.replace(Box::new(handle_message));
|
||||
}
|
||||
}
|
||||
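To make the harness above concrete, here is a rough sketch of wiring two simulated clients through TestNetwork. It is illustrative only: it assumes the RoomCredentials fields can be constructed directly and that the surrounding test already has a gpui BackgroundExecutor and a RoomName in hand; the crate's real tests set these up elsewhere.

// A sketch of two peers joining the same in-memory room; not taken from the crate's tests.
async fn demo(executor: gpui::BackgroundExecutor, room_name: RoomName) -> anyhow::Result<()> {
    let network = TestNetwork::new(executor);
    let server = network.server();
    server.create_room(&room_name).await?;

    // Each participant gets its own access token for the room.
    let token_a = server.grant_room_access(&room_name, "alice");
    let token_b = server.grant_room_access(&room_name, "bob");

    let client_a = network.client("alice");
    let client_b = network.client("bob");
    let mut room_a = client_a.room(RoomCredentials { name: room_name.clone(), token: token_a });
    let mut room_b = client_b.room(RoomCredentials { name: room_name.clone(), token: token_b });
    room_a.connect().await?;
    room_b.connect().await?;

    // Broadcasts from one peer reach the other with simulated network delay.
    room_b.handle_messages(|bytes| println!("received {} bytes", bytes.len()));
    room_a.broadcast(b"hello".to_vec());
    Ok(())
}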
@@ -102,6 +102,9 @@ impl Render for ProjectDiagnosticsEditor {
|
||||
|
||||
div()
|
||||
.track_focus(&self.focus_handle)
|
||||
.when(self.path_states.is_empty(), |el| {
|
||||
el.key_context("EmptyPane")
|
||||
})
|
||||
.size_full()
|
||||
.on_action(cx.listener(Self::toggle_warnings))
|
||||
.child(child)
|
||||
|
||||
@@ -268,6 +268,7 @@ gpui::actions!(
|
||||
SelectAllMatches,
|
||||
SelectDown,
|
||||
SelectLargerSyntaxNode,
|
||||
SelectEnclosingSymbol,
|
||||
SelectLeft,
|
||||
SelectLine,
|
||||
SelectRight,
|
||||
|
||||
@@ -720,8 +720,7 @@ impl DisplaySnapshot {
|
||||
if let Some(severity) = chunk.diagnostic_severity {
|
||||
// Omit underlines for HINT/INFO diagnostics on 'unnecessary' code.
|
||||
if severity <= DiagnosticSeverity::WARNING || !chunk.is_unnecessary {
|
||||
let diagnostic_color =
|
||||
super::diagnostic_style(severity, true, &editor_style.status);
|
||||
let diagnostic_color = super::diagnostic_style(severity, &editor_style.status);
|
||||
diagnostic_highlight.underline = Some(UnderlineStyle {
|
||||
color: Some(diagnostic_color),
|
||||
thickness: 1.0.into(),
|
||||
@@ -957,16 +956,18 @@ impl DisplaySnapshot {
|
||||
return false;
|
||||
}
|
||||
|
||||
for next_row in (buffer_row.0 + 1)..=max_row.0 {
|
||||
let next_line_indent = self.line_indent_for_buffer_row(MultiBufferRow(next_row));
|
||||
if next_line_indent.raw_len() > line_indent.raw_len() {
|
||||
return true;
|
||||
} else if !next_line_indent.is_line_blank() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
(buffer_row.0 + 1..=max_row.0)
|
||||
.find_map(|next_row| {
|
||||
let next_line_indent = self.line_indent_for_buffer_row(MultiBufferRow(next_row));
|
||||
if next_line_indent.raw_len() > line_indent.raw_len() {
|
||||
Some(true)
|
||||
} else if !next_line_indent.is_line_blank() {
|
||||
Some(false)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
pub fn foldable_range(
|
||||
|
||||
@@ -1221,7 +1221,7 @@ mod tests {
|
||||
use super::*;
|
||||
use crate::display_map::inlay_map::InlayMap;
|
||||
use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap};
|
||||
use gpui::{div, font, px, AssetSource, Element};
|
||||
use gpui::{div, font, px, Element};
|
||||
use multi_buffer::MultiBuffer;
|
||||
use rand::prelude::*;
|
||||
use settings::SettingsStore;
|
||||
@@ -2014,12 +2014,7 @@ mod tests {
|
||||
let settings = SettingsStore::test(cx);
|
||||
cx.set_global(settings);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
cx.text_system()
|
||||
.add_fonts(vec![assets::Assets
|
||||
.load("fonts/zed-mono/zed-mono-extended.ttf")
|
||||
.unwrap()
|
||||
.unwrap()])
|
||||
.unwrap();
|
||||
assets::Assets.load_test_fonts(cx);
|
||||
}
|
||||
|
||||
impl TransformBlock {
|
||||
|
||||
@@ -335,7 +335,7 @@ pub enum SelectMode {
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
pub enum EditorMode {
|
||||
SingleLine,
|
||||
SingleLine { auto_width: bool },
|
||||
AutoHeight { max_lines: usize },
|
||||
Full,
|
||||
}
|
||||
@@ -1580,7 +1580,13 @@ impl Editor {
|
||||
pub fn single_line(cx: &mut ViewContext<Self>) -> Self {
|
||||
let buffer = cx.new_model(|cx| Buffer::local("", cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
Self::new(EditorMode::SingleLine, buffer, None, false, cx)
|
||||
Self::new(
|
||||
EditorMode::SingleLine { auto_width: false },
|
||||
buffer,
|
||||
None,
|
||||
false,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn multi_line(cx: &mut ViewContext<Self>) -> Self {
|
||||
@@ -1589,6 +1595,18 @@ impl Editor {
|
||||
Self::new(EditorMode::Full, buffer, None, false, cx)
|
||||
}
|
||||
|
||||
pub fn auto_width(cx: &mut ViewContext<Self>) -> Self {
|
||||
let buffer = cx.new_model(|cx| Buffer::local("", cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
Self::new(
|
||||
EditorMode::SingleLine { auto_width: true },
|
||||
buffer,
|
||||
None,
|
||||
false,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn auto_height(max_lines: usize, cx: &mut ViewContext<Self>) -> Self {
|
||||
let buffer = cx.new_model(|cx| Buffer::local("", cx));
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
@@ -1701,8 +1719,8 @@ impl Editor {
|
||||
|
||||
let blink_manager = cx.new_model(|cx| BlinkManager::new(CURSOR_BLINK_INTERVAL, cx));
|
||||
|
||||
let soft_wrap_mode_override =
|
||||
(mode == EditorMode::SingleLine).then(|| language_settings::SoftWrap::PreferLine);
|
||||
let soft_wrap_mode_override = matches!(mode, EditorMode::SingleLine { .. })
|
||||
.then(|| language_settings::SoftWrap::PreferLine);
|
||||
|
||||
let mut project_subscriptions = Vec::new();
|
||||
if mode == EditorMode::Full {
|
||||
@@ -1749,7 +1767,7 @@ impl Editor {
|
||||
.detach();
|
||||
cx.on_blur(&focus_handle, Self::handle_blur).detach();
|
||||
|
||||
let show_indent_guides = if mode == EditorMode::SingleLine {
|
||||
let show_indent_guides = if matches!(mode, EditorMode::SingleLine { .. }) {
|
||||
Some(false)
|
||||
} else {
|
||||
None
|
||||
@@ -1905,7 +1923,7 @@ impl Editor {
|
||||
let mut key_context = KeyContext::new_with_defaults();
|
||||
key_context.add("Editor");
|
||||
let mode = match self.mode {
|
||||
EditorMode::SingleLine => "single_line",
|
||||
EditorMode::SingleLine { .. } => "single_line",
|
||||
EditorMode::AutoHeight { .. } => "auto_height",
|
||||
EditorMode::Full => "full",
|
||||
};
|
||||
@@ -2113,7 +2131,7 @@ impl Editor {
|
||||
self.refresh_inline_completion(false, cx);
|
||||
}
|
||||
|
||||
pub fn placeholder_text(&self, _cx: &mut WindowContext) -> Option<&str> {
|
||||
pub fn placeholder_text(&self, _cx: &WindowContext) -> Option<&str> {
|
||||
self.placeholder_text.as_deref()
|
||||
}
|
||||
|
||||
@@ -2896,6 +2914,9 @@ impl Editor {
|
||||
let start_offset = TO::to_offset(&range.start, &buffer_snapshot);
|
||||
let end_offset = start_offset + end_difference;
|
||||
let start_offset = start_offset + start_difference;
|
||||
if start_offset > buffer_snapshot.len() || end_offset > buffer_snapshot.len() {
|
||||
continue;
|
||||
}
|
||||
let start = buffer_snapshot.anchor_after(start_offset);
|
||||
let end = buffer_snapshot.anchor_after(end_offset);
|
||||
linked_edits
|
||||
@@ -3102,14 +3123,24 @@ impl Editor {
|
||||
let anchor = snapshot.anchor_after(selection.end);
|
||||
if !self.linked_edit_ranges.is_empty() {
|
||||
let start_anchor = snapshot.anchor_before(selection.start);
|
||||
if let Some(ranges) =
|
||||
self.linked_editing_ranges_for(start_anchor.text_anchor..anchor.text_anchor, cx)
|
||||
{
|
||||
for (buffer, edits) in ranges {
|
||||
linked_edits
|
||||
.entry(buffer.clone())
|
||||
.or_default()
|
||||
.extend(edits.into_iter().map(|range| (range, text.clone())));
|
||||
|
||||
let is_word_char = text.chars().next().map_or(true, |char| {
|
||||
let scope = snapshot.language_scope_at(start_anchor.to_offset(&snapshot));
|
||||
let kind = char_kind(&scope, char);
|
||||
|
||||
kind == CharKind::Word
|
||||
});
|
||||
|
||||
if is_word_char {
|
||||
if let Some(ranges) = self
|
||||
.linked_editing_ranges_for(start_anchor.text_anchor..anchor.text_anchor, cx)
|
||||
{
|
||||
for (buffer, edits) in ranges {
|
||||
linked_edits
|
||||
.entry(buffer.clone())
|
||||
.or_default()
|
||||
.extend(edits.into_iter().map(|range| (range, text.clone())));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6660,7 +6691,7 @@ impl Editor {
|
||||
return;
|
||||
}
|
||||
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -6697,7 +6728,7 @@ impl Editor {
|
||||
return;
|
||||
}
|
||||
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -6728,7 +6759,7 @@ impl Editor {
|
||||
return;
|
||||
}
|
||||
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -6791,7 +6822,17 @@ impl Editor {
|
||||
return;
|
||||
}
|
||||
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if self
|
||||
.context_menu
|
||||
.write()
|
||||
.as_mut()
|
||||
.map(|menu| menu.select_first(self.project.as_ref(), cx))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -6839,7 +6880,7 @@ impl Editor {
|
||||
pub fn move_down(&mut self, _: &MoveDown, cx: &mut ViewContext<Self>) {
|
||||
self.take_rename(true, cx);
|
||||
|
||||
if self.mode == EditorMode::SingleLine {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -6900,7 +6941,7 @@ impl Editor {
|
||||
return;
|
||||
}
|
||||
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -7248,7 +7289,7 @@ impl Editor {
|
||||
_: &MoveToStartOfParagraph,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -7268,7 +7309,7 @@ impl Editor {
|
||||
_: &MoveToEndOfParagraph,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -7288,7 +7329,7 @@ impl Editor {
|
||||
_: &SelectToStartOfParagraph,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -7308,7 +7349,7 @@ impl Editor {
|
||||
_: &SelectToEndOfParagraph,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -7324,7 +7365,7 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn move_to_beginning(&mut self, _: &MoveToBeginning, cx: &mut ViewContext<Self>) {
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -7344,7 +7385,7 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn move_to_end(&mut self, _: &MoveToEnd, cx: &mut ViewContext<Self>) {
|
||||
if matches!(self.mode, EditorMode::SingleLine) {
|
||||
if matches!(self.mode, EditorMode::SingleLine { .. }) {
|
||||
cx.propagate();
|
||||
return;
|
||||
}
|
||||
@@ -8203,7 +8244,7 @@ impl Editor {
|
||||
let advance_downwards = action.advance_downwards
|
||||
&& selections_on_single_row
|
||||
&& !selections_selecting
|
||||
&& this.mode != EditorMode::SingleLine;
|
||||
&& !matches!(this.mode, EditorMode::SingleLine { .. });
|
||||
|
||||
if advance_downwards {
|
||||
let snapshot = this.buffer.read(cx).snapshot(cx);
|
||||
@@ -8226,6 +8267,58 @@ impl Editor {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn select_enclosing_symbol(
|
||||
&mut self,
|
||||
_: &SelectEnclosingSymbol,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
let old_selections = self.selections.all::<usize>(cx).into_boxed_slice();
|
||||
|
||||
fn update_selection(
|
||||
selection: &Selection<usize>,
|
||||
buffer_snap: &MultiBufferSnapshot,
|
||||
) -> Option<Selection<usize>> {
|
||||
let cursor = selection.head();
|
||||
let (_buffer_id, symbols) = buffer_snap.symbols_containing(cursor, None)?;
|
||||
for symbol in symbols.iter().rev() {
|
||||
let start = symbol.range.start.to_offset(&buffer_snap);
|
||||
let end = symbol.range.end.to_offset(&buffer_snap);
|
||||
let new_range = start..end;
|
||||
if start < selection.start || end > selection.end {
|
||||
return Some(Selection {
|
||||
id: selection.id,
|
||||
start: new_range.start,
|
||||
end: new_range.end,
|
||||
goal: SelectionGoal::None,
|
||||
reversed: selection.reversed,
|
||||
});
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
let mut selected_larger_symbol = false;
|
||||
let new_selections = old_selections
|
||||
.iter()
|
||||
.map(|selection| match update_selection(selection, &buffer) {
|
||||
Some(new_selection) => {
|
||||
if new_selection.range() != selection.range() {
|
||||
selected_larger_symbol = true;
|
||||
}
|
||||
new_selection
|
||||
}
|
||||
None => selection.clone(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if selected_larger_symbol {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select(new_selections);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
pub fn select_larger_syntax_node(
|
||||
&mut self,
|
||||
_: &SelectLargerSyntaxNode,
|
||||
@@ -8386,13 +8479,14 @@ impl Editor {
|
||||
runnable: &mut Runnable,
|
||||
cx: &WindowContext<'_>,
|
||||
) -> Vec<(TaskSourceKind, TaskTemplate)> {
|
||||
let (inventory, worktree_id) = project.read_with(cx, |project, cx| {
|
||||
let worktree_id = project
|
||||
let (inventory, worktree_id, file) = project.read_with(cx, |project, cx| {
|
||||
let (worktree_id, file) = project
|
||||
.buffer_for_id(runnable.buffer)
|
||||
.and_then(|buffer| buffer.read(cx).file())
|
||||
.map(|file| WorktreeId::from_usize(file.worktree_id()));
|
||||
.map(|file| (WorktreeId::from_usize(file.worktree_id()), file.clone()))
|
||||
.unzip();
|
||||
|
||||
(project.task_inventory().clone(), worktree_id)
|
||||
(project.task_inventory().clone(), worktree_id, file)
|
||||
});
|
||||
|
||||
let inventory = inventory.read(cx);
|
||||
@@ -8402,7 +8496,12 @@ impl Editor {
|
||||
.flat_map(|tag| {
|
||||
let tag = tag.0.clone();
|
||||
inventory
|
||||
.list_tasks(Some(runnable.language.clone()), worktree_id)
|
||||
.list_tasks(
|
||||
file.clone(),
|
||||
Some(runnable.language.clone()),
|
||||
worktree_id,
|
||||
cx,
|
||||
)
|
||||
.into_iter()
|
||||
.filter(move |(_, template)| {
|
||||
template.tags.iter().any(|source_tag| source_tag == &tag)
|
||||
@@ -8743,13 +8842,7 @@ impl Editor {
|
||||
let display_point = initial_point.to_display_point(snapshot);
|
||||
let mut hunks = hunks
|
||||
.map(|hunk| diff_hunk_to_display(&hunk, &snapshot))
|
||||
.filter(|hunk| {
|
||||
if is_wrapped {
|
||||
true
|
||||
} else {
|
||||
!hunk.contains_display_row(display_point.row())
|
||||
}
|
||||
})
|
||||
.filter(|hunk| is_wrapped || !hunk.contains_display_row(display_point.row()))
|
||||
.dedup();
|
||||
|
||||
if let Some(hunk) = hunks.next() {
|
||||
@@ -12027,7 +12120,7 @@ impl Render for Editor {
let settings = ThemeSettings::get_global(cx);

let text_style = match self.mode {
EditorMode::SingleLine | EditorMode::AutoHeight { .. } => TextStyle {
EditorMode::SingleLine { .. } | EditorMode::AutoHeight { .. } => TextStyle {
color: cx.theme().colors().editor_foreground,
font_family: settings.ui_font.family.clone(),
font_features: settings.ui_font.features.clone(),
@@ -12056,7 +12149,7 @@ impl Render for Editor {
};

let background = match self.mode {
EditorMode::SingleLine => cx.theme().system().transparent,
EditorMode::SingleLine { .. } => cx.theme().system().transparent,
EditorMode::AutoHeight { max_lines: _ } => cx.theme().system().transparent,
EditorMode::Full => cx.theme().colors().editor_background,
};
@@ -12320,6 +12413,7 @@ impl ViewInputHandler for Editor {
let font_id = cx.text_system().resolve_font(&style.text.font());
let font_size = style.text.font_size.to_pixels(cx.rem_size());
let line_height = style.text.line_height_in_pixels(cx.rem_size());

let em_width = cx
.text_system()
.typographic_bounds(font_id, font_size, 'm')
@@ -12447,7 +12541,7 @@ pub fn diagnostic_block_renderer(diagnostic: Diagnostic, _is_valid: bool) -> Ren
let group_id: SharedString = cx.block_id.to_string().into();

let mut text_style = cx.text_style().clone();
text_style.color = diagnostic_style(diagnostic.severity, true, cx.theme().status());
text_style.color = diagnostic_style(diagnostic.severity, cx.theme().status());
let theme_settings = ThemeSettings::get_global(cx);
text_style.font_family = theme_settings.buffer_font.family.clone();
text_style.font_style = theme_settings.buffer_font.style;
@@ -12543,25 +12637,19 @@ pub fn highlight_diagnostic_message(diagnostic: &Diagnostic) -> (SharedString, V
prev_offset = ix + 1;
if in_code_block {
code_ranges.push(prev_len..text_without_backticks.len());
in_code_block = false;
} else {
in_code_block = true;
}
in_code_block = !in_code_block;
}

(text_without_backticks.into(), code_ranges)
}

fn diagnostic_style(severity: DiagnosticSeverity, valid: bool, colors: &StatusColors) -> Hsla {
match (severity, valid) {
(DiagnosticSeverity::ERROR, true) => colors.error,
(DiagnosticSeverity::ERROR, false) => colors.error,
(DiagnosticSeverity::WARNING, true) => colors.warning,
(DiagnosticSeverity::WARNING, false) => colors.warning,
(DiagnosticSeverity::INFORMATION, true) => colors.info,
(DiagnosticSeverity::INFORMATION, false) => colors.info,
(DiagnosticSeverity::HINT, true) => colors.info,
(DiagnosticSeverity::HINT, false) => colors.info,
fn diagnostic_style(severity: DiagnosticSeverity, colors: &StatusColors) -> Hsla {
match severity {
DiagnosticSeverity::ERROR => colors.error,
DiagnosticSeverity::WARNING => colors.warning,
DiagnosticSeverity::INFORMATION => colors.info,
DiagnosticSeverity::HINT => colors.info,
_ => colors.ignored,
}
}

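The two simplifications in the diagnostics hunks above, toggling `in_code_block` with a single negation and dropping the unused `valid` flag from `diagnostic_style`, can be sketched standalone. The `Severity` and `StatusColors` types below are stand-ins rather than Zed's real lsp/theme types:

```rust
// Standalone sketch of the simplifications above; the types are stand-ins.
#[derive(Clone, Copy)]
enum Severity {
    Error,
    Warning,
    Information,
    Hint,
    Other,
}

struct StatusColors {
    error: &'static str,
    warning: &'static str,
    info: &'static str,
    ignored: &'static str,
}

// With the `valid` flag gone, one arm per severity is enough.
fn diagnostic_color(severity: Severity, colors: &StatusColors) -> &'static str {
    match severity {
        Severity::Error => colors.error,
        Severity::Warning => colors.warning,
        Severity::Information | Severity::Hint => colors.info,
        Severity::Other => colors.ignored,
    }
}

// Backtick scanning: flipping the flag with `!` replaces the old if/else pair.
fn code_ranges(message: &str) -> Vec<std::ops::Range<usize>> {
    let mut ranges = Vec::new();
    let mut in_code_block = false;
    let mut start = 0;
    for (ix, ch) in message.char_indices() {
        if ch == '`' {
            if in_code_block {
                ranges.push(start..ix);
            } else {
                start = ix + 1;
            }
            in_code_block = !in_code_block;
        }
    }
    ranges
}

fn main() {
    let colors = StatusColors { error: "red", warning: "yellow", info: "blue", ignored: "gray" };
    assert_eq!(diagnostic_color(Severity::Warning, &colors), "yellow");
    assert_eq!(code_ranges("unknown field `foo`"), vec![15..18]);
}
```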
@@ -10,8 +10,8 @@ use crate::{
};
use futures::StreamExt;
use gpui::{
div, AssetSource, SemanticVersion, TestAppContext, UpdateGlobal, VisualTestContext,
WindowBounds, WindowOptions,
div, SemanticVersion, TestAppContext, UpdateGlobal, VisualTestContext, WindowBounds,
WindowOptions,
};
use indoc::indoc;
use language::{
@@ -7019,6 +7019,73 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
apply_additional_edits.await.unwrap();
}

#[gpui::test]
async fn test_completion_page_up_down_keys(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorLspTestContext::new_rust(
lsp::ServerCapabilities {
completion_provider: Some(lsp::CompletionOptions {
trigger_characters: Some(vec![".".to_string()]),
..Default::default()
}),
..Default::default()
},
cx,
)
.await;
cx.lsp
.handle_request::<lsp::request::Completion, _, _>(move |_, _| async move {
Ok(Some(lsp::CompletionResponse::Array(vec![
lsp::CompletionItem {
label: "first".into(),
..Default::default()
},
lsp::CompletionItem {
label: "last".into(),
..Default::default()
},
])))
});
cx.set_state("variableˇ");
cx.simulate_keystroke(".");
cx.executor().run_until_parked();

cx.update_editor(|editor, _| {
if let Some(ContextMenu::Completions(menu)) = editor.context_menu.read().as_ref() {
assert_eq!(
menu.matches.iter().map(|m| &m.string).collect::<Vec<_>>(),
&["first", "last"]
);
} else {
panic!("expected completion menu to be open");
}
});

cx.update_editor(|editor, cx| {
editor.move_page_down(&MovePageDown::default(), cx);
if let Some(ContextMenu::Completions(menu)) = editor.context_menu.read().as_ref() {
assert!(
menu.selected_item == 1,
"expected PageDown to select the last item from the context menu"
);
} else {
panic!("expected completion menu to stay open after PageDown");
}
});

cx.update_editor(|editor, cx| {
editor.move_page_up(&MovePageUp::default(), cx);
if let Some(ContextMenu::Completions(menu)) = editor.context_menu.read().as_ref() {
assert!(
menu.selected_item == 0,
"expected PageUp to select the first item from the context menu"
);
} else {
panic!("expected completion menu to stay open after PageUp");
}
});
}

#[gpui::test]
async fn test_no_duplicated_completion_requests(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
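The new test above drives PageUp/PageDown against an open completion menu. A hypothetical, self-contained sketch of the behaviour it asserts, jumping the selection to the first or last match rather than scrolling the buffer, could look like this; `CompletionsMenu` here is a stand-in, not the editor's real type:

```rust
// Hypothetical stand-in for the behaviour the test asserts: with a completion
// menu open, PageDown selects the last match and PageUp selects the first.
struct CompletionsMenu {
    matches: Vec<String>,
    selected_item: usize,
}

impl CompletionsMenu {
    fn select_first(&mut self) {
        self.selected_item = 0;
    }

    fn select_last(&mut self) {
        self.selected_item = self.matches.len().saturating_sub(1);
    }
}

fn main() {
    let mut menu = CompletionsMenu {
        matches: vec!["first".into(), "last".into()],
        selected_item: 0,
    };
    menu.select_last(); // stands in for PageDown while the menu is open
    assert_eq!(menu.selected_item, 1);
    menu.select_first(); // stands in for PageUp while the menu is open
    assert_eq!(menu.selected_item, 0);
}
```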
@@ -12489,12 +12556,7 @@ pub(crate) fn update_test_project_settings(

pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsContent)) {
_ = cx.update(|cx| {
cx.text_system()
.add_fonts(vec![assets::Assets
.load("fonts/zed-mono/zed-mono-extended.ttf")
.unwrap()
.unwrap()])
.unwrap();
assets::Assets.load_test_fonts(cx);
let store = SettingsStore::test(cx);
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);

@@ -276,6 +276,7 @@ impl EditorElement {
register_action(view, cx, Editor::toggle_comments);
register_action(view, cx, Editor::select_larger_syntax_node);
register_action(view, cx, Editor::select_smaller_syntax_node);
register_action(view, cx, Editor::select_enclosing_symbol);
register_action(view, cx, Editor::move_to_enclosing_bracket);
register_action(view, cx, Editor::undo_selection);
register_action(view, cx, Editor::redo_selection);
@@ -1118,11 +1119,12 @@ impl EditorElement {
ScrollBeyondLastLine::Off => 1.0,
ScrollBeyondLastLine::VerticalScrollMargin => 1.0 + settings.vertical_scroll_margin,
};
let total_rows = snapshot.max_point().row().as_f32() + scroll_beyond_last_line;
let total_rows =
(snapshot.max_point().row().as_f32() + scroll_beyond_last_line).max(rows_per_page);
let height = bounds.size.height;
let px_per_row = height / total_rows;
let thumb_height = (rows_per_page * px_per_row).max(ScrollbarLayout::MIN_THUMB_HEIGHT);
let row_height = (height - thumb_height) / (total_rows - rows_per_page).max(0.0);
let row_height = (height - thumb_height) / (total_rows - rows_per_page).max(0.);

Some(ScrollbarLayout {
hitbox: cx.insert_hitbox(track_bounds, false),
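The scrollbar hunk above clamps `total_rows` to at least one page of rows. A minimal sketch of that arithmetic with plain `f32`s standing in for `Pixels` (and an assumed `MIN_THUMB_HEIGHT`) shows why: without the clamp, a buffer shorter than the viewport yields a thumb taller than the track and a negative denominator for `row_height`.

```rust
// Plain-f32 sketch of the thumb math after the clamp; MIN_THUMB_HEIGHT is an
// assumed constant, not the value Zed uses.
const MIN_THUMB_HEIGHT: f32 = 20.0;

fn scrollbar_thumb(
    track_height: f32,
    max_row: f32,
    scroll_beyond_last_line: f32,
    rows_per_page: f32,
) -> (f32, f32) {
    // Clamping keeps short buffers from producing total_rows < rows_per_page.
    let total_rows = (max_row + scroll_beyond_last_line).max(rows_per_page);
    let px_per_row = track_height / total_rows;
    let thumb_height = (rows_per_page * px_per_row).max(MIN_THUMB_HEIGHT);
    // Only meaningful when the buffer is taller than the viewport.
    let row_height = (track_height - thumb_height) / (total_rows - rows_per_page).max(0.);
    (thumb_height, row_height)
}

fn main() {
    // A 10-line buffer in a 40-row viewport: total_rows clamps to 40, so the
    // thumb exactly fills the 800px track instead of overflowing it.
    let (thumb, _row_height) = scrollbar_thumb(800.0, 10.0, 1.0, 40.0);
    assert_eq!(thumb, 800.0);
}
```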
@@ -1830,10 +1832,10 @@ impl EditorElement {
}

fn layout_lines(
&self,
rows: Range<DisplayRow>,
line_number_layouts: &[Option<ShapedLine>],
snapshot: &EditorSnapshot,
style: &EditorStyle,
cx: &mut WindowContext,
) -> Vec<LineWithInvisibles> {
if rows.start >= rows.end {
@@ -1842,7 +1844,7 @@ impl EditorElement {

// Show the placeholder when the editor is empty
if snapshot.is_empty() {
let font_size = self.style.text.font_size.to_pixels(cx.rem_size());
let font_size = style.text.font_size.to_pixels(cx.rem_size());
let placeholder_color = cx.theme().colors().text_placeholder;
let placeholder_text = snapshot.placeholder_text();

@@ -1857,7 +1859,7 @@ impl EditorElement {
.filter_map(move |line| {
let run = TextRun {
len: line.len(),
font: self.style.text.font(),
font: style.text.font(),
color: placeholder_color,
background_color: None,
underline: Default::default(),
@@ -1876,10 +1878,10 @@ impl EditorElement {
})
.collect()
} else {
let chunks = snapshot.highlighted_chunks(rows.clone(), true, &self.style);
let chunks = snapshot.highlighted_chunks(rows.clone(), true, style);
LineWithInvisibles::from_chunks(
chunks,
&self.style.text,
&style.text,
MAX_LINE_LEN,
rows.len(),
line_number_layouts,
@@ -4474,7 +4476,7 @@ impl EditorElement {
// We currently use single-line and auto-height editors in UI contexts,
// so we don't want to scale everything with the buffer font size, as it
// ends up looking off.
EditorMode::SingleLine | EditorMode::AutoHeight { .. } => None,
EditorMode::SingleLine { .. } | EditorMode::AutoHeight { .. } => None,
}
}
}
@@ -4498,12 +4500,43 @@ impl Element for EditorElement {
editor.set_style(self.style.clone(), cx);

let layout_id = match editor.mode {
EditorMode::SingleLine => {
EditorMode::SingleLine { auto_width } => {
let rem_size = cx.rem_size();
let mut style = Style::default();
style.size.width = relative(1.).into();
style.size.height = self.style.text.line_height_in_pixels(rem_size).into();
cx.request_layout(style, None)

let height = self.style.text.line_height_in_pixels(rem_size);
if auto_width {
let editor_handle = cx.view().clone();
let style = self.style.clone();
cx.request_measured_layout(Style::default(), move |_, _, cx| {
let editor_snapshot =
editor_handle.update(cx, |editor, cx| editor.snapshot(cx));
let line = Self::layout_lines(
DisplayRow(0)..DisplayRow(1),
&[],
&editor_snapshot,
&style,
cx,
)
.pop()
.unwrap();

let font_id = cx.text_system().resolve_font(&style.text.font());
let font_size = style.text.font_size.to_pixels(cx.rem_size());
let em_width = cx
.text_system()
.typographic_bounds(font_id, font_size, 'm')
.unwrap()
.size
.width;

size(line.width + em_width, height)
})
} else {
let mut style = Style::default();
style.size.height = height.into();
style.size.width = relative(1.).into();
cx.request_layout(style, None)
}
}
EditorMode::AutoHeight { max_lines } => {
let editor_handle = cx.view().clone();
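For the new `auto_width` branch, the measured layout boils down to a simple sizing rule: one line height tall, and the laid-out line's width plus one `em` wide. A stand-alone sketch of that rule with plain `f32`s (the cursor-slack rationale for the extra `em` is a guess, not stated in the diff):

```rust
// Stand-alone sketch of the auto-width sizing rule; plain f32s stand in for
// gpui's Pixels, and the extra em of slack is assumed to leave cursor room.
struct MeasuredLine {
    width: f32,
}

fn single_line_editor_size(line: &MeasuredLine, em_width: f32, line_height: f32) -> (f32, f32) {
    (line.width + em_width, line_height)
}

fn main() {
    let line = MeasuredLine { width: 96.0 };
    assert_eq!(single_line_editor_size(&line, 8.0, 18.0), (104.0, 18.0));
}
```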
@@ -4644,17 +4677,17 @@ impl Element for EditorElement {
text_hitbox.origin + point(gutter_dimensions.margin, Pixels::ZERO);

let height_in_lines = bounds.size.height / line_height;
let max_row = snapshot.max_point().row().as_f32();
let max_scroll_top = if matches!(snapshot.mode, EditorMode::AutoHeight { .. }) {
(snapshot.max_point().row().as_f32() - height_in_lines + 1.).max(0.)
(max_row - height_in_lines + 1.).max(0.)
} else {
let settings = EditorSettings::get_global(cx);
let max_row = snapshot.max_point().row().as_f32();
match settings.scroll_beyond_last_line {
ScrollBeyondLastLine::OnePage => max_row,
ScrollBeyondLastLine::Off => (max_row - height_in_lines + 1.0).max(0.0),
ScrollBeyondLastLine::Off => (max_row - height_in_lines + 1.).max(0.),
ScrollBeyondLastLine::VerticalScrollMargin => {
(max_row - height_in_lines + 1.0 + settings.vertical_scroll_margin)
.max(0.0)
(max_row - height_in_lines + 1. + settings.vertical_scroll_margin)
.max(0.)
}
}
};
@@ -4762,8 +4795,13 @@ impl Element for EditorElement {
);

let mut max_visible_line_width = Pixels::ZERO;
let mut line_layouts =
self.layout_lines(start_row..end_row, &line_numbers, &snapshot, cx);
let mut line_layouts = Self::layout_lines(
start_row..end_row,
&line_numbers,
&snapshot,
&self.style,
cx,
);
for line_with_invisibles in &line_layouts {
if line_with_invisibles.width > max_visible_line_width {
max_visible_line_width = line_with_invisibles.width;
@@ -4791,16 +4829,43 @@ impl Element for EditorElement {
)
});

let scroll_pixel_position = point(
scroll_position.x * em_width,
scroll_position.y * line_height,
);

let start_buffer_row =
MultiBufferRow(start_anchor.to_point(&snapshot.buffer_snapshot).row);
let end_buffer_row =
MultiBufferRow(end_anchor.to_point(&snapshot.buffer_snapshot).row);

let scroll_max = point(
((scroll_width - text_hitbox.size.width) / em_width).max(0.0),
max_row.as_f32(),
);

self.editor.update(cx, |editor, cx| {
let clamped = editor.scroll_manager.clamp_scroll_left(scroll_max.x);

let autoscrolled = if autoscroll_horizontally {
editor.autoscroll_horizontally(
start_row,
text_hitbox.size.width,
scroll_width,
em_width,
&line_layouts,
cx,
)
} else {
false
};

if clamped || autoscrolled {
snapshot = editor.snapshot(cx);
scroll_position = snapshot.scroll_position();
}
});

let scroll_pixel_position = point(
scroll_position.x * em_width,
scroll_position.y * line_height,
);

let indent_guides = self.layout_indent_guides(
content_origin,
text_hitbox.origin,
@@ -6064,7 +6129,7 @@ mod tests {
});

for editor_mode_without_invisibles in [
EditorMode::SingleLine,
EditorMode::SingleLine { auto_width: false },
EditorMode::AutoHeight { max_lines: 100 },
] {
let invisibles = collect_invisibles_from_new_editor(

@@ -165,10 +165,16 @@ pub fn indent_guides_in_range(
.indent_guides_in_range(start_anchor..end_anchor, ignore_disabled_for_language, cx)
.into_iter()
.filter(|indent_guide| {
let start =
MultiBufferRow(indent_guide.multibuffer_row_range.start.0.saturating_sub(1));
// Filter out indent guides that are inside a fold
!snapshot.is_line_folded(MultiBufferRow(
indent_guide.multibuffer_row_range.start.0.saturating_sub(1),
))
let is_folded = snapshot.is_line_folded(start);
let line_indent = snapshot.line_indent_for_buffer_row(start);

let contained_in_fold =
line_indent.len(indent_guide.tab_size) <= indent_guide.indent_level();

!(is_folded && contained_in_fold)
})
.collect()
}

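The indent-guide hunk above replaces the blanket "hide if the preceding row is folded" check with a two-part predicate: hide a guide only when that row is folded and the guide is indented at least as far as the row itself. A self-contained sketch of the predicate (the `IndentGuide` type and column units are simplified stand-ins):

```rust
// Simplified stand-in for the new filter predicate; the real code reads the
// indent from the MultiBuffer snapshot and measures it with the tab size.
struct IndentGuide {
    indent_level: u32,
}

fn keep_guide(guide: &IndentGuide, is_folded: bool, line_indent_columns: u32) -> bool {
    // The guide sits inside the fold if it is indented at least as far as
    // the folded line itself.
    let contained_in_fold = line_indent_columns <= guide.indent_level;
    !(is_folded && contained_in_fold)
}

fn main() {
    // Folded line indented 4 columns: a level-8 guide is hidden, but a
    // level-2 guide (outside the fold) still shows.
    assert!(!keep_guide(&IndentGuide { indent_level: 8 }, true, 4));
    assert!(keep_guide(&IndentGuide { indent_level: 2 }, true, 4));
    // Nothing is filtered when the line is not folded.
    assert!(keep_guide(&IndentGuide { indent_level: 8 }, false, 4));
}
```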
@@ -1102,6 +1102,35 @@ impl SearchableItem for Editor {
});
}
}
fn replace_all(
&mut self,
matches: &mut dyn Iterator<Item = &Self::Match>,
query: &SearchQuery,
cx: &mut ViewContext<Self>,
) {
let text = self.buffer.read(cx);
let text = text.snapshot(cx);
let mut edits = vec![];
for m in matches {
let text = text.text_for_range(m.clone()).collect::<Vec<_>>();
let text: Cow<_> = if text.len() == 1 {
text.first().cloned().unwrap().into()
} else {
let joined_chunks = text.join("");
joined_chunks.into()
};

if let Some(replacement) = query.replacement_for(&text) {
edits.push((m.clone(), Arc::from(&*replacement)));
}
}

if !edits.is_empty() {
self.transact(cx, |this, cx| {
this.edit(edits, cx);
});
}
}
fn match_index_for_direction(
&mut self,
matches: &[Range<Anchor>],
||||